@telnyx/ai-agent-lib 0.1.6 → 0.1.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -331,7 +331,7 @@ This library is built with TypeScript and provides full type definitions. All ho
  
  ## Examples
  
- Check out the `/src` directory for a complete example application demonstrating all features of the library.
+ Check out the [example](https://github.com/team-telnyx/telnyx-ai-agent-lib-example) repository for a complete example application demonstrating all features of the library.
  
  ## Dependencies
  
package/dist/client.d.ts CHANGED
@@ -16,7 +16,7 @@ export declare class TelnyxAIAgent extends EventEmitter<AIAgentEvents> {
    constructor(params: TelnyxAIAgentConstructorParams);
    connect(): Promise<void>;
    disconnect(): Promise<void>;
-   sendConversationMessage(message: string): void;
+   sendConversationMessage(message: string, attachments?: string[]): void;
    get transcript(): TranscriptItem[];
    startConversation(): Promise<void>;
    endConversation(): void | undefined;
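The only public API change in 0.1.7 is the optional `attachments` parameter on `sendConversationMessage`. Below is a minimal usage sketch, assuming `TelnyxAIAgent` is exported from the package entry point and that attachments are plain URL strings; the `string[]` type and the bundle's `image_url` handling suggest image URLs, but the exact accepted format is an assumption.

```ts
import type { TelnyxAIAgent } from "@telnyx/ai-agent-lib";

// Hypothetical helper: the agent is constructed, connected, and the
// conversation started elsewhere, exactly as in 0.1.6. Only the
// sendConversationMessage call changes in 0.1.7.
function askAboutImage(agent: TelnyxAIAgent, imageUrl: string): void {
  // 0.1.6-style call still works: text only.
  agent.sendConversationMessage("Hello there!");

  // 0.1.7: optional second argument, typed as string[]. An image URL is
  // assumed here based on the transcript handling in dist/index.js.
  agent.sendConversationMessage("What is in this picture?", [imageUrl]);
}
```

The parameter defaults to `[]` in the implementation, so existing call sites compile and behave unchanged.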
package/dist/index.js CHANGED
@@ -2558,34 +2558,67 @@ function di() {
  }
  var ui = di();
  const dt = /* @__PURE__ */ li(ui), Ee = new dt();
- function hi(t) {
+ function hi(t, e) {
+   let n = 0;
+   return (...i) => {
+     const s = Date.now();
+     s - n >= e && (n = s, t(...i));
+   };
+ }
+ const pi = 10;
+ class fi {
+   animationFrameId = null;
+   stream = null;
+   audioContext = null;
+   source = null;
+   analyser = null;
+   updateAgentState = hi((e) => {
+     Ee.emit("conversation.agent.state", e);
+   }, 100);
+   setMonitoredAudioStream(e) {
+     this.stream && this.stopAudioStreamMonitor(), this.stream = e, this.startAudioStreamMonitor();
+   }
+   stopAudioStreamMonitor() {
+     this.animationFrameId && (cancelAnimationFrame(this.animationFrameId), this.animationFrameId = null), this.source?.disconnect();
+   }
+   startAudioStreamMonitor() {
+     if (!this.stream) return;
+     this.audioContext = new AudioContext(), this.source = this.audioContext.createMediaStreamSource(this.stream), this.analyser = this.audioContext.createAnalyser(), this.source.connect(this.analyser), this.analyser.fftSize = 512;
+     const e = new Uint8Array(this.analyser.frequencyBinCount), n = () => {
+       const i = e.reduce((s, c) => s + c, 0) / e.length;
+       this.analyser?.getByteFrequencyData(e), i >= pi ? this.updateAgentState("speaking") : this.updateAgentState("listening"), this.animationFrameId = requestAnimationFrame(n);
+     };
+     this.animationFrameId = requestAnimationFrame(n);
+   }
+ }
+ function gi(t) {
    if (!t || typeof t != "object")
      return !1;
    const e = t;
    return e.method === "ai_conversation" && typeof e.params == "object";
  }
- function pi(t) {
+ function vi(t) {
    if (!t || typeof t != "object")
      return !1;
    const e = t;
    return e.params.type !== "response.text.delta" ? !1 : !!e.params.delta;
  }
- function fi(t) {
+ function mi(t) {
    if (!t || typeof t != "object")
      return !1;
    const e = t;
    return e.params.type !== "conversation.item.created" ? !1 : !!e.params.item.content;
  }
- function gi(t) {
-   return pi(t) ? {
+ function bi(t) {
+   return vi(t) ? {
      id: `${t.params.item_id}-${Date.now()}`,
      role: "assistant",
      content: t.params.delta,
      timestamp: /* @__PURE__ */ new Date()
    } : null;
  }
- function vi(t) {
-   if (!fi(t) || t.params.item.role !== "user" || t.params.item.status !== "completed")
+ function yi(t) {
+   if (!mi(t) || t.params.item.role !== "user" || t.params.item.status !== "completed")
      return null;
    let e = "", n = [];
    return Array.isArray(t.params.item.content) ? (e = t.params.item.content.reduce((i, s) => (s.type === "text" && (i += s.text), i), ""), n = t.params.item.content.reduce((i, s) => (s.type === "image_url" && s.image_url.url && i.push({ type: "image", url: s.image_url.url }), i), [])) : e = t.params.item.content, {
@@ -2596,61 +2629,28 @@ function vi(t) {
      attachments: n
    };
  }
- class mi extends dt {
+ class _i extends dt {
    telnyxRTC;
    transcript = [];
    constructor(e) {
      super(), this.telnyxRTC = e, this.telnyxRTC.on(O.SocketMessage, this.onSocketMessage);
    }
    onSocketMessage = (e) => {
-     if (hi(e))
+     if (gi(e))
        switch (e.params.type) {
          case "response.text.delta": {
-           const n = gi(e);
+           const n = bi(e);
            n && (this.transcript.push(n), this.emit("transcript.item", n), Ee.emit("conversation.agent.state", "listening"));
            return;
          }
          case "conversation.item.created": {
-           const n = vi(e);
+           const n = yi(e);
            n && (this.transcript.push(n), this.emit("transcript.item", n), Ee.emit("conversation.agent.state", "thinking"));
            return;
          }
        }
    };
  }
- function bi(t, e) {
-   let n = 0;
-   return (...i) => {
-     const s = Date.now();
-     s - n >= e && (n = s, t(...i));
-   };
- }
- const yi = 10;
- class _i {
-   animationFrameId = null;
-   stream = null;
-   audioContext = null;
-   source = null;
-   analyser = null;
-   updateAgentState = bi((e) => {
-     Ee.emit("conversation.agent.state", e);
-   }, 100);
-   setMonitoredAudioStream(e) {
-     this.stream && this.stopAudioStreamMonitor(), this.stream = e, this.startAudioStreamMonitor();
-   }
-   stopAudioStreamMonitor() {
-     this.animationFrameId && (cancelAnimationFrame(this.animationFrameId), this.animationFrameId = null), this.source?.disconnect();
-   }
-   startAudioStreamMonitor() {
-     if (!this.stream) return;
-     this.audioContext = new AudioContext(), this.source = this.audioContext.createMediaStreamSource(this.stream), this.analyser = this.audioContext.createAnalyser(), this.source.connect(this.analyser), this.analyser.fftSize = 512;
-     const e = new Uint8Array(this.analyser.frequencyBinCount), n = () => {
-       const i = e.reduce((s, c) => s + c, 0) / e.length;
-       this.analyser?.getByteFrequencyData(e), i >= yi ? this.updateAgentState("speaking") : this.updateAgentState("listening"), this.animationFrameId = requestAnimationFrame(n);
-     };
-     this.animationFrameId = requestAnimationFrame(n);
-   }
- }
  class Ot extends dt {
    telnyxRTC;
    transcription;
@@ -2667,10 +2667,10 @@ class Ot extends dt {
        target_type: "ai_assistant",
        target_version_id: e.versionId
      }
-   }), this.telnyxRTC.on(O.Ready, this.onClientReady), this.telnyxRTC.on(O.Error, this.onClientOrSocketError), this.telnyxRTC.on(O.SocketError, this.onClientOrSocketError), this.telnyxRTC.on(O.Notification, this.onNotification), this.transcription = new mi(this.telnyxRTC), this.transcription.addListener("transcript.item", this.onTranscriptItem), Ee.addListener(
+   }), this.telnyxRTC.on(O.Ready, this.onClientReady), this.telnyxRTC.on(O.Error, this.onClientOrSocketError), this.telnyxRTC.on(O.SocketError, this.onClientOrSocketError), this.telnyxRTC.on(O.Notification, this.onNotification), this.transcription = new _i(this.telnyxRTC), this.transcription.addListener("transcript.item", this.onTranscriptItem), Ee.addListener(
      "conversation.agent.state",
      this.onAgentStateChange
-   ), this.audioStreamMonitor = new _i();
+   ), this.audioStreamMonitor = new fi();
  }
  async connect() {
    return this.telnyxRTC.connect();
@@ -2678,12 +2678,12 @@ class Ot extends dt {
  async disconnect() {
    this.audioStreamMonitor.stopAudioStreamMonitor(), this.telnyxRTC.disconnect(), this.telnyxRTC.off(O.Ready, this.onClientReady), this.telnyxRTC.off(O.Error, this.onClientOrSocketError), this.telnyxRTC.off(O.SocketError, this.onClientOrSocketError), this.telnyxRTC.off(O.Notification, this.onNotification), this.emit("agent.disconnected"), this.transcription.removeAllListeners(), Ee.removeAllListeners(), this.removeAllListeners();
  }
-   sendConversationMessage(e) {
+   sendConversationMessage(e, n = []) {
    if (!this.activeCall) {
      console.error("No active call to send message.");
      return;
    }
-     this.activeCall.sendConversationMessage(e);
+     this.activeCall.sendConversationMessage(e, n);
  }
  get transcript() {
    return this.transcription.transcript;
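Most of the `dist/index.js` churn above is the bundler hoisting and renaming its minified identifiers: the throttle helper and the audio-stream monitor move ahead of the transcription class (`hi`/`pi`/`fi`/`gi`/`vi`/`mi`/`bi`/`yi`/`_i` are reshuffled names, not new logic), and the behavioral change is that `sendConversationMessage` now forwards an attachments array (default `[]`) to the active call. For readability, here is a sketch of what the moved monitor block does, with invented descriptive names in place of the minified ones; the names and the stub emitter are mine, not the library's.

```ts
// Readable reconstruction of the moved bundle code. All identifier names
// here are invented for clarity; they do not exist in the published file.
type AgentState = "speaking" | "listening";

// Minimal stand-in for the bundle's shared EventEmitter instance.
const sharedEmitter = {
  emit(event: "conversation.agent.state", state: AgentState): void {
    console.log(event, state);
  },
};

// Invoke fn at most once per waitMs (leading edge only), as in the bundle.
function throttle<T extends unknown[]>(fn: (...args: T) => void, waitMs: number) {
  let last = 0;
  return (...args: T): void => {
    const now = Date.now();
    if (now - last >= waitMs) {
      last = now;
      fn(...args);
    }
  };
}

const SPEAKING_THRESHOLD = 10; // average byte-frequency level treated as speech

class AudioStreamMonitor {
  private animationFrameId: number | null = null;
  private stream: MediaStream | null = null;
  private audioContext: AudioContext | null = null;
  private source: MediaStreamAudioSourceNode | null = null;
  private analyser: AnalyserNode | null = null;

  // Emit at most one agent-state event every 100 ms.
  private updateAgentState = throttle((state: AgentState) => {
    sharedEmitter.emit("conversation.agent.state", state);
  }, 100);

  setMonitoredAudioStream(stream: MediaStream): void {
    if (this.stream) this.stopAudioStreamMonitor();
    this.stream = stream;
    this.startAudioStreamMonitor();
  }

  stopAudioStreamMonitor(): void {
    if (this.animationFrameId !== null) {
      cancelAnimationFrame(this.animationFrameId);
      this.animationFrameId = null;
    }
    this.source?.disconnect();
  }

  startAudioStreamMonitor(): void {
    if (!this.stream) return;
    this.audioContext = new AudioContext();
    this.source = this.audioContext.createMediaStreamSource(this.stream);
    this.analyser = this.audioContext.createAnalyser();
    this.source.connect(this.analyser);
    this.analyser.fftSize = 512;

    const levels = new Uint8Array(this.analyser.frequencyBinCount);
    const tick = (): void => {
      // As in the bundle, the average is computed before the current
      // frame's data is read, so it lags the analyser by one frame.
      const average = levels.reduce((sum, v) => sum + v, 0) / levels.length;
      this.analyser?.getByteFrequencyData(levels);
      this.updateAgentState(average >= SPEAKING_THRESHOLD ? "speaking" : "listening");
      this.animationFrameId = requestAnimationFrame(tick);
    };
    this.animationFrameId = requestAnimationFrame(tick);
  }
}
```

The threshold of 10 and the 100 ms throttle interval are taken directly from the bundled code; only the naming is new.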
package/package.json CHANGED
@@ -1,7 +1,7 @@
  {
    "name": "@telnyx/ai-agent-lib",
    "private": false,
-   "version": "0.1.6",
+   "version": "0.1.7",
    "type": "module",
    "main": "dist/index.js",
    "types": "dist/index.d.ts",