@fencyai/react 0.1.11 → 0.1.12

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,12 +1,13 @@
+ import { ChatCompletion } from '@fencyai/js';
  import { ChatCompletionChunk } from './useEventSource';
- interface ChatCompletion {
- chatCompletionId: string;
+ interface Completions {
+ chatCompletion: ChatCompletion;
  streamId: string;
  chunks: ChatCompletionChunk[];
  fullMessage: string;
  }
- export declare function useChatCompletion(): {
- chatCompletions: ChatCompletion[];
+ interface HookResponse {
+ chatCompletions: Completions[];
  createChatCompletion: (params: {
  prompt: string;
  model: 'gpt-4o-mini' | 'gpt-4o';
@@ -14,6 +15,8 @@ export declare function useChatCompletion(): {
  streamId: string;
  chatCompletionId: string;
  }>;
- };
+ latestCompletion: Completions | null;
+ }
+ export declare function useChatCompletion(): HookResponse;
  export {};
  //# sourceMappingURL=useChatCompletion.d.ts.map
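For orientation, a minimal consumer-side sketch of the 0.1.12 hook surface described by this declaration diff. The component, prompt text, and provider setup are illustrative and not part of the package; only the hook's return shape is taken from the diff.

import { useChatCompletion } from '@fencyai/react'

// Assumes this component is rendered inside a <FencyProvider>.
function PromptBox() {
  const { createChatCompletion, chatCompletions, latestCompletion } = useChatCompletion()

  const ask = async () => {
    // Resolves with the stream and chat completion ids; chunks then arrive over the event source.
    await createChatCompletion({ prompt: 'Hello, how are you?', model: 'gpt-4o-mini' })
  }

  return (
    <div>
      <button onClick={ask}>Ask</button>
      {/* latestCompletion tracks the most recent entry in chatCompletions */}
      <p>{latestCompletion?.fullMessage}</p>
      <p>{chatCompletions.length} completion(s) so far</p>
    </div>
  )
}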
@@ -1 +1 @@
- {"version":3,"file":"useChatCompletion.d.ts","sourceRoot":"","sources":["../../src/hooks/useChatCompletion.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,mBAAmB,EAAkB,MAAM,kBAAkB,CAAA;AAGtE,UAAU,cAAc;IACpB,gBAAgB,EAAE,MAAM,CAAA;IACxB,QAAQ,EAAE,MAAM,CAAA;IAChB,MAAM,EAAE,mBAAmB,EAAE,CAAA;IAC7B,WAAW,EAAE,MAAM,CAAA;CACtB;AAED,wBAAgB,iBAAiB,IAAI;IACjC,eAAe,EAAE,cAAc,EAAE,CAAA;IACjC,oBAAoB,EAAE,CAAC,MAAM,EAAE;QAC3B,MAAM,EAAE,MAAM,CAAA;QACd,KAAK,EAAE,aAAa,GAAG,QAAQ,CAAA;KAClC,KAAK,OAAO,CAAC;QACV,QAAQ,EAAE,MAAM,CAAA;QAChB,gBAAgB,EAAE,MAAM,CAAA;KAC3B,CAAC,CAAA;CACL,CA0EA"}
+ {"version":3,"file":"useChatCompletion.d.ts","sourceRoot":"","sources":["../../src/hooks/useChatCompletion.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,cAAc,EAA8C,MAAM,aAAa,CAAA;AAExF,OAAO,EAAE,mBAAmB,EAAkB,MAAM,kBAAkB,CAAA;AAGtE,UAAU,WAAW;IACjB,cAAc,EAAE,cAAc,CAAA;IAC9B,QAAQ,EAAE,MAAM,CAAA;IAChB,MAAM,EAAE,mBAAmB,EAAE,CAAA;IAC7B,WAAW,EAAE,MAAM,CAAA;CACtB;AAED,UAAU,YAAY;IAClB,eAAe,EAAE,WAAW,EAAE,CAAA;IAC9B,oBAAoB,EAAE,CAAC,MAAM,EAAE;QAC3B,MAAM,EAAE,MAAM,CAAA;QACd,KAAK,EAAE,aAAa,GAAG,QAAQ,CAAA;KAClC,KAAK,OAAO,CAAC;QACV,QAAQ,EAAE,MAAM,CAAA;QAChB,gBAAgB,EAAE,MAAM,CAAA;KAC3B,CAAC,CAAA;IACF,gBAAgB,EAAE,WAAW,GAAG,IAAI,CAAA;CACvC;AAED,wBAAgB,iBAAiB,IAAI,YAAY,CA8EhD"}
@@ -6,13 +6,14 @@ import { useFency } from './useFency';
  export function useChatCompletion() {
  const fency = useFency();
  const { chunks, setUrl } = useEventSource();
+ const [chatCompletions, setChatCompletions] = useState([]);
  const [stream, setStream] = useState(null);
  const create = useCallback(async (params) => {
  // Step 1: Create stream if not exists
  const s = await createStream(fency.fency.publishableKey);
  setStream(s);
  // Step 2: Send chat completion
- await createChatCompletion(fency.fency.publishableKey, s.id, {
+ const chatCompletion = await createChatCompletion(fency.fency.publishableKey, s.id, {
  request: {
  openai: {
  model: params.model,
@@ -20,32 +21,33 @@ export function useChatCompletion() {
  },
  },
  });
+ setChatCompletions((prev) => [...prev, chatCompletion]);
  return {
  streamId: s.id,
  chatCompletionId: s.id,
  };
  }, [fency]);
- const chatCompletions = useMemo(() => {
- const chatCompletions = [];
- const uniqueChatCompletionIds = [
- ...new Set(chunks.map((chunk) => chunk.chatCompletionId)),
- ];
- for (const chatCompletionId of uniqueChatCompletionIds) {
+ const completions = useMemo(() => {
+ const completions = [];
+ for (const chatCompletion of chatCompletions) {
  const relevantChunks = chunks
- .filter((chunk) => chunk.chatCompletionId === chatCompletionId)
+ .filter((chunk) => chunk.chatCompletionId === chatCompletion.id)
  .sort((a, b) => a.timestamp.localeCompare(b.timestamp));
  const fullMessage = relevantChunks
  .map((chunk) => chunk.content)
  .join('');
- chatCompletions.push({
- chatCompletionId,
+ completions.push({
+ chatCompletion,
  streamId: relevantChunks[0].streamId,
  chunks: relevantChunks,
  fullMessage,
  });
  }
- return chatCompletions;
- }, [chunks]);
+ return completions;
+ }, [chunks, chatCompletions]);
+ const latestCompletion = useMemo(() => {
+ return completions[completions.length - 1];
+ }, [completions]);
  useEffect(() => {
  if (stream) {
  setUrl(`http://localhost:8080/v1/streams/${stream.id}?pk=${fency.fency.publishableKey}`);
@@ -53,6 +55,7 @@ export function useChatCompletion() {
  }, [stream, fency.fency.publishableKey, setUrl]);
  return {
  createChatCompletion: create,
- chatCompletions,
+ chatCompletions: completions,
+ latestCompletion,
  };
  }
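In plainer TypeScript, the change in this hunk is that completions are now keyed off the ChatCompletion objects returned by createChatCompletion (kept in component state) rather than being derived from the set of chunk ids, and a memoized latestCompletion exposes the last entry. Below is a rough, standalone paraphrase of the new aggregation step, with types taken from the declaration diff; it is a sketch, not a verbatim extract of the shipped code.

import type { ChatCompletion } from '@fencyai/js'
import type { ChatCompletionChunk } from './useEventSource'

interface Completions {
  chatCompletion: ChatCompletion
  streamId: string
  chunks: ChatCompletionChunk[]
  fullMessage: string
}

// One Completions entry per ChatCompletion we have created:
// collect its chunks in timestamp order and join their contents.
function toCompletions(
  chatCompletions: ChatCompletion[],
  chunks: ChatCompletionChunk[]
): Completions[] {
  return chatCompletions.map((chatCompletion) => {
    const relevantChunks = chunks
      .filter((chunk) => chunk.chatCompletionId === chatCompletion.id)
      .sort((a, b) => a.timestamp.localeCompare(b.timestamp))
    return {
      chatCompletion,
      // NB: the shipped code reads relevantChunks[0].streamId directly;
      // the fallback here only keeps this sketch total before any chunk arrives.
      streamId: relevantChunks[0]?.streamId ?? '',
      chunks: relevantChunks,
      fullMessage: relevantChunks.map((chunk) => chunk.content).join(''),
    }
  })
}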
package/dist/index.cjs CHANGED
@@ -1 +1 @@
- "use strict";var y=Object.defineProperty;var k=Object.getOwnPropertyDescriptor;var E=Object.getOwnPropertyNames;var T=Object.prototype.hasOwnProperty;var O=(e,t)=>{for(var n in t)y(e,n,{get:t[n],enumerable:!0})},$=(e,t,n,i)=>{if(t&&typeof t=="object"||typeof t=="function")for(let o of E(t))!T.call(e,o)&&o!==n&&y(e,o,{get:()=>t[o],enumerable:!(i=k(t,o))||i.enumerable});return e};var j=e=>$(y({},"__esModule",{value:!0}),e);var J={};O(J,{FencyProvider:()=>w,useChatCompletion:()=>x,useFency:()=>f});module.exports=j(J);var u=require("react"),g=require("react/jsx-runtime"),C=(0,u.createContext)(void 0);function w({fency:e,children:t}){let[n,i]=(0,u.useState)(null),[o,r]=(0,u.useState)(!0),[s,a]=(0,u.useState)(null);if((0,u.useEffect)(()=>{e.then(l=>{i(l),r(!1)}).catch(l=>{a(l),r(!1)})},[e]),!n)return null;let c={fency:n,loading:o,error:s};return(0,g.jsx)(C.Provider,{value:c,children:t})}var F=require("react");function f(){let e=(0,F.useContext)(C);if(e===void 0)throw new Error("useFency must be used within a FencyProvider");return e}var A=e=>typeof e=="object"&&e!==null&&"id"in e,K=e=>typeof e=="object"&&e!==null&&"id"in e;async function v(e,t,n={}){let i=n.apiUrl||"http://localhost:8080/v1/chat-completions",o={...n.request,streamId:t,openai:{model:"gpt-4o-mini",messages:[{role:"user",content:"Hello, how are you?"}],...n.request?.openai}},r=await fetch(i,{method:"POST",headers:{"Content-Type":"application/json",Authorization:`Bearer ${e}`},body:JSON.stringify(o)});if(!r.ok)throw new Error(`Failed to create chat completion: ${r.status} ${r.statusText}`);let s=await r.json();if(!K(s))throw new Error("Invalid chat completion response");return s}async function b(e,t={}){let n=t.apiUrl||"http://localhost:8080/v1/streams",i=t.name||e,o=await fetch(n,{method:"POST",headers:{"Content-Type":"application/json",Authorization:`Bearer ${e}`},body:JSON.stringify({name:i})});if(!o.ok)throw new Error(`Failed to create stream: ${o.status} ${o.statusText}`);let r=await o.json();if(!A(r))throw new Error("Invalid stream response");return r}var p=require("react");var h=require("react");function I(){let[e,t]=(0,h.useState)([]),[n,i]=(0,h.useState)();return(0,h.useEffect)(()=>{if(!n)return;let o=new EventSource(n);return o.onmessage=r=>{let s=N(r);s&&t(a=>[...a,s])},o.onerror=r=>{console.error("EventSource error:",r)},()=>{o.close()}},[n]),{chunks:e,setUrl:i,url:n}}var M=e=>atob(e),N=e=>{try{let t=JSON.parse(M(e.data));return q(t)?t:null}catch(t){return console.error("Error parsing message:",t),null}},q=e=>typeof e=="object"&&e!==null&&"chatCompletionId"in e&&"streamId"in e&&"timestamp"in e&&"content"in e;function x(){let e=f(),{chunks:t,setUrl:n}=I(),[i,o]=(0,p.useState)(null),r=(0,p.useCallback)(async a=>{let c=await b(e.fency.publishableKey);return o(c),await v(e.fency.publishableKey,c.id,{request:{openai:{model:a.model,messages:[{role:"user",content:a.prompt}]}}}),{streamId:c.id,chatCompletionId:c.id}},[e]),s=(0,p.useMemo)(()=>{let a=[],c=[...new Set(t.map(l=>l.chatCompletionId))];for(let l of c){let d=t.filter(m=>m.chatCompletionId===l).sort((m,S)=>m.timestamp.localeCompare(S.timestamp)),P=d.map(m=>m.content).join("");a.push({chatCompletionId:l,streamId:d[0].streamId,chunks:d,fullMessage:P})}return a},[t]);return(0,p.useEffect)(()=>{i&&n(`http://localhost:8080/v1/streams/${i.id}?pk=${e.fency.publishableKey}`)},[i,e.fency.publishableKey,n]),{createChatCompletion:r,chatCompletions:s}}
+ "use strict";var g=Object.defineProperty;var T=Object.getOwnPropertyDescriptor;var O=Object.getOwnPropertyNames;var $=Object.prototype.hasOwnProperty;var j=(e,t)=>{for(var o in t)g(e,o,{get:t[o],enumerable:!0})},A=(e,t,o,i)=>{if(t&&typeof t=="object"||typeof t=="function")for(let n of O(t))!$.call(e,n)&&n!==o&&g(e,n,{get:()=>t[n],enumerable:!(i=T(t,n))||i.enumerable});return e};var K=e=>A(g({},"__esModule",{value:!0}),e);var R={};j(R,{FencyProvider:()=>v,useChatCompletion:()=>S,useFency:()=>C});module.exports=K(R);var c=require("react"),b=require("react/jsx-runtime"),F=(0,c.createContext)(void 0);function v({fency:e,children:t}){let[o,i]=(0,c.useState)(null),[n,r]=(0,c.useState)(!0),[a,u]=(0,c.useState)(null);if((0,c.useEffect)(()=>{e.then(m=>{i(m),r(!1)}).catch(m=>{u(m),r(!1)})},[e]),!o)return null;let p={fency:o,loading:n,error:a};return(0,b.jsx)(F.Provider,{value:p,children:t})}var I=require("react");function C(){let e=(0,I.useContext)(F);if(e===void 0)throw new Error("useFency must be used within a FencyProvider");return e}var M=e=>typeof e=="object"&&e!==null&&"id"in e,N=e=>typeof e=="object"&&e!==null&&"id"in e;async function x(e,t,o={}){let i=o.apiUrl||"http://localhost:8080/v1/chat-completions",n={...o.request,streamId:t,openai:{model:"gpt-4o-mini",messages:[{role:"user",content:"Hello, how are you?"}],...o.request?.openai}},r=await fetch(i,{method:"POST",headers:{"Content-Type":"application/json",Authorization:`Bearer ${e}`},body:JSON.stringify(n)});if(!r.ok)throw new Error(`Failed to create chat completion: ${r.status} ${r.statusText}`);let a=await r.json();if(!N(a))throw new Error("Invalid chat completion response");return a}async function k(e,t={}){let o=t.apiUrl||"http://localhost:8080/v1/streams",i=t.name||e,n=await fetch(o,{method:"POST",headers:{"Content-Type":"application/json",Authorization:`Bearer ${e}`},body:JSON.stringify({name:i})});if(!n.ok)throw new Error(`Failed to create stream: ${n.status} ${n.statusText}`);let r=await n.json();if(!M(r))throw new Error("Invalid stream response");return r}var s=require("react");var y=require("react");function P(){let[e,t]=(0,y.useState)([]),[o,i]=(0,y.useState)();return(0,y.useEffect)(()=>{if(!o)return;let n=new EventSource(o);return n.onmessage=r=>{let a=J(r);a&&t(u=>[...u,a])},n.onerror=r=>{console.error("EventSource error:",r)},()=>{n.close()}},[o]),{chunks:e,setUrl:i,url:o}}var H=e=>atob(e),J=e=>{try{let t=JSON.parse(H(e.data));return q(t)?t:null}catch(t){return console.error("Error parsing message:",t),null}},q=e=>typeof e=="object"&&e!==null&&"chatCompletionId"in e&&"streamId"in e&&"timestamp"in e&&"content"in e;function S(){let e=C(),{chunks:t,setUrl:o}=P(),[i,n]=(0,s.useState)([]),[r,a]=(0,s.useState)(null),u=(0,s.useCallback)(async h=>{let l=await k(e.fency.publishableKey);a(l);let f=await x(e.fency.publishableKey,l.id,{request:{openai:{model:h.model,messages:[{role:"user",content:h.prompt}]}}});return n(w=>[...w,f]),{streamId:l.id,chatCompletionId:l.id}},[e]),p=(0,s.useMemo)(()=>{let h=[];for(let l of i){let f=t.filter(d=>d.chatCompletionId===l.id).sort((d,E)=>d.timestamp.localeCompare(E.timestamp)),w=f.map(d=>d.content).join("");h.push({chatCompletion:l,streamId:f[0].streamId,chunks:f,fullMessage:w})}return h},[t,i]),m=(0,s.useMemo)(()=>p[p.length-1],[p]);return(0,s.useEffect)(()=>{r&&o(`http://localhost:8080/v1/streams/${r.id}?pk=${e.fency.publishableKey}`)},[r,e.fency.publishableKey,o]),{createChatCompletion:u,chatCompletions:p,latestCompletion:m}}
package/dist/index.mjs CHANGED
@@ -1,2 +1,2 @@
- import{createContext as v,useEffect as b,useState as m}from"react";import{jsx as x}from"react/jsx-runtime";var h=v(void 0);function I({fency:e,children:t}){let[n,i]=m(null),[r,o]=m(!0),[s,a]=m(null);if(b(()=>{e.then(l=>{i(l),o(!1)}).catch(l=>{a(l),o(!1)})},[e]),!n)return null;let c={fency:n,loading:r,error:s};return x(h.Provider,{value:c,children:t})}import{useContext as P}from"react";function f(){let e=P(h);if(e===void 0)throw new Error("useFency must be used within a FencyProvider");return e}var S=e=>typeof e=="object"&&e!==null&&"id"in e,k=e=>typeof e=="object"&&e!==null&&"id"in e;async function d(e,t,n={}){let i=n.apiUrl||"http://localhost:8080/v1/chat-completions",r={...n.request,streamId:t,openai:{model:"gpt-4o-mini",messages:[{role:"user",content:"Hello, how are you?"}],...n.request?.openai}},o=await fetch(i,{method:"POST",headers:{"Content-Type":"application/json",Authorization:`Bearer ${e}`},body:JSON.stringify(r)});if(!o.ok)throw new Error(`Failed to create chat completion: ${o.status} ${o.statusText}`);let s=await o.json();if(!k(s))throw new Error("Invalid chat completion response");return s}async function y(e,t={}){let n=t.apiUrl||"http://localhost:8080/v1/streams",i=t.name||e,r=await fetch(n,{method:"POST",headers:{"Content-Type":"application/json",Authorization:`Bearer ${e}`},body:JSON.stringify({name:i})});if(!r.ok)throw new Error(`Failed to create stream: ${r.status} ${r.statusText}`);let o=await r.json();if(!S(o))throw new Error("Invalid stream response");return o}import{useCallback as j,useEffect as A,useMemo as K,useState as M}from"react";import{useEffect as E,useState as C}from"react";function w(){let[e,t]=C([]),[n,i]=C();return E(()=>{if(!n)return;let r=new EventSource(n);return r.onmessage=o=>{let s=O(o);s&&t(a=>[...a,s])},r.onerror=o=>{console.error("EventSource error:",o)},()=>{r.close()}},[n]),{chunks:e,setUrl:i,url:n}}var T=e=>atob(e),O=e=>{try{let t=JSON.parse(T(e.data));return $(t)?t:null}catch(t){return console.error("Error parsing message:",t),null}},$=e=>typeof e=="object"&&e!==null&&"chatCompletionId"in e&&"streamId"in e&&"timestamp"in e&&"content"in e;function N(){let e=f(),{chunks:t,setUrl:n}=w(),[i,r]=M(null),o=j(async a=>{let c=await y(e.fency.publishableKey);return r(c),await d(e.fency.publishableKey,c.id,{request:{openai:{model:a.model,messages:[{role:"user",content:a.prompt}]}}}),{streamId:c.id,chatCompletionId:c.id}},[e]),s=K(()=>{let a=[],c=[...new Set(t.map(l=>l.chatCompletionId))];for(let l of c){let p=t.filter(u=>u.chatCompletionId===l).sort((u,F)=>u.timestamp.localeCompare(F.timestamp)),g=p.map(u=>u.content).join("");a.push({chatCompletionId:l,streamId:p[0].streamId,chunks:p,fullMessage:g})}return a},[t]);return A(()=>{i&&n(`http://localhost:8080/v1/streams/${i.id}?pk=${e.fency.publishableKey}`)},[i,e.fency.publishableKey,n]),{createChatCompletion:o,chatCompletions:s}}export{I as FencyProvider,N as useChatCompletion,f as useFency};
+ import{createContext as k,useEffect as P,useState as d}from"react";import{jsx as E}from"react/jsx-runtime";var y=k(void 0);function S({fency:e,children:o}){let[n,i]=d(null),[r,t]=d(!0),[s,l]=d(null);if(P(()=>{e.then(p=>{i(p),t(!1)}).catch(p=>{l(p),t(!1)})},[e]),!n)return null;let c={fency:n,loading:r,error:s};return E(y.Provider,{value:c,children:o})}import{useContext as T}from"react";function C(){let e=T(y);if(e===void 0)throw new Error("useFency must be used within a FencyProvider");return e}var O=e=>typeof e=="object"&&e!==null&&"id"in e,$=e=>typeof e=="object"&&e!==null&&"id"in e;async function w(e,o,n={}){let i=n.apiUrl||"http://localhost:8080/v1/chat-completions",r={...n.request,streamId:o,openai:{model:"gpt-4o-mini",messages:[{role:"user",content:"Hello, how are you?"}],...n.request?.openai}},t=await fetch(i,{method:"POST",headers:{"Content-Type":"application/json",Authorization:`Bearer ${e}`},body:JSON.stringify(r)});if(!t.ok)throw new Error(`Failed to create chat completion: ${t.status} ${t.statusText}`);let s=await t.json();if(!$(s))throw new Error("Invalid chat completion response");return s}async function g(e,o={}){let n=o.apiUrl||"http://localhost:8080/v1/streams",i=o.name||e,r=await fetch(n,{method:"POST",headers:{"Content-Type":"application/json",Authorization:`Bearer ${e}`},body:JSON.stringify({name:i})});if(!r.ok)throw new Error(`Failed to create stream: ${r.status} ${r.statusText}`);let t=await r.json();if(!O(t))throw new Error("Invalid stream response");return t}import{useCallback as N,useEffect as H,useMemo as b,useState as I}from"react";import{useEffect as j,useState as F}from"react";function v(){let[e,o]=F([]),[n,i]=F();return j(()=>{if(!n)return;let r=new EventSource(n);return r.onmessage=t=>{let s=K(t);s&&o(l=>[...l,s])},r.onerror=t=>{console.error("EventSource error:",t)},()=>{r.close()}},[n]),{chunks:e,setUrl:i,url:n}}var A=e=>atob(e),K=e=>{try{let o=JSON.parse(A(e.data));return M(o)?o:null}catch(o){return console.error("Error parsing message:",o),null}},M=e=>typeof e=="object"&&e!==null&&"chatCompletionId"in e&&"streamId"in e&&"timestamp"in e&&"content"in e;function J(){let e=C(),{chunks:o,setUrl:n}=v(),[i,r]=I([]),[t,s]=I(null),l=N(async u=>{let a=await g(e.fency.publishableKey);s(a);let m=await w(e.fency.publishableKey,a.id,{request:{openai:{model:u.model,messages:[{role:"user",content:u.prompt}]}}});return r(f=>[...f,m]),{streamId:a.id,chatCompletionId:a.id}},[e]),c=b(()=>{let u=[];for(let a of i){let m=o.filter(h=>h.chatCompletionId===a.id).sort((h,x)=>h.timestamp.localeCompare(x.timestamp)),f=m.map(h=>h.content).join("");u.push({chatCompletion:a,streamId:m[0].streamId,chunks:m,fullMessage:f})}return u},[o,i]),p=b(()=>c[c.length-1],[c]);return H(()=>{t&&n(`http://localhost:8080/v1/streams/${t.id}?pk=${e.fency.publishableKey}`)},[t,e.fency.publishableKey,n]),{createChatCompletion:l,chatCompletions:c,latestCompletion:p}}export{S as FencyProvider,J as useChatCompletion,C as useFency};
  //# sourceMappingURL=index.mjs.map
@@ -1,7 +1,7 @@
  {
  "version": 3,
- "sources": ["../src/FencyProvider.tsx", "../src/hooks/useFency.ts", "../../fency-js/dist/index.js", "../src/hooks/useChatCompletion.ts", "../src/hooks/useEventSource.ts"],
- "sourcesContent": ["import { FencyInstance } from '@fencyai/js';\nimport { createContext, useEffect, useState } from 'react';\n\n// Create the context\nexport const FencyContextValue = createContext<FencyContext | undefined>(undefined);\n\n/**\n * Provider component that provides Fency instance to child components\n * Expects a promise that resolves to a Fency instance\n */\nexport function FencyProvider({ fency, children }: FencyProviderProps) {\n const [fencyInstance, setFencyInstance] = useState<FencyInstance | null>(null);\n const [loading, setLoading] = useState(true);\n const [error, setError] = useState<Error | null>(null);\n\n useEffect(() => {\n fency\n .then((instance: FencyInstance) => {\n setFencyInstance(instance);\n setLoading(false);\n })\n .catch((err: Error) => {\n setError(err);\n setLoading(false);\n });\n }, [fency]);\n\n // Only render children and provide context when fency is loaded\n if (!fencyInstance) {\n return null;\n }\n\n const value: FencyContext = {\n fency: fencyInstance,\n loading,\n error,\n };\n\n return (\n <FencyContextValue.Provider value={value}>\n {children}\n </FencyContextValue.Provider>\n );\n} \n\nexport interface FencyOptions {\n publishableKey: string;\n endpoint?: string;\n}\n\n/**\n * Context for Fency instance\n */\nexport interface FencyContext {\n fency: FencyInstance;\n loading: boolean;\n error: Error | null;\n}\n\n/**\n * Props for FencyProvider\n */\nexport interface FencyProviderProps {\n fency: Promise<FencyInstance>;\n children: React.ReactNode;\n} ", "import { useContext } from 'react';\nimport { FencyContextValue } from '../FencyProvider';\nimport type { FencyContext } from '../FencyProvider';\n\n/**\n * Hook to access Fency instance and loading state\n */\nexport function useFency(): FencyContext {\n const context = useContext(FencyContextValue);\n if (context === undefined) {\n throw new Error('useFency must be used within a FencyProvider');\n }\n return context;\n} ", "function a(){if(typeof window>\"u\")return!1;let e=[\"fetch\",\"Promise\",\"JSON\"];for(let o of e)if(typeof window[o]>\"u\")return!1;if(typeof window.location<\"u\"){let o=window.location.hostname===\"localhost\"||window.location.hostname===\"127.0.0.1\",t=window.location.protocol===\"https:\";!o&&!t&&console.warn(\"Fency: For security, we recommend using HTTPS in production.\")}return!0}function s(){let e={available:!0,missing:[],warnings:[]};if(typeof window>\"u\")return e.available=!1,e.missing.push(\"Browser environment\"),e;let o=[\"fetch\",\"Promise\",\"JSON\"];for(let t of o)typeof window[t]>\"u\"&&(e.available=!1,e.missing.push(`${t} API`));if(typeof window.location<\"u\"){let t=window.location.hostname===\"localhost\"||window.location.hostname===\"127.0.0.1\",n=window.location.protocol===\"https:\";!t&&!n&&e.warnings.push(\"HTTPS is recommended for production use\")}return e}var f=e=>typeof e==\"object\"&&e!==null&&\"id\"in e,y=e=>typeof e==\"object\"&&e!==null&&\"id\"in e;async function p(e,o,t={}){let n=t.apiUrl||\"http://localhost:8080/v1/chat-completions\",i={...t.request,streamId:o,openai:{model:\"gpt-4o-mini\",messages:[{role:\"user\",content:\"Hello, how are you?\"}],...t.request?.openai}},r=await fetch(n,{method:\"POST\",headers:{\"Content-Type\":\"application/json\",Authorization:`Bearer ${e}`},body:JSON.stringify(i)});if(!r.ok)throw new Error(`Failed to create chat completion: ${r.status} ${r.statusText}`);let m=await r.json();if(!y(m))throw new Error(\"Invalid chat completion response\");return m}async function c(e,o={}){let 
t=o.apiUrl||\"http://localhost:8080/v1/streams\",n=o.name||e,i=await fetch(t,{method:\"POST\",headers:{\"Content-Type\":\"application/json\",Authorization:`Bearer ${e}`},body:JSON.stringify({name:n})});if(!i.ok)throw new Error(`Failed to create stream: ${i.status} ${i.statusText}`);let r=await i.json();if(!f(r))throw new Error(\"Invalid stream response\");return r}function l(e,o={}){return new Promise((t,n)=>{if(!e||typeof e!=\"string\"){n(new Error(\"Fency: A valid publishable key is required.\"));return}if(!e.startsWith(\"pk_\")){n(new Error('Fency: Invalid publishable key format. Keys should start with \"pk_\".'));return}let i={publishableKey:e,endpoint:o.endpoint||\"https://api.fency.ai\"};setTimeout(()=>{t(i)},0)})}var P={loadFency:l,isFencyAvailable:a,getFencyAvailabilityInfo:s,createStream:c,createChatCompletion:p};export{p as createChatCompletion,c as createStream,P as default,s as getFencyAvailabilityInfo,a as isFencyAvailable,l as loadFency};\n", "// hooks/useChatCompletion.ts\nimport { createChatCompletion, createStream, Stream } from '@fencyai/js'\nimport { useCallback, useEffect, useMemo, useState } from 'react'\nimport { ChatCompletionChunk, useEventSource } from './useEventSource'\nimport { useFency } from './useFency'\n\ninterface ChatCompletion {\n chatCompletionId: string\n streamId: string\n chunks: ChatCompletionChunk[]\n fullMessage: string\n}\n\nexport function useChatCompletion(): {\n chatCompletions: ChatCompletion[]\n createChatCompletion: (params: {\n prompt: string\n model: 'gpt-4o-mini' | 'gpt-4o'\n }) => Promise<{\n streamId: string\n chatCompletionId: string\n }>\n} {\n const fency = useFency()\n const { chunks, setUrl } = useEventSource()\n const [stream, setStream] = useState<Stream | null>(null)\n\n const create = useCallback(\n async (params: {\n prompt: string\n model: 'gpt-4o-mini' | 'gpt-4o'\n }): Promise<{\n streamId: string\n chatCompletionId: string\n }> => {\n // Step 1: Create stream if not exists\n const s = await createStream(fency.fency.publishableKey)\n setStream(s)\n\n // Step 2: Send chat completion\n await createChatCompletion(fency.fency.publishableKey, s.id, {\n request: {\n openai: {\n model: params.model,\n messages: [{ role: 'user', content: params.prompt }],\n },\n },\n })\n\n return {\n streamId: s.id,\n chatCompletionId: s.id,\n }\n },\n [fency]\n )\n\n const chatCompletions = useMemo(() => {\n const chatCompletions: ChatCompletion[] = []\n\n const uniqueChatCompletionIds = [\n ...new Set(chunks.map((chunk) => chunk.chatCompletionId)),\n ]\n\n for (const chatCompletionId of uniqueChatCompletionIds) {\n const relevantChunks = chunks\n .filter((chunk) => chunk.chatCompletionId === chatCompletionId)\n .sort((a, b) => a.timestamp.localeCompare(b.timestamp))\n\n const fullMessage = relevantChunks\n .map((chunk) => chunk.content)\n .join('')\n\n chatCompletions.push({\n chatCompletionId,\n streamId: relevantChunks[0].streamId,\n chunks: relevantChunks,\n fullMessage,\n })\n }\n\n return chatCompletions\n }, [chunks])\n\n useEffect(() => {\n if (stream) {\n setUrl(\n `http://localhost:8080/v1/streams/${stream.id}?pk=${fency.fency.publishableKey}`\n )\n }\n }, [stream, fency.fency.publishableKey, setUrl])\n\n return {\n createChatCompletion: create,\n chatCompletions,\n }\n}\n", "import { useEffect, useState } from 'react'\n\ntype Message = {\n data: string\n event?: string\n lastEventId?: string\n}\n\nexport function useEventSource() {\n const [chunks, setChunks] = useState<ChatCompletionChunk[]>([])\n const [url, setUrl] = useState<string 
| null>()\n\n \n\n useEffect(() => {\n if (!url) return\n\n const eventSource = new EventSource(url)\n\n eventSource.onmessage = (event: MessageEvent) => {\n const chunk = getChatCompletionChunk(event)\n if (chunk) {\n setChunks((prev) => [...prev, chunk])\n }\n }\n\n eventSource.onerror = (error) => {\n console.error('EventSource error:', error)\n }\n\n return () => {\n eventSource.close()\n }\n }, [url])\n\n return {\n chunks,\n setUrl,\n url,\n }\n}\n\nconst base64Decode = (str: string) => {\n return atob(str)\n}\n\nconst getChatCompletionChunk = (\n message: Message\n): ChatCompletionChunk | null => {\n try {\n const json = JSON.parse(base64Decode(message.data))\n if (isChatCompletionChunk(json)) {\n return json\n }\n return null\n } catch (error) {\n console.error('Error parsing message:', error)\n return null\n }\n}\n\nconst isChatCompletionChunk = (data: unknown): boolean => {\n return (\n typeof data === 'object' &&\n data !== null &&\n 'chatCompletionId' in data &&\n 'streamId' in data &&\n 'timestamp' in data &&\n 'content' in data\n )\n}\n\nexport interface ChatCompletionChunk {\n chatCompletionId: string\n streamId: string\n timestamp: string\n content: string\n}\n"],
- "mappings": "AACA,OAAS,iBAAAA,EAAe,aAAAC,EAAW,YAAAC,MAAgB,QAsC/C,cAAAC,MAAA,oBAnCG,IAAMC,EAAoBJ,EAAwC,MAAS,EAM3E,SAASK,EAAc,CAAE,MAAAC,EAAO,SAAAC,CAAS,EAAuB,CACrE,GAAM,CAACC,EAAeC,CAAgB,EAAIP,EAA+B,IAAI,EACvE,CAACQ,EAASC,CAAU,EAAIT,EAAS,EAAI,EACrC,CAACU,EAAOC,CAAQ,EAAIX,EAAuB,IAAI,EAerD,GAbAD,EAAU,IAAM,CACdK,EACG,KAAMQ,GAA4B,CACjCL,EAAiBK,CAAQ,EACzBH,EAAW,EAAK,CAClB,CAAC,EACA,MAAOI,GAAe,CACrBF,EAASE,CAAG,EACZJ,EAAW,EAAK,CAClB,CAAC,CACL,EAAG,CAACL,CAAK,CAAC,EAGN,CAACE,EACH,OAAO,KAGT,IAAMQ,EAAsB,CAC1B,MAAOR,EACP,QAAAE,EACA,MAAAE,CACF,EAEA,OACET,EAACC,EAAkB,SAAlB,CAA2B,MAAOY,EAChC,SAAAT,EACH,CAEJ,CC3CA,OAAS,cAAAU,MAAkB,QAOpB,SAASC,GAAyB,CACvC,IAAMC,EAAUC,EAAWC,CAAiB,EAC5C,GAAIF,IAAY,OACd,MAAM,IAAI,MAAM,8CAA8C,EAEhE,OAAOA,CACT,CCbw1B,IAAIG,EAAE,GAAG,OAAO,GAAG,UAAU,IAAI,MAAM,OAAO,EAAEC,EAAE,GAAG,OAAO,GAAG,UAAU,IAAI,MAAM,OAAO,EAAE,eAAeC,EAAE,EAAEC,EAAEC,EAAE,CAAC,EAAE,CAAC,IAAIC,EAAED,EAAE,QAAQ,4CAA4CE,EAAE,CAAC,GAAGF,EAAE,QAAQ,SAASD,EAAE,OAAO,CAAC,MAAM,cAAc,SAAS,CAAC,CAAC,KAAK,OAAO,QAAQ,qBAAqB,CAAC,EAAE,GAAGC,EAAE,SAAS,MAAM,CAAC,EAAEG,EAAE,MAAM,MAAMF,EAAE,CAAC,OAAO,OAAO,QAAQ,CAAC,eAAe,mBAAmB,cAAc,UAAU,CAAC,EAAE,EAAE,KAAK,KAAK,UAAUC,CAAC,CAAC,CAAC,EAAE,GAAG,CAACC,EAAE,GAAG,MAAM,IAAI,MAAM,qCAAqCA,EAAE,MAAM,IAAIA,EAAE,UAAU,EAAE,EAAE,IAAIC,EAAE,MAAMD,EAAE,KAAK,EAAE,GAAG,CAACN,EAAEO,CAAC,EAAE,MAAM,IAAI,MAAM,kCAAkC,EAAE,OAAOA,CAAC,CAAC,eAAeC,EAAE,EAAEN,EAAE,CAAC,EAAE,CAAC,IAAIC,EAAED,EAAE,QAAQ,mCAAmCE,EAAEF,EAAE,MAAM,EAAEG,EAAE,MAAM,MAAMF,EAAE,CAAC,OAAO,OAAO,QAAQ,CAAC,eAAe,mBAAmB,cAAc,UAAU,CAAC,EAAE,EAAE,KAAK,KAAK,UAAU,CAAC,KAAKC,CAAC,CAAC,CAAC,CAAC,EAAE,GAAG,CAACC,EAAE,GAAG,MAAM,IAAI,MAAM,4BAA4BA,EAAE,MAAM,IAAIA,EAAE,UAAU,EAAE,EAAE,IAAIC,EAAE,MAAMD,EAAE,KAAK,EAAE,GAAG,CAACN,EAAEO,CAAC,EAAE,MAAM,IAAI,MAAM,yBAAyB,EAAE,OAAOA,CAAC,CCEt0D,OAAS,eAAAG,EAAa,aAAAC,EAAW,WAAAC,EAAS,YAAAC,MAAgB,QCF1D,OAAS,aAAAC,EAAW,YAAAC,MAAgB,QAQ7B,SAASC,GAAiB,CAC7B,GAAM,CAACC,EAAQC,CAAS,EAAIH,EAAgC,CAAC,CAAC,EACxD,CAACI,EAAKC,CAAM,EAAIL,EAAwB,EAI9C,OAAAD,EAAU,IAAM,CACZ,GAAI,CAACK,EAAK,OAEV,IAAME,EAAc,IAAI,YAAYF,CAAG,EAEvC,OAAAE,EAAY,UAAaC,GAAwB,CAC7C,IAAMC,EAAQC,EAAuBF,CAAK,EACtCC,GACAL,EAAWO,GAAS,CAAC,GAAGA,EAAMF,CAAK,CAAC,CAE5C,EAEAF,EAAY,QAAWK,GAAU,CAC7B,QAAQ,MAAM,qBAAsBA,CAAK,CAC7C,EAEO,IAAM,CACTL,EAAY,MAAM,CACtB,CACJ,EAAG,CAACF,CAAG,CAAC,EAED,CACH,OAAAF,EACA,OAAAG,EACA,IAAAD,CACJ,CACJ,CAEA,IAAMQ,EAAgBC,GACX,KAAKA,CAAG,EAGbJ,EACFK,GAC6B,CAC7B,GAAI,CACA,IAAMC,EAAO,KAAK,MAAMH,EAAaE,EAAQ,IAAI,CAAC,EAClD,OAAIE,EAAsBD,CAAI,EACnBA,EAEJ,IACX,OAASJ,EAAO,CACZ,eAAQ,MAAM,yBAA0BA,CAAK,EACtC,IACX,CACJ,EAEMK,EAAyBC,GAEvB,OAAOA,GAAS,UAChBA,IAAS,MACT,qBAAsBA,GACtB,aAAcA,GACd,cAAeA,GACf,YAAaA,EDvDd,SAASC,GASd,CACE,IAAMC,EAAQC,EAAS,EACjB,CAAE,OAAAC,EAAQ,OAAAC,CAAO,EAAIC,EAAe,EACpC,CAACC,EAAQC,CAAS,EAAIC,EAAwB,IAAI,EAElDC,EAASC,EACX,MAAOC,GAMD,CAEF,IAAMC,EAAI,MAAMC,EAAaZ,EAAM,MAAM,cAAc,EACvD,OAAAM,EAAUK,CAAC,EAGX,MAAME,EAAqBb,EAAM,MAAM,eAAgBW,EAAE,GAAI,CACzD,QAAS,CACL,OAAQ,CACJ,MAAOD,EAAO,MACd,SAAU,CAAC,CAAE,KAAM,OAAQ,QAASA,EAAO,MAAO,CAAC,CACvD,CACJ,CACJ,CAAC,EAEM,CACH,SAAUC,EAAE,GACZ,iBAAkBA,EAAE,EACxB,CACJ,EACA,CAACX,CAAK,CACV,EAEMc,EAAkBC,EAAQ,IAAM,CAClC,IAAMD,EAAoC,CAAC,EAErCE,EAA0B,CAC5B,GAAG,IAAI,IAAId,EAAO,IAAKe,GAAUA,EAAM,gBAAgB,CAAC,CAC5D,EAEA,QAAWC,KAAoBF,EAAyB,CACpD,IAAMG,EAAiBjB,EAClB,OAAQe,GAAUA,EAAM,mBAAqBC,CAAgB,EAC7D,KAAK,CAACE,EAAGC,IAAMD,EAAE,UAAU,cAAcC,EAAE,SAAS,CAAC,EAEpDC,EAAcH,EACf,IAAKF,GAAUA,EAAM,OAAO,EAC5B,KAAK,EAAE,EAEZH,EAAgB,KAAK,CACjB,iBAAAI,EACA,SAAUC,EAAe,CAAC,EAAE,SAC5B,OAAQA,EACR,YAAAG,CACJ,CAAC,CACL,CAEA,OAAOR,CACX,EAAG,CAACZ,CAAM,CAAC,EAEX,OAAAqB,EAAU,IAAM,CACRlB,GACAF,EACI,oCAAoCE,EAAO,EAAE,OAAOL,EAAM,M
AAM,cAAc,EAClF,CAER,EAAG,CAACK,EAAQL,EAAM,MAAM,eAAgBG,CAAM,CAAC,EAExC,CACH,qBAAsBK,EACtB,gBAAAM,CACJ,CACJ",
- "names": ["createContext", "useEffect", "useState", "jsx", "FencyContextValue", "FencyProvider", "fency", "children", "fencyInstance", "setFencyInstance", "loading", "setLoading", "error", "setError", "instance", "err", "value", "useContext", "useFency", "context", "useContext", "FencyContextValue", "f", "y", "p", "o", "t", "n", "i", "r", "m", "c", "useCallback", "useEffect", "useMemo", "useState", "useEffect", "useState", "useEventSource", "chunks", "setChunks", "url", "setUrl", "eventSource", "event", "chunk", "getChatCompletionChunk", "prev", "error", "base64Decode", "str", "message", "json", "isChatCompletionChunk", "data", "useChatCompletion", "fency", "useFency", "chunks", "setUrl", "useEventSource", "stream", "setStream", "useState", "create", "useCallback", "params", "s", "c", "p", "chatCompletions", "useMemo", "uniqueChatCompletionIds", "chunk", "chatCompletionId", "relevantChunks", "a", "b", "fullMessage", "useEffect"]
+ "sources": ["../src/FencyProvider.tsx", "../src/hooks/useFency.ts", "../node_modules/@fencyai/js/dist/index.js", "../src/hooks/useChatCompletion.ts", "../src/hooks/useEventSource.ts"],
+ "sourcesContent": ["import { FencyInstance } from '@fencyai/js';\nimport { createContext, useEffect, useState } from 'react';\n\n// Create the context\nexport const FencyContextValue = createContext<FencyContext | undefined>(undefined);\n\n/**\n * Provider component that provides Fency instance to child components\n * Expects a promise that resolves to a Fency instance\n */\nexport function FencyProvider({ fency, children }: FencyProviderProps) {\n const [fencyInstance, setFencyInstance] = useState<FencyInstance | null>(null);\n const [loading, setLoading] = useState(true);\n const [error, setError] = useState<Error | null>(null);\n\n useEffect(() => {\n fency\n .then((instance: FencyInstance) => {\n setFencyInstance(instance);\n setLoading(false);\n })\n .catch((err: Error) => {\n setError(err);\n setLoading(false);\n });\n }, [fency]);\n\n // Only render children and provide context when fency is loaded\n if (!fencyInstance) {\n return null;\n }\n\n const value: FencyContext = {\n fency: fencyInstance,\n loading,\n error,\n };\n\n return (\n <FencyContextValue.Provider value={value}>\n {children}\n </FencyContextValue.Provider>\n );\n} \n\nexport interface FencyOptions {\n publishableKey: string;\n endpoint?: string;\n}\n\n/**\n * Context for Fency instance\n */\nexport interface FencyContext {\n fency: FencyInstance;\n loading: boolean;\n error: Error | null;\n}\n\n/**\n * Props for FencyProvider\n */\nexport interface FencyProviderProps {\n fency: Promise<FencyInstance>;\n children: React.ReactNode;\n} ", "import { useContext } from 'react';\nimport { FencyContextValue } from '../FencyProvider';\nimport type { FencyContext } from '../FencyProvider';\n\n/**\n * Hook to access Fency instance and loading state\n */\nexport function useFency(): FencyContext {\n const context = useContext(FencyContextValue);\n if (context === undefined) {\n throw new Error('useFency must be used within a FencyProvider');\n }\n return context;\n} ", "function a(){if(typeof window>\"u\")return!1;let e=[\"fetch\",\"Promise\",\"JSON\"];for(let o of e)if(typeof window[o]>\"u\")return!1;if(typeof window.location<\"u\"){let o=window.location.hostname===\"localhost\"||window.location.hostname===\"127.0.0.1\",t=window.location.protocol===\"https:\";!o&&!t&&console.warn(\"Fency: For security, we recommend using HTTPS in production.\")}return!0}function s(){let e={available:!0,missing:[],warnings:[]};if(typeof window>\"u\")return e.available=!1,e.missing.push(\"Browser environment\"),e;let o=[\"fetch\",\"Promise\",\"JSON\"];for(let t of o)typeof window[t]>\"u\"&&(e.available=!1,e.missing.push(`${t} API`));if(typeof window.location<\"u\"){let t=window.location.hostname===\"localhost\"||window.location.hostname===\"127.0.0.1\",n=window.location.protocol===\"https:\";!t&&!n&&e.warnings.push(\"HTTPS is recommended for production use\")}return e}var f=e=>typeof e==\"object\"&&e!==null&&\"id\"in e,y=e=>typeof e==\"object\"&&e!==null&&\"id\"in e;async function p(e,o,t={}){let n=t.apiUrl||\"http://localhost:8080/v1/chat-completions\",i={...t.request,streamId:o,openai:{model:\"gpt-4o-mini\",messages:[{role:\"user\",content:\"Hello, how are you?\"}],...t.request?.openai}},r=await fetch(n,{method:\"POST\",headers:{\"Content-Type\":\"application/json\",Authorization:`Bearer ${e}`},body:JSON.stringify(i)});if(!r.ok)throw new Error(`Failed to create chat completion: ${r.status} ${r.statusText}`);let m=await r.json();if(!y(m))throw new Error(\"Invalid chat completion response\");return m}async function c(e,o={}){let 
t=o.apiUrl||\"http://localhost:8080/v1/streams\",n=o.name||e,i=await fetch(t,{method:\"POST\",headers:{\"Content-Type\":\"application/json\",Authorization:`Bearer ${e}`},body:JSON.stringify({name:n})});if(!i.ok)throw new Error(`Failed to create stream: ${i.status} ${i.statusText}`);let r=await i.json();if(!f(r))throw new Error(\"Invalid stream response\");return r}function l(e,o={}){return new Promise((t,n)=>{if(!e||typeof e!=\"string\"){n(new Error(\"Fency: A valid publishable key is required.\"));return}if(!e.startsWith(\"pk_\")){n(new Error('Fency: Invalid publishable key format. Keys should start with \"pk_\".'));return}let i={publishableKey:e,endpoint:o.endpoint||\"https://api.fency.ai\"};setTimeout(()=>{t(i)},0)})}var P={loadFency:l,isFencyAvailable:a,getFencyAvailabilityInfo:s,createStream:c,createChatCompletion:p};export{p as createChatCompletion,c as createStream,P as default,s as getFencyAvailabilityInfo,a as isFencyAvailable,l as loadFency};\n", "// hooks/useChatCompletion.ts\nimport { ChatCompletion, createChatCompletion, createStream, Stream } from '@fencyai/js'\nimport { useCallback, useEffect, useMemo, useState } from 'react'\nimport { ChatCompletionChunk, useEventSource } from './useEventSource'\nimport { useFency } from './useFency'\n\ninterface Completions {\n chatCompletion: ChatCompletion\n streamId: string\n chunks: ChatCompletionChunk[]\n fullMessage: string\n}\n\ninterface HookResponse {\n chatCompletions: Completions[]\n createChatCompletion: (params: {\n prompt: string\n model: 'gpt-4o-mini' | 'gpt-4o'\n }) => Promise<{\n streamId: string\n chatCompletionId: string\n }>\n latestCompletion: Completions | null\n}\n\nexport function useChatCompletion(): HookResponse {\n const fency = useFency()\n const { chunks, setUrl } = useEventSource()\n const [chatCompletions, setChatCompletions] = useState<ChatCompletion[]>([])\n const [stream, setStream] = useState<Stream | null>(null)\n\n const create = useCallback(\n async (params: {\n prompt: string\n model: 'gpt-4o-mini' | 'gpt-4o'\n }): Promise<{\n streamId: string\n chatCompletionId: string\n }> => {\n // Step 1: Create stream if not exists\n const s = await createStream(fency.fency.publishableKey)\n setStream(s)\n\n // Step 2: Send chat completion\n const chatCompletion = await createChatCompletion(fency.fency.publishableKey, s.id, {\n request: {\n openai: {\n model: params.model,\n messages: [{ role: 'user', content: params.prompt }],\n },\n },\n })\n\n setChatCompletions((prev) => [...prev, chatCompletion])\n\n return {\n streamId: s.id,\n chatCompletionId: s.id,\n }\n },\n [fency]\n )\n\n const completions = useMemo(() => {\n const completions: Completions[] = []\n\n for (const chatCompletion of chatCompletions) {\n const relevantChunks = chunks\n .filter((chunk) => chunk.chatCompletionId === chatCompletion.id)\n .sort((a, b) => a.timestamp.localeCompare(b.timestamp))\n\n const fullMessage = relevantChunks\n .map((chunk) => chunk.content)\n .join('')\n\n completions.push({\n chatCompletion,\n streamId: relevantChunks[0].streamId,\n chunks: relevantChunks,\n fullMessage,\n })\n }\n\n return completions\n }, [chunks, chatCompletions])\n\n const latestCompletion = useMemo(() => {\n return completions[completions.length - 1]\n }, [completions])\n\n useEffect(() => {\n if (stream) {\n setUrl(\n `http://localhost:8080/v1/streams/${stream.id}?pk=${fency.fency.publishableKey}`\n )\n }\n }, [stream, fency.fency.publishableKey, setUrl])\n\n return {\n createChatCompletion: create,\n chatCompletions: completions,\n 
latestCompletion,\n }\n}\n", "import { useEffect, useState } from 'react'\n\ntype Message = {\n data: string\n event?: string\n lastEventId?: string\n}\n\nexport function useEventSource() {\n const [chunks, setChunks] = useState<ChatCompletionChunk[]>([])\n const [url, setUrl] = useState<string | null>()\n\n \n\n useEffect(() => {\n if (!url) return\n\n const eventSource = new EventSource(url)\n\n eventSource.onmessage = (event: MessageEvent) => {\n const chunk = getChatCompletionChunk(event)\n if (chunk) {\n setChunks((prev) => [...prev, chunk])\n }\n }\n\n eventSource.onerror = (error) => {\n console.error('EventSource error:', error)\n }\n\n return () => {\n eventSource.close()\n }\n }, [url])\n\n return {\n chunks,\n setUrl,\n url,\n }\n}\n\nconst base64Decode = (str: string) => {\n return atob(str)\n}\n\nconst getChatCompletionChunk = (\n message: Message\n): ChatCompletionChunk | null => {\n try {\n const json = JSON.parse(base64Decode(message.data))\n if (isChatCompletionChunk(json)) {\n return json\n }\n return null\n } catch (error) {\n console.error('Error parsing message:', error)\n return null\n }\n}\n\nconst isChatCompletionChunk = (data: unknown): boolean => {\n return (\n typeof data === 'object' &&\n data !== null &&\n 'chatCompletionId' in data &&\n 'streamId' in data &&\n 'timestamp' in data &&\n 'content' in data\n )\n}\n\nexport interface ChatCompletionChunk {\n chatCompletionId: string\n streamId: string\n timestamp: string\n content: string\n}\n"],
+ "mappings": "AACA,OAAS,iBAAAA,EAAe,aAAAC,EAAW,YAAAC,MAAgB,QAsC/C,cAAAC,MAAA,oBAnCG,IAAMC,EAAoBJ,EAAwC,MAAS,EAM3E,SAASK,EAAc,CAAE,MAAAC,EAAO,SAAAC,CAAS,EAAuB,CACrE,GAAM,CAACC,EAAeC,CAAgB,EAAIP,EAA+B,IAAI,EACvE,CAACQ,EAASC,CAAU,EAAIT,EAAS,EAAI,EACrC,CAACU,EAAOC,CAAQ,EAAIX,EAAuB,IAAI,EAerD,GAbAD,EAAU,IAAM,CACdK,EACG,KAAMQ,GAA4B,CACjCL,EAAiBK,CAAQ,EACzBH,EAAW,EAAK,CAClB,CAAC,EACA,MAAOI,GAAe,CACrBF,EAASE,CAAG,EACZJ,EAAW,EAAK,CAClB,CAAC,CACL,EAAG,CAACL,CAAK,CAAC,EAGN,CAACE,EACH,OAAO,KAGT,IAAMQ,EAAsB,CAC1B,MAAOR,EACP,QAAAE,EACA,MAAAE,CACF,EAEA,OACET,EAACC,EAAkB,SAAlB,CAA2B,MAAOY,EAChC,SAAAT,EACH,CAEJ,CC3CA,OAAS,cAAAU,MAAkB,QAOpB,SAASC,GAAyB,CACvC,IAAMC,EAAUC,EAAWC,CAAiB,EAC5C,GAAIF,IAAY,OACd,MAAM,IAAI,MAAM,8CAA8C,EAEhE,OAAOA,CACT,CCbw1B,IAAIG,EAAE,GAAG,OAAO,GAAG,UAAU,IAAI,MAAM,OAAO,EAAEC,EAAE,GAAG,OAAO,GAAG,UAAU,IAAI,MAAM,OAAO,EAAE,eAAeC,EAAE,EAAE,EAAEC,EAAE,CAAC,EAAE,CAAC,IAAIC,EAAED,EAAE,QAAQ,4CAA4CE,EAAE,CAAC,GAAGF,EAAE,QAAQ,SAAS,EAAE,OAAO,CAAC,MAAM,cAAc,SAAS,CAAC,CAAC,KAAK,OAAO,QAAQ,qBAAqB,CAAC,EAAE,GAAGA,EAAE,SAAS,MAAM,CAAC,EAAEG,EAAE,MAAM,MAAMF,EAAE,CAAC,OAAO,OAAO,QAAQ,CAAC,eAAe,mBAAmB,cAAc,UAAU,CAAC,EAAE,EAAE,KAAK,KAAK,UAAUC,CAAC,CAAC,CAAC,EAAE,GAAG,CAACC,EAAE,GAAG,MAAM,IAAI,MAAM,qCAAqCA,EAAE,MAAM,IAAIA,EAAE,UAAU,EAAE,EAAE,IAAIC,EAAE,MAAMD,EAAE,KAAK,EAAE,GAAG,CAACL,EAAEM,CAAC,EAAE,MAAM,IAAI,MAAM,kCAAkC,EAAE,OAAOA,CAAC,CAAC,eAAeC,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC,IAAIL,EAAE,EAAE,QAAQ,mCAAmCC,EAAE,EAAE,MAAM,EAAEC,EAAE,MAAM,MAAMF,EAAE,CAAC,OAAO,OAAO,QAAQ,CAAC,eAAe,mBAAmB,cAAc,UAAU,CAAC,EAAE,EAAE,KAAK,KAAK,UAAU,CAAC,KAAKC,CAAC,CAAC,CAAC,CAAC,EAAE,GAAG,CAACC,EAAE,GAAG,MAAM,IAAI,MAAM,4BAA4BA,EAAE,MAAM,IAAIA,EAAE,UAAU,EAAE,EAAE,IAAIC,EAAE,MAAMD,EAAE,KAAK,EAAE,GAAG,CAACL,EAAEM,CAAC,EAAE,MAAM,IAAI,MAAM,yBAAyB,EAAE,OAAOA,CAAC,CCEt0D,OAAS,eAAAG,EAAa,aAAAC,EAAW,WAAAC,EAAS,YAAAC,MAAgB,QCF1D,OAAS,aAAAC,EAAW,YAAAC,MAAgB,QAQ7B,SAASC,GAAiB,CAC7B,GAAM,CAACC,EAAQC,CAAS,EAAIH,EAAgC,CAAC,CAAC,EACxD,CAACI,EAAKC,CAAM,EAAIL,EAAwB,EAI9C,OAAAD,EAAU,IAAM,CACZ,GAAI,CAACK,EAAK,OAEV,IAAME,EAAc,IAAI,YAAYF,CAAG,EAEvC,OAAAE,EAAY,UAAaC,GAAwB,CAC7C,IAAMC,EAAQC,EAAuBF,CAAK,EACtCC,GACAL,EAAWO,GAAS,CAAC,GAAGA,EAAMF,CAAK,CAAC,CAE5C,EAEAF,EAAY,QAAWK,GAAU,CAC7B,QAAQ,MAAM,qBAAsBA,CAAK,CAC7C,EAEO,IAAM,CACTL,EAAY,MAAM,CACtB,CACJ,EAAG,CAACF,CAAG,CAAC,EAED,CACH,OAAAF,EACA,OAAAG,EACA,IAAAD,CACJ,CACJ,CAEA,IAAMQ,EAAgBC,GACX,KAAKA,CAAG,EAGbJ,EACFK,GAC6B,CAC7B,GAAI,CACA,IAAMC,EAAO,KAAK,MAAMH,EAAaE,EAAQ,IAAI,CAAC,EAClD,OAAIE,EAAsBD,CAAI,EACnBA,EAEJ,IACX,OAASJ,EAAO,CACZ,eAAQ,MAAM,yBAA0BA,CAAK,EACtC,IACX,CACJ,EAEMK,EAAyBC,GAEvB,OAAOA,GAAS,UAChBA,IAAS,MACT,qBAAsBA,GACtB,aAAcA,GACd,cAAeA,GACf,YAAaA,ED3Cd,SAASC,GAAkC,CAC9C,IAAMC,EAAQC,EAAS,EACjB,CAAE,OAAAC,EAAQ,OAAAC,CAAO,EAAIC,EAAe,EACpC,CAACC,EAAiBC,CAAkB,EAAIC,EAA2B,CAAC,CAAC,EACrE,CAACC,EAAQC,CAAS,EAAIF,EAAwB,IAAI,EAElDG,EAASC,EACX,MAAOC,GAMD,CAEF,IAAMC,EAAI,MAAMC,EAAad,EAAM,MAAM,cAAc,EACvDS,EAAUI,CAAC,EAGX,IAAME,EAAiB,MAAMC,EAAqBhB,EAAM,MAAM,eAAgBa,EAAE,GAAI,CAChF,QAAS,CACL,OAAQ,CACJ,MAAOD,EAAO,MACd,SAAU,CAAC,CAAE,KAAM,OAAQ,QAASA,EAAO,MAAO,CAAC,CACvD,CACJ,CACJ,CAAC,EAED,OAAAN,EAAoBW,GAAS,CAAC,GAAGA,EAAMF,CAAc,CAAC,EAE/C,CACH,SAAUF,EAAE,GACZ,iBAAkBA,EAAE,EACxB,CACJ,EACA,CAACb,CAAK,CACV,EAEMkB,EAAcC,EAAQ,IAAM,CAC9B,IAAMD,EAA6B,CAAC,EAEpC,QAAWH,KAAkBV,EAAiB,CAC1C,IAAMe,EAAiBlB,EAClB,OAAQmB,GAAUA,EAAM,mBAAqBN,EAAe,EAAE,EAC9D,KAAK,CAACO,EAAGC,IAAMD,EAAE,UAAU,cAAcC,EAAE,SAAS,CAAC,EAEpDC,EAAcJ,EACf,IAAKC,GAAUA,EAAM,OAAO,EAC5B,KAAK,EAAE,EAEZH,EAAY,KAAK,CACb,eAAAH,EACA,SAAUK,EAAe,CAAC,EAAE,SAC5B,OAAQA,EACR,YAAAI,CACJ,CAAC,CACL,CAEA,OAAON,CACX,EAAG,CAAChB,EAAQG,CAAe,CAAC,EAEtBoB,EAAmBN,EAAQ,IACtBD,EAAYA
,EAAY,OAAS,CAAC,EAC1C,CAACA,CAAW,CAAC,EAEhB,OAAAQ,EAAU,IAAM,CACRlB,GACAL,EACI,oCAAoCK,EAAO,EAAE,OAAOR,EAAM,MAAM,cAAc,EAClF,CAER,EAAG,CAACQ,EAAQR,EAAM,MAAM,eAAgBG,CAAM,CAAC,EAExC,CACH,qBAAsBO,EACtB,gBAAiBQ,EACjB,iBAAAO,CACJ,CACJ",
+ "names": ["createContext", "useEffect", "useState", "jsx", "FencyContextValue", "FencyProvider", "fency", "children", "fencyInstance", "setFencyInstance", "loading", "setLoading", "error", "setError", "instance", "err", "value", "useContext", "useFency", "context", "useContext", "FencyContextValue", "f", "y", "p", "t", "n", "i", "r", "m", "c", "useCallback", "useEffect", "useMemo", "useState", "useEffect", "useState", "useEventSource", "chunks", "setChunks", "url", "setUrl", "eventSource", "event", "chunk", "getChatCompletionChunk", "prev", "error", "base64Decode", "str", "message", "json", "isChatCompletionChunk", "data", "useChatCompletion", "fency", "useFency", "chunks", "setUrl", "useEventSource", "chatCompletions", "setChatCompletions", "useState", "stream", "setStream", "create", "useCallback", "params", "s", "c", "chatCompletion", "p", "prev", "completions", "useMemo", "relevantChunks", "chunk", "a", "b", "fullMessage", "latestCompletion", "useEffect"]
  }
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@fencyai/react",
- "version": "0.1.11",
+ "version": "0.1.12",
  "description": "React components for Fency integration",
  "type": "module",
  "main": "./dist/index.cjs",
@@ -46,7 +46,7 @@
  "license": "MIT",
  "peerDependencies": {
  "react": "^18.0.0",
- "@fencyai/js": "^0.1.5"
+ "@fencyai/js": "^0.1.9"
  },
  "devDependencies": {
  "@types/node": "^20.10.0",