@fencyai/react 0.1.9 → 0.1.11

This diff shows the contents of publicly available package versions as published to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
package/dist/FencyProvider.d.ts CHANGED
@@ -1,8 +1,27 @@
1
- import type { FencyContext, FencyProviderProps } from './types';
1
+ import { FencyInstance } from '@fencyai/js';
2
2
  export declare const FencyContextValue: import("react").Context<FencyContext | undefined>;
3
3
  /**
4
4
  * Provider component that provides Fency instance to child components
5
5
  * Expects a promise that resolves to a Fency instance
6
6
  */
7
7
  export declare function FencyProvider({ fency, children }: FencyProviderProps): import("react/jsx-runtime").JSX.Element | null;
8
+ export interface FencyOptions {
9
+ publishableKey: string;
10
+ endpoint?: string;
11
+ }
12
+ /**
13
+ * Context for Fency instance
14
+ */
15
+ export interface FencyContext {
16
+ fency: FencyInstance;
17
+ loading: boolean;
18
+ error: Error | null;
19
+ }
20
+ /**
21
+ * Props for FencyProvider
22
+ */
23
+ export interface FencyProviderProps {
24
+ fency: Promise<FencyInstance>;
25
+ children: React.ReactNode;
26
+ }
8
27
  //# sourceMappingURL=FencyProvider.d.ts.map
@@ -1 +1 @@
1
- {"version":3,"file":"FencyProvider.d.ts","sourceRoot":"","sources":["../src/FencyProvider.tsx"],"names":[],"mappings":"AAEA,OAAO,KAAK,EAAE,YAAY,EAAE,kBAAkB,EAAE,MAAM,SAAS,CAAC;AAGhE,eAAO,MAAM,iBAAiB,mDAAqD,CAAC;AAEpF;;;GAGG;AACH,wBAAgB,aAAa,CAAC,EAAE,KAAK,EAAE,QAAQ,EAAE,EAAE,kBAAkB,kDAiCpE"}
1
+ {"version":3,"file":"FencyProvider.d.ts","sourceRoot":"","sources":["../src/FencyProvider.tsx"],"names":[],"mappings":"AAAA,OAAO,EAAE,aAAa,EAAE,MAAM,aAAa,CAAC;AAI5C,eAAO,MAAM,iBAAiB,mDAAqD,CAAC;AAEpF;;;GAGG;AACH,wBAAgB,aAAa,CAAC,EAAE,KAAK,EAAE,QAAQ,EAAE,EAAE,kBAAkB,kDAiCpE;AAED,MAAM,WAAW,YAAY;IAC3B,cAAc,EAAE,MAAM,CAAC;IACvB,QAAQ,CAAC,EAAE,MAAM,CAAC;CACnB;AAED;;GAEG;AACH,MAAM,WAAW,YAAY;IAC3B,KAAK,EAAE,aAAa,CAAC;IACrB,OAAO,EAAE,OAAO,CAAC;IACjB,KAAK,EAAE,KAAK,GAAG,IAAI,CAAC;CACrB;AAED;;GAEG;AACH,MAAM,WAAW,kBAAkB;IACjC,KAAK,EAAE,OAAO,CAAC,aAAa,CAAC,CAAC;IAC9B,QAAQ,EAAE,KAAK,CAAC,SAAS,CAAC;CAC3B"}
package/dist/hooks/useChatCompletion.d.ts ADDED
@@ -0,0 +1,19 @@
1
+ import { ChatCompletionChunk } from './useEventSource';
2
+ interface ChatCompletion {
3
+ chatCompletionId: string;
4
+ streamId: string;
5
+ chunks: ChatCompletionChunk[];
6
+ fullMessage: string;
7
+ }
8
+ export declare function useChatCompletion(): {
9
+ chatCompletions: ChatCompletion[];
10
+ createChatCompletion: (params: {
11
+ prompt: string;
12
+ model: 'gpt-4o-mini' | 'gpt-4o';
13
+ }) => Promise<{
14
+ streamId: string;
15
+ chatCompletionId: string;
16
+ }>;
17
+ };
18
+ export {};
19
+ //# sourceMappingURL=useChatCompletion.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"useChatCompletion.d.ts","sourceRoot":"","sources":["../../src/hooks/useChatCompletion.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,mBAAmB,EAAkB,MAAM,kBAAkB,CAAA;AAGtE,UAAU,cAAc;IACpB,gBAAgB,EAAE,MAAM,CAAA;IACxB,QAAQ,EAAE,MAAM,CAAA;IAChB,MAAM,EAAE,mBAAmB,EAAE,CAAA;IAC7B,WAAW,EAAE,MAAM,CAAA;CACtB;AAED,wBAAgB,iBAAiB,IAAI;IACjC,eAAe,EAAE,cAAc,EAAE,CAAA;IACjC,oBAAoB,EAAE,CAAC,MAAM,EAAE;QAC3B,MAAM,EAAE,MAAM,CAAA;QACd,KAAK,EAAE,aAAa,GAAG,QAAQ,CAAA;KAClC,KAAK,OAAO,CAAC;QACV,QAAQ,EAAE,MAAM,CAAA;QAChB,gBAAgB,EAAE,MAAM,CAAA;KAC3B,CAAC,CAAA;CACL,CA0EA"}
package/dist/hooks/useChatCompletion.js ADDED
@@ -0,0 +1,58 @@
1
+ // hooks/useChatCompletion.ts
2
+ import { createChatCompletion, createStream } from '@fencyai/js';
3
+ import { useCallback, useEffect, useMemo, useState } from 'react';
4
+ import { useEventSource } from './useEventSource';
5
+ import { useFency } from './useFency';
6
+ export function useChatCompletion() {
7
+ const fency = useFency();
8
+ const { chunks, setUrl } = useEventSource();
9
+ const [stream, setStream] = useState(null);
10
+ const create = useCallback(async (params) => {
11
+ // Step 1: Create stream if not exists
12
+ const s = await createStream(fency.fency.publishableKey);
13
+ setStream(s);
14
+ // Step 2: Send chat completion
15
+ await createChatCompletion(fency.fency.publishableKey, s.id, {
16
+ request: {
17
+ openai: {
18
+ model: params.model,
19
+ messages: [{ role: 'user', content: params.prompt }],
20
+ },
21
+ },
22
+ });
23
+ return {
24
+ streamId: s.id,
25
+ chatCompletionId: s.id,
26
+ };
27
+ }, [fency]);
28
+ const chatCompletions = useMemo(() => {
29
+ const chatCompletions = [];
30
+ const uniqueChatCompletionIds = [
31
+ ...new Set(chunks.map((chunk) => chunk.chatCompletionId)),
32
+ ];
33
+ for (const chatCompletionId of uniqueChatCompletionIds) {
34
+ const relevantChunks = chunks
35
+ .filter((chunk) => chunk.chatCompletionId === chatCompletionId)
36
+ .sort((a, b) => a.timestamp.localeCompare(b.timestamp));
37
+ const fullMessage = relevantChunks
38
+ .map((chunk) => chunk.content)
39
+ .join('');
40
+ chatCompletions.push({
41
+ chatCompletionId,
42
+ streamId: relevantChunks[0].streamId,
43
+ chunks: relevantChunks,
44
+ fullMessage,
45
+ });
46
+ }
47
+ return chatCompletions;
48
+ }, [chunks]);
49
+ useEffect(() => {
50
+ if (stream) {
51
+ setUrl(`http://localhost:8080/v1/streams/${stream.id}?pk=${fency.fency.publishableKey}`);
52
+ }
53
+ }, [stream, fency.fency.publishableKey, setUrl]);
54
+ return {
55
+ createChatCompletion: create,
56
+ chatCompletions,
57
+ };
58
+ }
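A sketch of how the reworked hook is consumed, based on the declaration and implementation above. The component must sit under FencyProvider, and note that the bundled implementation points its EventSource at http://localhost:8080:

```tsx
import { useChatCompletion } from '@fencyai/react';

export function PromptBox() {
  // Must be rendered inside <FencyProvider>; completions accumulate per hook instance.
  const { createChatCompletion, chatCompletions } = useChatCompletion();

  const ask = async () => {
    // Creates a stream, posts the completion request, and wires the internal
    // EventSource so decoded chunks start appearing in chatCompletions.
    const { chatCompletionId, streamId } = await createChatCompletion({
      prompt: 'Hello, how are you?',
      model: 'gpt-4o-mini',
    });
    console.log('started', { chatCompletionId, streamId });
  };

  return (
    <div>
      <button onClick={ask}>Ask</button>
      {chatCompletions.map((c) => (
        <p key={c.chatCompletionId}>{c.fullMessage}</p>
      ))}
    </div>
  );
}
```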
package/dist/hooks/useEventSource.d.ts ADDED
@@ -0,0 +1,12 @@
1
+ export declare function useEventSource(): {
2
+ chunks: ChatCompletionChunk[];
3
+ setUrl: import("react").Dispatch<import("react").SetStateAction<string | null | undefined>>;
4
+ url: string | null | undefined;
5
+ };
6
+ export interface ChatCompletionChunk {
7
+ chatCompletionId: string;
8
+ streamId: string;
9
+ timestamp: string;
10
+ content: string;
11
+ }
12
+ //# sourceMappingURL=useEventSource.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"useEventSource.d.ts","sourceRoot":"","sources":["../../src/hooks/useEventSource.ts"],"names":[],"mappings":"AAQA,wBAAgB,cAAc;;;;EAgC7B;AAgCD,MAAM,WAAW,mBAAmB;IAChC,gBAAgB,EAAE,MAAM,CAAA;IACxB,QAAQ,EAAE,MAAM,CAAA;IAChB,SAAS,EAAE,MAAM,CAAA;IACjB,OAAO,EAAE,MAAM,CAAA;CAClB"}
package/dist/hooks/useEventSource.js ADDED
@@ -0,0 +1,51 @@
1
+ import { useEffect, useState } from 'react';
2
+ export function useEventSource() {
3
+ const [chunks, setChunks] = useState([]);
4
+ const [url, setUrl] = useState();
5
+ useEffect(() => {
6
+ if (!url)
7
+ return;
8
+ const eventSource = new EventSource(url);
9
+ eventSource.onmessage = (event) => {
10
+ const chunk = getChatCompletionChunk(event);
11
+ if (chunk) {
12
+ setChunks((prev) => [...prev, chunk]);
13
+ }
14
+ };
15
+ eventSource.onerror = (error) => {
16
+ console.error('EventSource error:', error);
17
+ };
18
+ return () => {
19
+ eventSource.close();
20
+ };
21
+ }, [url]);
22
+ return {
23
+ chunks,
24
+ setUrl,
25
+ url,
26
+ };
27
+ }
28
+ const base64Decode = (str) => {
29
+ return atob(str);
30
+ };
31
+ const getChatCompletionChunk = (message) => {
32
+ try {
33
+ const json = JSON.parse(base64Decode(message.data));
34
+ if (isChatCompletionChunk(json)) {
35
+ return json;
36
+ }
37
+ return null;
38
+ }
39
+ catch (error) {
40
+ console.error('Error parsing message:', error);
41
+ return null;
42
+ }
43
+ };
44
+ const isChatCompletionChunk = (data) => {
45
+ return (typeof data === 'object' &&
46
+ data !== null &&
47
+ 'chatCompletionId' in data &&
48
+ 'streamId' in data &&
49
+ 'timestamp' in data &&
50
+ 'content' in data);
51
+ };
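useEventSource itself is not re-exported from the package index, but its decoding path fixes the wire format: each SSE message's data field must be a base64-encoded JSON ChatCompletionChunk. A sketch of a conforming payload (the ids are placeholders):

```ts
// Shape the hook expects after atob(event.data) followed by JSON.parse.
interface ChatCompletionChunk {
  chatCompletionId: string;
  streamId: string;
  timestamp: string;
  content: string;
}

const chunk: ChatCompletionChunk = {
  chatCompletionId: 'cc_placeholder',
  streamId: 'stream_placeholder',
  timestamp: new Date().toISOString(), // chunks are later sorted by string comparison
  content: 'Hello',
};

// A conforming SSE `data:` field; the hook reverses this and keeps the object
// only if all four keys are present.
const data = btoa(JSON.stringify(chunk));
console.log(data);
```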
package/dist/hooks/useFency.d.ts CHANGED
@@ -1,4 +1,4 @@
1
- import type { FencyContext } from './types';
1
+ import type { FencyContext } from '../FencyProvider';
2
2
  /**
3
3
  * Hook to access Fency instance and loading state
4
4
  */
@@ -0,0 +1 @@
1
+ {"version":3,"file":"useFency.d.ts","sourceRoot":"","sources":["../../src/hooks/useFency.ts"],"names":[],"mappings":"AAEA,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,kBAAkB,CAAC;AAErD;;GAEG;AACH,wBAAgB,QAAQ,IAAI,YAAY,CAMvC"}
package/dist/hooks/useFency.js CHANGED
@@ -1,5 +1,5 @@
1
1
  import { useContext } from 'react';
2
- import { FencyContextValue } from './FencyProvider';
2
+ import { FencyContextValue } from '../FencyProvider';
3
3
  /**
4
4
  * Hook to access Fency instance and loading state
5
5
  */
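Apart from the import paths, useFency is unchanged. For reference, a short consumer sketch; the hook throws if rendered outside FencyProvider:

```tsx
import { useFency } from '@fencyai/react';

export function KeyBadge() {
  // Throws "useFency must be used within a FencyProvider" otherwise.
  const { fency, loading, error } = useFency();
  if (loading) return <span>Loading...</span>;
  if (error) return <span>{error.message}</span>;
  return <span>{fency.publishableKey}</span>;
}
```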
package/dist/index.cjs CHANGED
@@ -1 +1 @@
1
- "use strict";var v=Object.defineProperty;var k=Object.getOwnPropertyDescriptor;var A=Object.getOwnPropertyNames;var T=Object.prototype.hasOwnProperty;var K=(e,t)=>{for(var n in t)v(e,n,{get:t[n],enumerable:!0})},$=(e,t,n,r)=>{if(t&&typeof t=="object"||typeof t=="function")for(let o of A(t))!T.call(e,o)&&o!==n&&v(e,o,{get:()=>t[o],enumerable:!(r=k(t,o))||r.enumerable});return e};var O=e=>$(v({},"__esModule",{value:!0}),e);var J={};K(J,{FencyProvider:()=>F,useChatCompletion:()=>x,useFency:()=>w});module.exports=O(J);var d=require("react"),I=require("react/jsx-runtime"),b=(0,d.createContext)(void 0);function F({fency:e,children:t}){let[n,r]=(0,d.useState)(null),[o,a]=(0,d.useState)(!0),[u,p]=(0,d.useState)(null);if((0,d.useEffect)(()=>{e.then(l=>{r(l),a(!1)}).catch(l=>{p(l),a(!1)})},[e]),!n)return null;let g={fency:n,loading:o,error:u};return(0,I.jsx)(b.Provider,{value:g,children:t})}var E=require("react");function w(){let e=(0,E.useContext)(b);if(e===void 0)throw new Error("useFency must be used within a FencyProvider");return e}function j(e){return e&&typeof e=="object"&&typeof e.id=="string"&&typeof e.name=="string"}function H(e){return e&&typeof e=="object"&&typeof e.id=="string"&&typeof e.streamId=="string"&&Array.isArray(e.choices)}async function S(e,t,n={}){let r=n.apiUrl||"http://localhost:8080/v1/chat-completions",o={...n.request,streamId:t,openai:{model:"gpt-4o-mini",messages:[{role:"user",content:"Hello, how are you?"}],...n.request?.openai}},a=await fetch(r,{method:"POST",headers:{"Content-Type":"application/json",Authorization:`Bearer ${e}`},body:JSON.stringify(o)});if(!a.ok)throw new Error(`Failed to create chat completion: ${a.status} ${a.statusText}`);let u=await a.json();if(!H(u))throw new Error("Invalid chat completion response");return u}async function C(e,t={}){let n=t.apiUrl||"http://localhost:8080/v1/streams",r=t.name||e,o=await fetch(n,{method:"POST",headers:{"Content-Type":"application/json",Authorization:`Bearer ${e}`},body:JSON.stringify({name:r})});if(!o.ok)throw new Error(`Failed to create stream: ${o.status} ${o.statusText}`);let a=await o.json();if(!j(a))throw new Error("Invalid stream response");return a}var f=require("react");var c=require("react");function M(e){let t=(0,c.useRef)(null),[n,r]=(0,c.useState)(!1),[o,a]=(0,c.useState)(null),[u,p]=(0,c.useState)([]),g=(0,c.useCallback)(h=>{if(t.current)return;let y=new EventSource(`http://localhost:8080/v1/streams/${h.streamId}?pk=${h.publishableKey}`,e.options);t.current=y,y.onopen=()=>{r(!0)},y.onmessage=i=>{console.log("onmessage",i);let m={data:q(i.data),event:i.type,lastEventId:i.lastEventId};a(m),p(s=>[...s,m]),e.onMessage&&e.onMessage({message:m,streamId:h.streamId})},y.onerror=()=>{r(!1)}},[e.options,e.onMessage]),l=(0,c.useCallback)(()=>{t.current&&(t.current.close(),t.current=null,r(!1))},[]);return(0,c.useEffect)(()=>()=>{l()},[l]),{connect:g,disconnect:l,isConnected:n,latestMessage:o,allMessages:u}}var q=e=>atob(e);function x(){let e=w(),[t,n]=(0,f.useState)({}),[r,o]=(0,f.useState)(!1),[a,u]=(0,f.useState)(null),[p,g]=(0,f.useState)(null),l=M({onMessage:i=>{console.log("Event source message",i);let m=t[i.streamId];if(m)n(s=>({...s,[i.streamId]:{...m,content:m.content+i.message.data}}));else{let s={streamId:i.streamId,role:"assistant",content:i.message.data};n(P=>({...P,[i.streamId]:s}))}}}),h=(0,f.useCallback)(async(i,m)=>{try{o(!0),u(null);let s=p;s||(s=await C(e.fency.publishableKey),console.log("Stream created",s),g(s)),l.connect({streamId:s.id,publishableKey:e.fency.publishableKey}),await 
S(e.fency.publishableKey,s.id,{request:{openai:{model:m.model,messages:[{role:"user",content:i}]}}})}catch(s){u(s instanceof Error?s:new Error("Unknown error"))}finally{o(!1)}},[e,p,l]),y=(0,f.useCallback)(()=>{n({})},[]);return{sendPrompt:h,clearMessages:y,messages:t,isLoading:r,error:a,stream:p}}
1
+ "use strict";var y=Object.defineProperty;var k=Object.getOwnPropertyDescriptor;var E=Object.getOwnPropertyNames;var T=Object.prototype.hasOwnProperty;var O=(e,t)=>{for(var n in t)y(e,n,{get:t[n],enumerable:!0})},$=(e,t,n,i)=>{if(t&&typeof t=="object"||typeof t=="function")for(let o of E(t))!T.call(e,o)&&o!==n&&y(e,o,{get:()=>t[o],enumerable:!(i=k(t,o))||i.enumerable});return e};var j=e=>$(y({},"__esModule",{value:!0}),e);var J={};O(J,{FencyProvider:()=>w,useChatCompletion:()=>x,useFency:()=>f});module.exports=j(J);var u=require("react"),g=require("react/jsx-runtime"),C=(0,u.createContext)(void 0);function w({fency:e,children:t}){let[n,i]=(0,u.useState)(null),[o,r]=(0,u.useState)(!0),[s,a]=(0,u.useState)(null);if((0,u.useEffect)(()=>{e.then(l=>{i(l),r(!1)}).catch(l=>{a(l),r(!1)})},[e]),!n)return null;let c={fency:n,loading:o,error:s};return(0,g.jsx)(C.Provider,{value:c,children:t})}var F=require("react");function f(){let e=(0,F.useContext)(C);if(e===void 0)throw new Error("useFency must be used within a FencyProvider");return e}var A=e=>typeof e=="object"&&e!==null&&"id"in e,K=e=>typeof e=="object"&&e!==null&&"id"in e;async function v(e,t,n={}){let i=n.apiUrl||"http://localhost:8080/v1/chat-completions",o={...n.request,streamId:t,openai:{model:"gpt-4o-mini",messages:[{role:"user",content:"Hello, how are you?"}],...n.request?.openai}},r=await fetch(i,{method:"POST",headers:{"Content-Type":"application/json",Authorization:`Bearer ${e}`},body:JSON.stringify(o)});if(!r.ok)throw new Error(`Failed to create chat completion: ${r.status} ${r.statusText}`);let s=await r.json();if(!K(s))throw new Error("Invalid chat completion response");return s}async function b(e,t={}){let n=t.apiUrl||"http://localhost:8080/v1/streams",i=t.name||e,o=await fetch(n,{method:"POST",headers:{"Content-Type":"application/json",Authorization:`Bearer ${e}`},body:JSON.stringify({name:i})});if(!o.ok)throw new Error(`Failed to create stream: ${o.status} ${o.statusText}`);let r=await o.json();if(!A(r))throw new Error("Invalid stream response");return r}var p=require("react");var h=require("react");function I(){let[e,t]=(0,h.useState)([]),[n,i]=(0,h.useState)();return(0,h.useEffect)(()=>{if(!n)return;let o=new EventSource(n);return o.onmessage=r=>{let s=N(r);s&&t(a=>[...a,s])},o.onerror=r=>{console.error("EventSource error:",r)},()=>{o.close()}},[n]),{chunks:e,setUrl:i,url:n}}var M=e=>atob(e),N=e=>{try{let t=JSON.parse(M(e.data));return q(t)?t:null}catch(t){return console.error("Error parsing message:",t),null}},q=e=>typeof e=="object"&&e!==null&&"chatCompletionId"in e&&"streamId"in e&&"timestamp"in e&&"content"in e;function x(){let e=f(),{chunks:t,setUrl:n}=I(),[i,o]=(0,p.useState)(null),r=(0,p.useCallback)(async a=>{let c=await b(e.fency.publishableKey);return o(c),await v(e.fency.publishableKey,c.id,{request:{openai:{model:a.model,messages:[{role:"user",content:a.prompt}]}}}),{streamId:c.id,chatCompletionId:c.id}},[e]),s=(0,p.useMemo)(()=>{let a=[],c=[...new Set(t.map(l=>l.chatCompletionId))];for(let l of c){let d=t.filter(m=>m.chatCompletionId===l).sort((m,S)=>m.timestamp.localeCompare(S.timestamp)),P=d.map(m=>m.content).join("");a.push({chatCompletionId:l,streamId:d[0].streamId,chunks:d,fullMessage:P})}return a},[t]);return(0,p.useEffect)(()=>{i&&n(`http://localhost:8080/v1/streams/${i.id}?pk=${e.fency.publishableKey}`)},[i,e.fency.publishableKey,n]),{createChatCompletion:r,chatCompletions:s}}
package/dist/index.d.ts CHANGED
@@ -1,5 +1,5 @@
1
1
  export { FencyProvider } from './FencyProvider';
2
- export { useFency } from './useFency';
3
- export { useChatCompletion } from './useChatCompletion';
4
- export type { FencyContext, FencyOptions, FencyProviderProps } from './types';
2
+ export { useFency } from './hooks/useFency';
3
+ export { useChatCompletion } from './hooks/useChatCompletion';
4
+ export type { FencyContext, FencyOptions, FencyProviderProps } from './FencyProvider';
5
5
  //# sourceMappingURL=index.d.ts.map
@@ -1 +1 @@
1
- {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.tsx"],"names":[],"mappings":"AACA,OAAO,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAC;AAChD,OAAO,EAAE,QAAQ,EAAE,MAAM,YAAY,CAAC;AACtC,OAAO,EAAE,iBAAiB,EAAE,MAAM,qBAAqB,CAAC;AAGxD,YAAY,EAAE,YAAY,EAAE,YAAY,EAAE,kBAAkB,EAAE,MAAM,SAAS,CAAC"}
1
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.tsx"],"names":[],"mappings":"AACA,OAAO,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAC;AAChD,OAAO,EAAE,QAAQ,EAAE,MAAM,kBAAkB,CAAC;AAC5C,OAAO,EAAE,iBAAiB,EAAE,MAAM,2BAA2B,CAAC;AAG9D,YAAY,EAAE,YAAY,EAAE,YAAY,EAAE,kBAAkB,EAAE,MAAM,iBAAiB,CAAC"}
package/dist/index.js CHANGED
@@ -1,4 +1,4 @@
1
1
  // Re-export components and hooks
2
2
  export { FencyProvider } from './FencyProvider';
3
- export { useFency } from './useFency';
4
- export { useChatCompletion } from './useChatCompletion';
3
+ export { useFency } from './hooks/useFency';
4
+ export { useChatCompletion } from './hooks/useChatCompletion';
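These path changes are internal to dist/; the package root still re-exports the same names, so consumer import statements are identical across 0.1.9 and 0.1.11 (only useChatCompletion's return shape changed, as shown in the hooks above):

```ts
// The same import lines resolve against both versions; only the files
// behind them moved under dist/hooks/ in 0.1.11.
import { FencyProvider, useFency, useChatCompletion } from '@fencyai/react';
import type { FencyContext, FencyOptions, FencyProviderProps } from '@fencyai/react';
```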
package/dist/index.mjs CHANGED
@@ -1,2 +1,2 @@
1
- import{createContext as M,useEffect as x,useState as g}from"react";import{jsx as k}from"react/jsx-runtime";var h=M(void 0);function P({fency:e,children:t}){let[o,i]=g(null),[a,s]=g(!0),[l,m]=g(null);if(x(()=>{e.then(c=>{i(c),s(!1)}).catch(c=>{m(c),s(!1)})},[e]),!o)return null;let f={fency:o,loading:a,error:l};return k(h.Provider,{value:f,children:t})}import{useContext as A}from"react";function w(){let e=A(h);if(e===void 0)throw new Error("useFency must be used within a FencyProvider");return e}function T(e){return e&&typeof e=="object"&&typeof e.id=="string"&&typeof e.name=="string"}function K(e){return e&&typeof e=="object"&&typeof e.id=="string"&&typeof e.streamId=="string"&&Array.isArray(e.choices)}async function b(e,t,o={}){let i=o.apiUrl||"http://localhost:8080/v1/chat-completions",a={...o.request,streamId:t,openai:{model:"gpt-4o-mini",messages:[{role:"user",content:"Hello, how are you?"}],...o.request?.openai}},s=await fetch(i,{method:"POST",headers:{"Content-Type":"application/json",Authorization:`Bearer ${e}`},body:JSON.stringify(a)});if(!s.ok)throw new Error(`Failed to create chat completion: ${s.status} ${s.statusText}`);let l=await s.json();if(!K(l))throw new Error("Invalid chat completion response");return l}async function F(e,t={}){let o=t.apiUrl||"http://localhost:8080/v1/streams",i=t.name||e,a=await fetch(o,{method:"POST",headers:{"Content-Type":"application/json",Authorization:`Bearer ${e}`},body:JSON.stringify({name:i})});if(!a.ok)throw new Error(`Failed to create stream: ${a.status} ${a.statusText}`);let s=await a.json();if(!T(s))throw new Error("Invalid stream response");return s}import{useCallback as S,useState as y}from"react";import{useCallback as I,useEffect as $,useRef as O,useState as v}from"react";function E(e){let t=O(null),[o,i]=v(!1),[a,s]=v(null),[l,m]=v([]),f=I(p=>{if(t.current)return;let d=new EventSource(`http://localhost:8080/v1/streams/${p.streamId}?pk=${p.publishableKey}`,e.options);t.current=d,d.onopen=()=>{i(!0)},d.onmessage=r=>{console.log("onmessage",r);let u={data:j(r.data),event:r.type,lastEventId:r.lastEventId};s(u),m(n=>[...n,u]),e.onMessage&&e.onMessage({message:u,streamId:p.streamId})},d.onerror=()=>{i(!1)}},[e.options,e.onMessage]),c=I(()=>{t.current&&(t.current.close(),t.current=null,i(!1))},[]);return $(()=>()=>{c()},[c]),{connect:f,disconnect:c,isConnected:o,latestMessage:a,allMessages:l}}var j=e=>atob(e);function H(){let e=w(),[t,o]=y({}),[i,a]=y(!1),[s,l]=y(null),[m,f]=y(null),c=E({onMessage:r=>{console.log("Event source message",r);let u=t[r.streamId];if(u)o(n=>({...n,[r.streamId]:{...u,content:u.content+r.message.data}}));else{let n={streamId:r.streamId,role:"assistant",content:r.message.data};o(C=>({...C,[r.streamId]:n}))}}}),p=S(async(r,u)=>{try{a(!0),l(null);let n=m;n||(n=await F(e.fency.publishableKey),console.log("Stream created",n),f(n)),c.connect({streamId:n.id,publishableKey:e.fency.publishableKey}),await b(e.fency.publishableKey,n.id,{request:{openai:{model:u.model,messages:[{role:"user",content:r}]}}})}catch(n){l(n instanceof Error?n:new Error("Unknown error"))}finally{a(!1)}},[e,m,c]),d=S(()=>{o({})},[]);return{sendPrompt:p,clearMessages:d,messages:t,isLoading:i,error:s,stream:m}}export{P as FencyProvider,H as useChatCompletion,w as useFency};
1
+ import{createContext as v,useEffect as b,useState as m}from"react";import{jsx as x}from"react/jsx-runtime";var h=v(void 0);function I({fency:e,children:t}){let[n,i]=m(null),[r,o]=m(!0),[s,a]=m(null);if(b(()=>{e.then(l=>{i(l),o(!1)}).catch(l=>{a(l),o(!1)})},[e]),!n)return null;let c={fency:n,loading:r,error:s};return x(h.Provider,{value:c,children:t})}import{useContext as P}from"react";function f(){let e=P(h);if(e===void 0)throw new Error("useFency must be used within a FencyProvider");return e}var S=e=>typeof e=="object"&&e!==null&&"id"in e,k=e=>typeof e=="object"&&e!==null&&"id"in e;async function d(e,t,n={}){let i=n.apiUrl||"http://localhost:8080/v1/chat-completions",r={...n.request,streamId:t,openai:{model:"gpt-4o-mini",messages:[{role:"user",content:"Hello, how are you?"}],...n.request?.openai}},o=await fetch(i,{method:"POST",headers:{"Content-Type":"application/json",Authorization:`Bearer ${e}`},body:JSON.stringify(r)});if(!o.ok)throw new Error(`Failed to create chat completion: ${o.status} ${o.statusText}`);let s=await o.json();if(!k(s))throw new Error("Invalid chat completion response");return s}async function y(e,t={}){let n=t.apiUrl||"http://localhost:8080/v1/streams",i=t.name||e,r=await fetch(n,{method:"POST",headers:{"Content-Type":"application/json",Authorization:`Bearer ${e}`},body:JSON.stringify({name:i})});if(!r.ok)throw new Error(`Failed to create stream: ${r.status} ${r.statusText}`);let o=await r.json();if(!S(o))throw new Error("Invalid stream response");return o}import{useCallback as j,useEffect as A,useMemo as K,useState as M}from"react";import{useEffect as E,useState as C}from"react";function w(){let[e,t]=C([]),[n,i]=C();return E(()=>{if(!n)return;let r=new EventSource(n);return r.onmessage=o=>{let s=O(o);s&&t(a=>[...a,s])},r.onerror=o=>{console.error("EventSource error:",o)},()=>{r.close()}},[n]),{chunks:e,setUrl:i,url:n}}var T=e=>atob(e),O=e=>{try{let t=JSON.parse(T(e.data));return $(t)?t:null}catch(t){return console.error("Error parsing message:",t),null}},$=e=>typeof e=="object"&&e!==null&&"chatCompletionId"in e&&"streamId"in e&&"timestamp"in e&&"content"in e;function N(){let e=f(),{chunks:t,setUrl:n}=w(),[i,r]=M(null),o=j(async a=>{let c=await y(e.fency.publishableKey);return r(c),await d(e.fency.publishableKey,c.id,{request:{openai:{model:a.model,messages:[{role:"user",content:a.prompt}]}}}),{streamId:c.id,chatCompletionId:c.id}},[e]),s=K(()=>{let a=[],c=[...new Set(t.map(l=>l.chatCompletionId))];for(let l of c){let p=t.filter(u=>u.chatCompletionId===l).sort((u,F)=>u.timestamp.localeCompare(F.timestamp)),g=p.map(u=>u.content).join("");a.push({chatCompletionId:l,streamId:p[0].streamId,chunks:p,fullMessage:g})}return a},[t]);return A(()=>{i&&n(`http://localhost:8080/v1/streams/${i.id}?pk=${e.fency.publishableKey}`)},[i,e.fency.publishableKey,n]),{createChatCompletion:o,chatCompletions:s}}export{I as FencyProvider,N as useChatCompletion,f as useFency};
2
2
  //# sourceMappingURL=index.mjs.map
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "version": 3,
3
- "sources": ["../src/FencyProvider.tsx", "../src/useFency.ts", "../../fency-js/dist/index.js", "../src/useChatCompletion.ts", "../src/useEventSource.ts"],
4
- "sourcesContent": ["import { FencyInstance } from '@fencyai/js';\nimport { createContext, useEffect, useState } from 'react';\nimport type { FencyContext, FencyProviderProps } from './types';\n\n// Create the context\nexport const FencyContextValue = createContext<FencyContext | undefined>(undefined);\n\n/**\n * Provider component that provides Fency instance to child components\n * Expects a promise that resolves to a Fency instance\n */\nexport function FencyProvider({ fency, children }: FencyProviderProps) {\n const [fencyInstance, setFencyInstance] = useState<FencyInstance | null>(null);\n const [loading, setLoading] = useState(true);\n const [error, setError] = useState<Error | null>(null);\n\n useEffect(() => {\n fency\n .then((instance: FencyInstance) => {\n setFencyInstance(instance);\n setLoading(false);\n })\n .catch((err: Error) => {\n setError(err);\n setLoading(false);\n });\n }, [fency]);\n\n // Only render children and provide context when fency is loaded\n if (!fencyInstance) {\n return null;\n }\n\n const value: FencyContext = {\n fency: fencyInstance,\n loading,\n error,\n };\n\n return (\n <FencyContextValue.Provider value={value}>\n {children}\n </FencyContextValue.Provider>\n );\n} ", "import { useContext } from 'react';\nimport type { FencyContext } from './types';\nimport { FencyContextValue } from './FencyProvider';\n\n/**\n * Hook to access Fency instance and loading state\n */\nexport function useFency(): FencyContext {\n const context = useContext(FencyContextValue);\n if (context === undefined) {\n throw new Error('useFency must be used within a FencyProvider');\n }\n return context;\n} ", "function a(){if(typeof window>\"u\")return!1;let e=[\"fetch\",\"Promise\",\"JSON\"];for(let o of e)if(typeof window[o]>\"u\")return!1;if(typeof window.location<\"u\"){let o=window.location.hostname===\"localhost\"||window.location.hostname===\"127.0.0.1\",t=window.location.protocol===\"https:\";!o&&!t&&console.warn(\"Fency: For security, we recommend using HTTPS in production.\")}return!0}function s(){let e={available:!0,missing:[],warnings:[]};if(typeof window>\"u\")return e.available=!1,e.missing.push(\"Browser environment\"),e;let o=[\"fetch\",\"Promise\",\"JSON\"];for(let t of o)typeof window[t]>\"u\"&&(e.available=!1,e.missing.push(`${t} API`));if(typeof window.location<\"u\"){let t=window.location.hostname===\"localhost\"||window.location.hostname===\"127.0.0.1\",n=window.location.protocol===\"https:\";!t&&!n&&e.warnings.push(\"HTTPS is recommended for production use\")}return e}function f(e){return e&&typeof e==\"object\"&&typeof e.id==\"string\"&&typeof e.name==\"string\"}function y(e){return e&&typeof e==\"object\"&&typeof e.id==\"string\"&&typeof e.streamId==\"string\"&&Array.isArray(e.choices)}async function c(e,o,t={}){let n=t.apiUrl||\"http://localhost:8080/v1/chat-completions\",r={...t.request,streamId:o,openai:{model:\"gpt-4o-mini\",messages:[{role:\"user\",content:\"Hello, how are you?\"}],...t.request?.openai}},i=await fetch(n,{method:\"POST\",headers:{\"Content-Type\":\"application/json\",Authorization:`Bearer ${e}`},body:JSON.stringify(r)});if(!i.ok)throw new Error(`Failed to create chat completion: ${i.status} ${i.statusText}`);let m=await i.json();if(!y(m))throw new Error(\"Invalid chat completion response\");return m}async function p(e,o={}){let t=o.apiUrl||\"http://localhost:8080/v1/streams\",n=o.name||e,r=await fetch(t,{method:\"POST\",headers:{\"Content-Type\":\"application/json\",Authorization:`Bearer 
${e}`},body:JSON.stringify({name:n})});if(!r.ok)throw new Error(`Failed to create stream: ${r.status} ${r.statusText}`);let i=await r.json();if(!f(i))throw new Error(\"Invalid stream response\");return i}function l(e,o={}){return new Promise((t,n)=>{if(!e||typeof e!=\"string\"){n(new Error(\"Fency: A valid publishable key is required.\"));return}if(!e.startsWith(\"pk_\")){n(new Error('Fency: Invalid publishable key format. Keys should start with \"pk_\".'));return}let r={publishableKey:e,endpoint:o.endpoint||\"https://api.fency.ai\"};setTimeout(()=>{t(r)},0)})}var P={loadFency:l,isFencyAvailable:a,getFencyAvailabilityInfo:s,createStream:p,createChatCompletion:c};export{c as createChatCompletion,p as createStream,P as default,s as getFencyAvailabilityInfo,a as isFencyAvailable,l as loadFency};\n", "// hooks/useChatCompletion.ts\nimport { createChatCompletion, createStream } from '@fencyai/js'\nimport { useFency } from './useFency'\nimport { useCallback, useState } from 'react'\nimport { Stream } from '../../fency-js/dist/types'\nimport { useEventSource } from './useEventSource'\n\ninterface StreamMessage {\n streamId: string\n role: string\n content: string\n}\n\nexport function useChatCompletion() {\n const fency = useFency()\n const [messages, setMessages] = useState<Record<string, StreamMessage>>({})\n const [isLoading, setIsLoading] = useState(false)\n const [error, setError] = useState<Error | null>(null)\n const [stream, setStream] = useState<Stream | null>(null)\n\n // Create event source URL when stream is available\n const eventSource = useEventSource({\n onMessage: (params) => {\n console.log('Event source message', params)\n const existingMessage = messages[params.streamId]\n if (existingMessage) {\n setMessages(prev => ({ ...prev, [params.streamId]: {\n ...existingMessage,\n content: existingMessage.content + params.message.data\n }}))\n } else {\n const streamMessage: StreamMessage = {\n streamId: params.streamId,\n role: 'assistant',\n content: params.message.data\n }\n setMessages(prev => ({ ...prev, [params.streamId]: streamMessage }))\n }\n }\n })\n\n\n const sendPrompt = useCallback(async (prompt: string, promptOptions: {\n model: \"gpt-4o-mini\"\n temperature: number\n topP: number\n }) => {\n \n try {\n setIsLoading(true)\n setError(null)\n\n // Step 1: Create stream if not exists\n let s = stream\n if (!s) {\n s = await createStream(fency.fency.publishableKey)\n console.log('Stream created', s)\n setStream(s)\n }\n\n eventSource.connect({\n streamId: s.id,\n publishableKey: fency.fency.publishableKey\n })\n\n\n // Step 3: Send chat completion\n await createChatCompletion(fency.fency.publishableKey, s.id, {\n request: {\n openai: { \n model: promptOptions.model,\n messages: [{ role: 'user', content: prompt }],\n }\n }\n })\n\n } catch (error) {\n setError(error instanceof Error ? 
error : new Error('Unknown error'))\n } finally {\n setIsLoading(false)\n }\n }, [fency, stream, eventSource])\n\n const clearMessages = useCallback(() => {\n setMessages({})\n }, [])\n\n return {\n sendPrompt,\n clearMessages,\n messages,\n isLoading,\n error,\n stream,\n } \n}", "import { useCallback, useEffect, useRef, useState } from 'react'\n\ntype Message = {\n data: string\n event?: string\n lastEventId?: string\n}\n\ntype EventSourceHook = {\n connect: (params:{streamId: string, publishableKey: string }) => void\n disconnect: () => void\n isConnected: boolean\n latestMessage: Message | null\n allMessages: Message[]\n}\n\nexport function useEventSource(\n props: {\n options?: EventSourceInit,\n onMessage?: (params: {\n message: Message\n streamId: string\n }) => void,\n onError?: (error: Event) => void,\n }\n): EventSourceHook {\n const eventSourceRef = useRef<EventSource | null>(null)\n const [isConnected, setIsConnected] = useState(false)\n const [latestMessage, setLatestMessage] = useState<Message | null>(null)\n const [allMessages, setAllMessages] = useState<Message[]>([])\n\n const connect = useCallback((params: {streamId: string, publishableKey: string }) => {\n if (eventSourceRef.current) return\n\n const es = new EventSource(`http://localhost:8080/v1/streams/${params.streamId}?pk=${params.publishableKey}`, props.options)\n eventSourceRef.current = es\n\n es.onopen = () => {\n setIsConnected(true)\n }\n\n es.onmessage = (event: MessageEvent) => {\n console.log('onmessage', event)\n const message: Message = {\n data: base64Decode(event.data),\n event: event.type,\n lastEventId: event.lastEventId,\n }\n setLatestMessage(message)\n setAllMessages((prev) => [...prev, message])\n\n if (props.onMessage) {\n props.onMessage({\n message,\n streamId: params.streamId\n })\n }\n }\n\n es.onerror = () => {\n setIsConnected(false)\n // You could auto-reconnect here if needed\n }\n }, [props.options, props.onMessage])\n\n const disconnect = useCallback(() => {\n if (eventSourceRef.current) {\n eventSourceRef.current.close()\n eventSourceRef.current = null\n setIsConnected(false)\n }\n }, [])\n\n useEffect(() => {\n return () => {\n disconnect()\n }\n }, [disconnect])\n\n return {\n connect,\n disconnect,\n isConnected,\n latestMessage,\n allMessages,\n }\n}\n\nconst base64Decode = (str: string) => {\n return atob(str)\n}"],
5
- "mappings": "AACA,OAAS,iBAAAA,EAAe,aAAAC,EAAW,YAAAC,MAAgB,QAuC/C,cAAAC,MAAA,oBAnCG,IAAMC,EAAoBJ,EAAwC,MAAS,EAM3E,SAASK,EAAc,CAAE,MAAAC,EAAO,SAAAC,CAAS,EAAuB,CACrE,GAAM,CAACC,EAAeC,CAAgB,EAAIP,EAA+B,IAAI,EACvE,CAACQ,EAASC,CAAU,EAAIT,EAAS,EAAI,EACrC,CAACU,EAAOC,CAAQ,EAAIX,EAAuB,IAAI,EAerD,GAbAD,EAAU,IAAM,CACdK,EACG,KAAMQ,GAA4B,CACjCL,EAAiBK,CAAQ,EACzBH,EAAW,EAAK,CAClB,CAAC,EACA,MAAOI,GAAe,CACrBF,EAASE,CAAG,EACZJ,EAAW,EAAK,CAClB,CAAC,CACL,EAAG,CAACL,CAAK,CAAC,EAGN,CAACE,EACH,OAAO,KAGT,IAAMQ,EAAsB,CAC1B,MAAOR,EACP,QAAAE,EACA,MAAAE,CACF,EAEA,OACET,EAACC,EAAkB,SAAlB,CAA2B,MAAOY,EAChC,SAAAT,EACH,CAEJ,CC5CA,OAAS,cAAAU,MAAkB,QAOpB,SAASC,GAAyB,CACvC,IAAMC,EAAUC,EAAWC,CAAiB,EAC5C,GAAIF,IAAY,OACd,MAAM,IAAI,MAAM,8CAA8C,EAEhE,OAAOA,CACT,CCbw1B,SAASG,EAAE,EAAE,CAAC,OAAO,GAAG,OAAO,GAAG,UAAU,OAAO,EAAE,IAAI,UAAU,OAAO,EAAE,MAAM,QAAQ,CAAC,SAASC,EAAE,EAAE,CAAC,OAAO,GAAG,OAAO,GAAG,UAAU,OAAO,EAAE,IAAI,UAAU,OAAO,EAAE,UAAU,UAAU,MAAM,QAAQ,EAAE,OAAO,CAAC,CAAC,eAAeC,EAAE,EAAEC,EAAEC,EAAE,CAAC,EAAE,CAAC,IAAIC,EAAED,EAAE,QAAQ,4CAA4CE,EAAE,CAAC,GAAGF,EAAE,QAAQ,SAASD,EAAE,OAAO,CAAC,MAAM,cAAc,SAAS,CAAC,CAAC,KAAK,OAAO,QAAQ,qBAAqB,CAAC,EAAE,GAAGC,EAAE,SAAS,MAAM,CAAC,EAAEG,EAAE,MAAM,MAAMF,EAAE,CAAC,OAAO,OAAO,QAAQ,CAAC,eAAe,mBAAmB,cAAc,UAAU,CAAC,EAAE,EAAE,KAAK,KAAK,UAAUC,CAAC,CAAC,CAAC,EAAE,GAAG,CAACC,EAAE,GAAG,MAAM,IAAI,MAAM,qCAAqCA,EAAE,MAAM,IAAIA,EAAE,UAAU,EAAE,EAAE,IAAIC,EAAE,MAAMD,EAAE,KAAK,EAAE,GAAG,CAACN,EAAEO,CAAC,EAAE,MAAM,IAAI,MAAM,kCAAkC,EAAE,OAAOA,CAAC,CAAC,eAAeC,EAAE,EAAEN,EAAE,CAAC,EAAE,CAAC,IAAIC,EAAED,EAAE,QAAQ,mCAAmCE,EAAEF,EAAE,MAAM,EAAEG,EAAE,MAAM,MAAMF,EAAE,CAAC,OAAO,OAAO,QAAQ,CAAC,eAAe,mBAAmB,cAAc,UAAU,CAAC,EAAE,EAAE,KAAK,KAAK,UAAU,CAAC,KAAKC,CAAC,CAAC,CAAC,CAAC,EAAE,GAAG,CAACC,EAAE,GAAG,MAAM,IAAI,MAAM,4BAA4BA,EAAE,MAAM,IAAIA,EAAE,UAAU,EAAE,EAAE,IAAIC,EAAE,MAAMD,EAAE,KAAK,EAAE,GAAG,CAACN,EAAEO,CAAC,EAAE,MAAM,IAAI,MAAM,yBAAyB,EAAE,OAAOA,CAAC,CCG97D,OAAS,eAAAG,EAAa,YAAAC,MAAgB,QCHtC,OAAS,eAAAC,EAAa,aAAAC,EAAW,UAAAC,EAAQ,YAAAC,MAAgB,QAgBlD,SAASC,EACZC,EAQe,CACf,IAAMC,EAAiBJ,EAA2B,IAAI,EAChD,CAACK,EAAaC,CAAc,EAAIL,EAAS,EAAK,EAC9C,CAACM,EAAeC,CAAgB,EAAIP,EAAyB,IAAI,EACjE,CAACQ,EAAaC,CAAc,EAAIT,EAAoB,CAAC,CAAC,EAEtDU,EAAUb,EAAac,GAAyD,CAClF,GAAIR,EAAe,QAAS,OAE5B,IAAMS,EAAK,IAAI,YAAY,oCAAoCD,EAAO,QAAQ,OAAOA,EAAO,cAAc,GAAIT,EAAM,OAAO,EAC3HC,EAAe,QAAUS,EAEzBA,EAAG,OAAS,IAAM,CACdP,EAAe,EAAI,CACvB,EAEAO,EAAG,UAAaC,GAAwB,CACpC,QAAQ,IAAI,YAAaA,CAAK,EAC9B,IAAMC,EAAmB,CACrB,KAAMC,EAAaF,EAAM,IAAI,EAC7B,MAAOA,EAAM,KACb,YAAaA,EAAM,WACvB,EACAN,EAAiBO,CAAO,EACxBL,EAAgBO,GAAS,CAAC,GAAGA,EAAMF,CAAO,CAAC,EAEvCZ,EAAM,WACNA,EAAM,UAAU,CACZ,QAAAY,EACA,SAAUH,EAAO,QACrB,CAAC,CAET,EAEAC,EAAG,QAAU,IAAM,CACfP,EAAe,EAAK,CAExB,CACJ,EAAG,CAACH,EAAM,QAASA,EAAM,SAAS,CAAC,EAE7Be,EAAapB,EAAY,IAAM,CAC7BM,EAAe,UACfA,EAAe,QAAQ,MAAM,EAC7BA,EAAe,QAAU,KACzBE,EAAe,EAAK,EAE5B,EAAG,CAAC,CAAC,EAEL,OAAAP,EAAU,IACC,IAAM,CACTmB,EAAW,CACf,EACD,CAACA,CAAU,CAAC,EAER,CACH,QAAAP,EACA,WAAAO,EACA,YAAAb,EACA,cAAAE,EACA,YAAAE,CACJ,CACJ,CAEA,IAAMO,EAAgBG,GACX,KAAKA,CAAG,ED5EZ,SAASC,GAAoB,CAClC,IAAMC,EAAQC,EAAS,EACjB,CAACC,EAAUC,CAAW,EAAIC,EAAwC,CAAC,CAAC,EACpE,CAACC,EAAWC,CAAY,EAAIF,EAAS,EAAK,EAC1C,CAACG,EAAOC,CAAQ,EAAIJ,EAAuB,IAAI,EAC/C,CAACK,EAAQC,CAAS,EAAIN,EAAwB,IAAI,EAGhDO,EAAcC,EAAe,CAC/B,UAAYC,GAAW,CACnB,QAAQ,IAAI,uBAAwBA,CAAM,EAC1C,IAAMC,EAAkBZ,EAASW,EAAO,QAAQ,EAChD,GAAIC,EACAX,EAAYY,IAAS,CAAE,GAAGA,EAAM,CAACF,EAAO,QAAQ,EAAG,CAC/C,GAAGC,EACH,QAASA,EAAgB,QAAUD,EAAO,QAAQ,IACtD,CAAC,EAAE,MACA,CACL,IAAMG,EAA+B,CACnC,SAAUH,EAAO,SACjB,KAAM,YACN,QAASA,EAAO,QAAQ,IAC1B,EACAV,EAAYY,IAAS,CAAE,GAAGA,EAAM,CAACF,EAAO,QAAQ,EAAGG,CAAc,EAAE,CACrE,CACJ,CACJ,CAAC,EAGGC,EAAaC,
EAAY,MAAOC,EAAgBC,IAIhD,CAEJ,GAAI,CACFd,EAAa,EAAI,EACjBE,EAAS,IAAI,EAGb,IAAIa,EAAKZ,EACJY,IACHA,EAAI,MAAMC,EAAatB,EAAM,MAAM,cAAc,EACjD,QAAQ,IAAI,iBAAkBqB,CAAC,EAC/BX,EAAUW,CAAC,GAGbV,EAAY,QAAQ,CAClB,SAAUU,EAAE,GACZ,eAAgBrB,EAAM,MAAM,cAC9B,CAAC,EAID,MAAMuB,EAAqBvB,EAAM,MAAM,eAAgBqB,EAAE,GAAI,CAC3D,QAAS,CACP,OAAQ,CACN,MAAOD,EAAc,MACrB,SAAU,CAAC,CAAE,KAAM,OAAQ,QAASD,CAAO,CAAC,CAC9C,CACF,CACF,CAAC,CAEH,OAASZ,EAAO,CACdC,EAASD,aAAiB,MAAQA,EAAQ,IAAI,MAAM,eAAe,CAAC,CACtE,QAAE,CACAD,EAAa,EAAK,CACpB,CACF,EAAG,CAACN,EAAOS,EAAQE,CAAW,CAAC,EAEzBa,EAAgBN,EAAY,IAAM,CACtCf,EAAY,CAAC,CAAC,CAChB,EAAG,CAAC,CAAC,EAEL,MAAO,CACL,WAAAc,EACA,cAAAO,EACA,SAAAtB,EACA,UAAAG,EACA,MAAAE,EACA,OAAAE,CACF,CACF",
6
- "names": ["createContext", "useEffect", "useState", "jsx", "FencyContextValue", "FencyProvider", "fency", "children", "fencyInstance", "setFencyInstance", "loading", "setLoading", "error", "setError", "instance", "err", "value", "useContext", "useFency", "context", "useContext", "FencyContextValue", "f", "y", "c", "o", "t", "n", "r", "i", "m", "p", "useCallback", "useState", "useCallback", "useEffect", "useRef", "useState", "useEventSource", "props", "eventSourceRef", "isConnected", "setIsConnected", "latestMessage", "setLatestMessage", "allMessages", "setAllMessages", "connect", "params", "es", "event", "message", "base64Decode", "prev", "disconnect", "str", "useChatCompletion", "fency", "useFency", "messages", "setMessages", "useState", "isLoading", "setIsLoading", "error", "setError", "stream", "setStream", "eventSource", "useEventSource", "params", "existingMessage", "prev", "streamMessage", "sendPrompt", "useCallback", "prompt", "promptOptions", "s", "p", "c", "clearMessages"]
3
+ "sources": ["../src/FencyProvider.tsx", "../src/hooks/useFency.ts", "../../fency-js/dist/index.js", "../src/hooks/useChatCompletion.ts", "../src/hooks/useEventSource.ts"],
4
+ "sourcesContent": ["import { FencyInstance } from '@fencyai/js';\nimport { createContext, useEffect, useState } from 'react';\n\n// Create the context\nexport const FencyContextValue = createContext<FencyContext | undefined>(undefined);\n\n/**\n * Provider component that provides Fency instance to child components\n * Expects a promise that resolves to a Fency instance\n */\nexport function FencyProvider({ fency, children }: FencyProviderProps) {\n const [fencyInstance, setFencyInstance] = useState<FencyInstance | null>(null);\n const [loading, setLoading] = useState(true);\n const [error, setError] = useState<Error | null>(null);\n\n useEffect(() => {\n fency\n .then((instance: FencyInstance) => {\n setFencyInstance(instance);\n setLoading(false);\n })\n .catch((err: Error) => {\n setError(err);\n setLoading(false);\n });\n }, [fency]);\n\n // Only render children and provide context when fency is loaded\n if (!fencyInstance) {\n return null;\n }\n\n const value: FencyContext = {\n fency: fencyInstance,\n loading,\n error,\n };\n\n return (\n <FencyContextValue.Provider value={value}>\n {children}\n </FencyContextValue.Provider>\n );\n} \n\nexport interface FencyOptions {\n publishableKey: string;\n endpoint?: string;\n}\n\n/**\n * Context for Fency instance\n */\nexport interface FencyContext {\n fency: FencyInstance;\n loading: boolean;\n error: Error | null;\n}\n\n/**\n * Props for FencyProvider\n */\nexport interface FencyProviderProps {\n fency: Promise<FencyInstance>;\n children: React.ReactNode;\n} ", "import { useContext } from 'react';\nimport { FencyContextValue } from '../FencyProvider';\nimport type { FencyContext } from '../FencyProvider';\n\n/**\n * Hook to access Fency instance and loading state\n */\nexport function useFency(): FencyContext {\n const context = useContext(FencyContextValue);\n if (context === undefined) {\n throw new Error('useFency must be used within a FencyProvider');\n }\n return context;\n} ", "function a(){if(typeof window>\"u\")return!1;let e=[\"fetch\",\"Promise\",\"JSON\"];for(let o of e)if(typeof window[o]>\"u\")return!1;if(typeof window.location<\"u\"){let o=window.location.hostname===\"localhost\"||window.location.hostname===\"127.0.0.1\",t=window.location.protocol===\"https:\";!o&&!t&&console.warn(\"Fency: For security, we recommend using HTTPS in production.\")}return!0}function s(){let e={available:!0,missing:[],warnings:[]};if(typeof window>\"u\")return e.available=!1,e.missing.push(\"Browser environment\"),e;let o=[\"fetch\",\"Promise\",\"JSON\"];for(let t of o)typeof window[t]>\"u\"&&(e.available=!1,e.missing.push(`${t} API`));if(typeof window.location<\"u\"){let t=window.location.hostname===\"localhost\"||window.location.hostname===\"127.0.0.1\",n=window.location.protocol===\"https:\";!t&&!n&&e.warnings.push(\"HTTPS is recommended for production use\")}return e}var f=e=>typeof e==\"object\"&&e!==null&&\"id\"in e,y=e=>typeof e==\"object\"&&e!==null&&\"id\"in e;async function p(e,o,t={}){let n=t.apiUrl||\"http://localhost:8080/v1/chat-completions\",i={...t.request,streamId:o,openai:{model:\"gpt-4o-mini\",messages:[{role:\"user\",content:\"Hello, how are you?\"}],...t.request?.openai}},r=await fetch(n,{method:\"POST\",headers:{\"Content-Type\":\"application/json\",Authorization:`Bearer ${e}`},body:JSON.stringify(i)});if(!r.ok)throw new Error(`Failed to create chat completion: ${r.status} ${r.statusText}`);let m=await r.json();if(!y(m))throw new Error(\"Invalid chat completion response\");return m}async function c(e,o={}){let 
t=o.apiUrl||\"http://localhost:8080/v1/streams\",n=o.name||e,i=await fetch(t,{method:\"POST\",headers:{\"Content-Type\":\"application/json\",Authorization:`Bearer ${e}`},body:JSON.stringify({name:n})});if(!i.ok)throw new Error(`Failed to create stream: ${i.status} ${i.statusText}`);let r=await i.json();if(!f(r))throw new Error(\"Invalid stream response\");return r}function l(e,o={}){return new Promise((t,n)=>{if(!e||typeof e!=\"string\"){n(new Error(\"Fency: A valid publishable key is required.\"));return}if(!e.startsWith(\"pk_\")){n(new Error('Fency: Invalid publishable key format. Keys should start with \"pk_\".'));return}let i={publishableKey:e,endpoint:o.endpoint||\"https://api.fency.ai\"};setTimeout(()=>{t(i)},0)})}var P={loadFency:l,isFencyAvailable:a,getFencyAvailabilityInfo:s,createStream:c,createChatCompletion:p};export{p as createChatCompletion,c as createStream,P as default,s as getFencyAvailabilityInfo,a as isFencyAvailable,l as loadFency};\n", "// hooks/useChatCompletion.ts\nimport { createChatCompletion, createStream, Stream } from '@fencyai/js'\nimport { useCallback, useEffect, useMemo, useState } from 'react'\nimport { ChatCompletionChunk, useEventSource } from './useEventSource'\nimport { useFency } from './useFency'\n\ninterface ChatCompletion {\n chatCompletionId: string\n streamId: string\n chunks: ChatCompletionChunk[]\n fullMessage: string\n}\n\nexport function useChatCompletion(): {\n chatCompletions: ChatCompletion[]\n createChatCompletion: (params: {\n prompt: string\n model: 'gpt-4o-mini' | 'gpt-4o'\n }) => Promise<{\n streamId: string\n chatCompletionId: string\n }>\n} {\n const fency = useFency()\n const { chunks, setUrl } = useEventSource()\n const [stream, setStream] = useState<Stream | null>(null)\n\n const create = useCallback(\n async (params: {\n prompt: string\n model: 'gpt-4o-mini' | 'gpt-4o'\n }): Promise<{\n streamId: string\n chatCompletionId: string\n }> => {\n // Step 1: Create stream if not exists\n const s = await createStream(fency.fency.publishableKey)\n setStream(s)\n\n // Step 2: Send chat completion\n await createChatCompletion(fency.fency.publishableKey, s.id, {\n request: {\n openai: {\n model: params.model,\n messages: [{ role: 'user', content: params.prompt }],\n },\n },\n })\n\n return {\n streamId: s.id,\n chatCompletionId: s.id,\n }\n },\n [fency]\n )\n\n const chatCompletions = useMemo(() => {\n const chatCompletions: ChatCompletion[] = []\n\n const uniqueChatCompletionIds = [\n ...new Set(chunks.map((chunk) => chunk.chatCompletionId)),\n ]\n\n for (const chatCompletionId of uniqueChatCompletionIds) {\n const relevantChunks = chunks\n .filter((chunk) => chunk.chatCompletionId === chatCompletionId)\n .sort((a, b) => a.timestamp.localeCompare(b.timestamp))\n\n const fullMessage = relevantChunks\n .map((chunk) => chunk.content)\n .join('')\n\n chatCompletions.push({\n chatCompletionId,\n streamId: relevantChunks[0].streamId,\n chunks: relevantChunks,\n fullMessage,\n })\n }\n\n return chatCompletions\n }, [chunks])\n\n useEffect(() => {\n if (stream) {\n setUrl(\n `http://localhost:8080/v1/streams/${stream.id}?pk=${fency.fency.publishableKey}`\n )\n }\n }, [stream, fency.fency.publishableKey, setUrl])\n\n return {\n createChatCompletion: create,\n chatCompletions,\n }\n}\n", "import { useEffect, useState } from 'react'\n\ntype Message = {\n data: string\n event?: string\n lastEventId?: string\n}\n\nexport function useEventSource() {\n const [chunks, setChunks] = useState<ChatCompletionChunk[]>([])\n const [url, setUrl] = useState<string 
| null>()\n\n \n\n useEffect(() => {\n if (!url) return\n\n const eventSource = new EventSource(url)\n\n eventSource.onmessage = (event: MessageEvent) => {\n const chunk = getChatCompletionChunk(event)\n if (chunk) {\n setChunks((prev) => [...prev, chunk])\n }\n }\n\n eventSource.onerror = (error) => {\n console.error('EventSource error:', error)\n }\n\n return () => {\n eventSource.close()\n }\n }, [url])\n\n return {\n chunks,\n setUrl,\n url,\n }\n}\n\nconst base64Decode = (str: string) => {\n return atob(str)\n}\n\nconst getChatCompletionChunk = (\n message: Message\n): ChatCompletionChunk | null => {\n try {\n const json = JSON.parse(base64Decode(message.data))\n if (isChatCompletionChunk(json)) {\n return json\n }\n return null\n } catch (error) {\n console.error('Error parsing message:', error)\n return null\n }\n}\n\nconst isChatCompletionChunk = (data: unknown): boolean => {\n return (\n typeof data === 'object' &&\n data !== null &&\n 'chatCompletionId' in data &&\n 'streamId' in data &&\n 'timestamp' in data &&\n 'content' in data\n )\n}\n\nexport interface ChatCompletionChunk {\n chatCompletionId: string\n streamId: string\n timestamp: string\n content: string\n}\n"],
5
+ "mappings": "AACA,OAAS,iBAAAA,EAAe,aAAAC,EAAW,YAAAC,MAAgB,QAsC/C,cAAAC,MAAA,oBAnCG,IAAMC,EAAoBJ,EAAwC,MAAS,EAM3E,SAASK,EAAc,CAAE,MAAAC,EAAO,SAAAC,CAAS,EAAuB,CACrE,GAAM,CAACC,EAAeC,CAAgB,EAAIP,EAA+B,IAAI,EACvE,CAACQ,EAASC,CAAU,EAAIT,EAAS,EAAI,EACrC,CAACU,EAAOC,CAAQ,EAAIX,EAAuB,IAAI,EAerD,GAbAD,EAAU,IAAM,CACdK,EACG,KAAMQ,GAA4B,CACjCL,EAAiBK,CAAQ,EACzBH,EAAW,EAAK,CAClB,CAAC,EACA,MAAOI,GAAe,CACrBF,EAASE,CAAG,EACZJ,EAAW,EAAK,CAClB,CAAC,CACL,EAAG,CAACL,CAAK,CAAC,EAGN,CAACE,EACH,OAAO,KAGT,IAAMQ,EAAsB,CAC1B,MAAOR,EACP,QAAAE,EACA,MAAAE,CACF,EAEA,OACET,EAACC,EAAkB,SAAlB,CAA2B,MAAOY,EAChC,SAAAT,EACH,CAEJ,CC3CA,OAAS,cAAAU,MAAkB,QAOpB,SAASC,GAAyB,CACvC,IAAMC,EAAUC,EAAWC,CAAiB,EAC5C,GAAIF,IAAY,OACd,MAAM,IAAI,MAAM,8CAA8C,EAEhE,OAAOA,CACT,CCbw1B,IAAIG,EAAE,GAAG,OAAO,GAAG,UAAU,IAAI,MAAM,OAAO,EAAEC,EAAE,GAAG,OAAO,GAAG,UAAU,IAAI,MAAM,OAAO,EAAE,eAAeC,EAAE,EAAEC,EAAEC,EAAE,CAAC,EAAE,CAAC,IAAIC,EAAED,EAAE,QAAQ,4CAA4CE,EAAE,CAAC,GAAGF,EAAE,QAAQ,SAASD,EAAE,OAAO,CAAC,MAAM,cAAc,SAAS,CAAC,CAAC,KAAK,OAAO,QAAQ,qBAAqB,CAAC,EAAE,GAAGC,EAAE,SAAS,MAAM,CAAC,EAAEG,EAAE,MAAM,MAAMF,EAAE,CAAC,OAAO,OAAO,QAAQ,CAAC,eAAe,mBAAmB,cAAc,UAAU,CAAC,EAAE,EAAE,KAAK,KAAK,UAAUC,CAAC,CAAC,CAAC,EAAE,GAAG,CAACC,EAAE,GAAG,MAAM,IAAI,MAAM,qCAAqCA,EAAE,MAAM,IAAIA,EAAE,UAAU,EAAE,EAAE,IAAIC,EAAE,MAAMD,EAAE,KAAK,EAAE,GAAG,CAACN,EAAEO,CAAC,EAAE,MAAM,IAAI,MAAM,kCAAkC,EAAE,OAAOA,CAAC,CAAC,eAAeC,EAAE,EAAEN,EAAE,CAAC,EAAE,CAAC,IAAIC,EAAED,EAAE,QAAQ,mCAAmCE,EAAEF,EAAE,MAAM,EAAEG,EAAE,MAAM,MAAMF,EAAE,CAAC,OAAO,OAAO,QAAQ,CAAC,eAAe,mBAAmB,cAAc,UAAU,CAAC,EAAE,EAAE,KAAK,KAAK,UAAU,CAAC,KAAKC,CAAC,CAAC,CAAC,CAAC,EAAE,GAAG,CAACC,EAAE,GAAG,MAAM,IAAI,MAAM,4BAA4BA,EAAE,MAAM,IAAIA,EAAE,UAAU,EAAE,EAAE,IAAIC,EAAE,MAAMD,EAAE,KAAK,EAAE,GAAG,CAACN,EAAEO,CAAC,EAAE,MAAM,IAAI,MAAM,yBAAyB,EAAE,OAAOA,CAAC,CCEt0D,OAAS,eAAAG,EAAa,aAAAC,EAAW,WAAAC,EAAS,YAAAC,MAAgB,QCF1D,OAAS,aAAAC,EAAW,YAAAC,MAAgB,QAQ7B,SAASC,GAAiB,CAC7B,GAAM,CAACC,EAAQC,CAAS,EAAIH,EAAgC,CAAC,CAAC,EACxD,CAACI,EAAKC,CAAM,EAAIL,EAAwB,EAI9C,OAAAD,EAAU,IAAM,CACZ,GAAI,CAACK,EAAK,OAEV,IAAME,EAAc,IAAI,YAAYF,CAAG,EAEvC,OAAAE,EAAY,UAAaC,GAAwB,CAC7C,IAAMC,EAAQC,EAAuBF,CAAK,EACtCC,GACAL,EAAWO,GAAS,CAAC,GAAGA,EAAMF,CAAK,CAAC,CAE5C,EAEAF,EAAY,QAAWK,GAAU,CAC7B,QAAQ,MAAM,qBAAsBA,CAAK,CAC7C,EAEO,IAAM,CACTL,EAAY,MAAM,CACtB,CACJ,EAAG,CAACF,CAAG,CAAC,EAED,CACH,OAAAF,EACA,OAAAG,EACA,IAAAD,CACJ,CACJ,CAEA,IAAMQ,EAAgBC,GACX,KAAKA,CAAG,EAGbJ,EACFK,GAC6B,CAC7B,GAAI,CACA,IAAMC,EAAO,KAAK,MAAMH,EAAaE,EAAQ,IAAI,CAAC,EAClD,OAAIE,EAAsBD,CAAI,EACnBA,EAEJ,IACX,OAASJ,EAAO,CACZ,eAAQ,MAAM,yBAA0BA,CAAK,EACtC,IACX,CACJ,EAEMK,EAAyBC,GAEvB,OAAOA,GAAS,UAChBA,IAAS,MACT,qBAAsBA,GACtB,aAAcA,GACd,cAAeA,GACf,YAAaA,EDvDd,SAASC,GASd,CACE,IAAMC,EAAQC,EAAS,EACjB,CAAE,OAAAC,EAAQ,OAAAC,CAAO,EAAIC,EAAe,EACpC,CAACC,EAAQC,CAAS,EAAIC,EAAwB,IAAI,EAElDC,EAASC,EACX,MAAOC,GAMD,CAEF,IAAMC,EAAI,MAAMC,EAAaZ,EAAM,MAAM,cAAc,EACvD,OAAAM,EAAUK,CAAC,EAGX,MAAME,EAAqBb,EAAM,MAAM,eAAgBW,EAAE,GAAI,CACzD,QAAS,CACL,OAAQ,CACJ,MAAOD,EAAO,MACd,SAAU,CAAC,CAAE,KAAM,OAAQ,QAASA,EAAO,MAAO,CAAC,CACvD,CACJ,CACJ,CAAC,EAEM,CACH,SAAUC,EAAE,GACZ,iBAAkBA,EAAE,EACxB,CACJ,EACA,CAACX,CAAK,CACV,EAEMc,EAAkBC,EAAQ,IAAM,CAClC,IAAMD,EAAoC,CAAC,EAErCE,EAA0B,CAC5B,GAAG,IAAI,IAAId,EAAO,IAAKe,GAAUA,EAAM,gBAAgB,CAAC,CAC5D,EAEA,QAAWC,KAAoBF,EAAyB,CACpD,IAAMG,EAAiBjB,EAClB,OAAQe,GAAUA,EAAM,mBAAqBC,CAAgB,EAC7D,KAAK,CAACE,EAAGC,IAAMD,EAAE,UAAU,cAAcC,EAAE,SAAS,CAAC,EAEpDC,EAAcH,EACf,IAAKF,GAAUA,EAAM,OAAO,EAC5B,KAAK,EAAE,EAEZH,EAAgB,KAAK,CACjB,iBAAAI,EACA,SAAUC,EAAe,CAAC,EAAE,SAC5B,OAAQA,EACR,YAAAG,CACJ,CAAC,CACL,CAEA,OAAOR,CACX,EAAG,CAACZ,CAAM,CAAC,EAEX,OAAAqB,EAAU,IAAM,CACRlB,GACAF,EACI,oCAAoCE,EAAO,EAAE,OAAOL,EAAM,M
AAM,cAAc,EAClF,CAER,EAAG,CAACK,EAAQL,EAAM,MAAM,eAAgBG,CAAM,CAAC,EAExC,CACH,qBAAsBK,EACtB,gBAAAM,CACJ,CACJ",
6
+ "names": ["createContext", "useEffect", "useState", "jsx", "FencyContextValue", "FencyProvider", "fency", "children", "fencyInstance", "setFencyInstance", "loading", "setLoading", "error", "setError", "instance", "err", "value", "useContext", "useFency", "context", "useContext", "FencyContextValue", "f", "y", "p", "o", "t", "n", "i", "r", "m", "c", "useCallback", "useEffect", "useMemo", "useState", "useEffect", "useState", "useEventSource", "chunks", "setChunks", "url", "setUrl", "eventSource", "event", "chunk", "getChatCompletionChunk", "prev", "error", "base64Decode", "str", "message", "json", "isChatCompletionChunk", "data", "useChatCompletion", "fency", "useFency", "chunks", "setUrl", "useEventSource", "stream", "setStream", "useState", "create", "useCallback", "params", "s", "c", "p", "chatCompletions", "useMemo", "uniqueChatCompletionIds", "chunk", "chatCompletionId", "relevantChunks", "a", "b", "fullMessage", "useEffect"]
7
7
  }
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@fencyai/react",
3
- "version": "0.1.9",
3
+ "version": "0.1.11",
4
4
  "description": "React components for Fency integration",
5
5
  "type": "module",
6
6
  "main": "./dist/index.cjs",
package/dist/types.d.ts DELETED
@@ -1,21 +0,0 @@
1
- import { FencyInstance } from "@fencyai/js";
2
- export interface FencyOptions {
3
- publishableKey: string;
4
- endpoint?: string;
5
- }
6
- /**
7
- * Context for Fency instance
8
- */
9
- export interface FencyContext {
10
- fency: FencyInstance;
11
- loading: boolean;
12
- error: Error | null;
13
- }
14
- /**
15
- * Props for FencyProvider
16
- */
17
- export interface FencyProviderProps {
18
- fency: Promise<FencyInstance>;
19
- children: React.ReactNode;
20
- }
21
- //# sourceMappingURL=types.d.ts.map
@@ -1 +0,0 @@
1
- {"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../src/types.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,aAAa,EAAE,MAAM,aAAa,CAAC;AAE5C,MAAM,WAAW,YAAY;IAC3B,cAAc,EAAE,MAAM,CAAC;IACvB,QAAQ,CAAC,EAAE,MAAM,CAAC;CACnB;AAED;;GAEG;AACH,MAAM,WAAW,YAAY;IAC3B,KAAK,EAAE,aAAa,CAAC;IACrB,OAAO,EAAE,OAAO,CAAC;IACjB,KAAK,EAAE,KAAK,GAAG,IAAI,CAAC;CACrB;AAED;;GAEG;AACH,MAAM,WAAW,kBAAkB;IACjC,KAAK,EAAE,OAAO,CAAC,aAAa,CAAC,CAAC;IAC9B,QAAQ,EAAE,KAAK,CAAC,SAAS,CAAC;CAC3B"}
package/dist/types.js DELETED
@@ -1 +0,0 @@
1
- export {};
package/dist/useChatCompletion.d.ts DELETED
@@ -1,20 +0,0 @@
1
- import { Stream } from '../../fency-js/dist/types';
2
- interface StreamMessage {
3
- streamId: string;
4
- role: string;
5
- content: string;
6
- }
7
- export declare function useChatCompletion(): {
8
- sendPrompt: (prompt: string, promptOptions: {
9
- model: "gpt-4o-mini";
10
- temperature: number;
11
- topP: number;
12
- }) => Promise<void>;
13
- clearMessages: () => void;
14
- messages: Record<string, StreamMessage>;
15
- isLoading: boolean;
16
- error: Error | null;
17
- stream: Stream | null;
18
- };
19
- export {};
20
- //# sourceMappingURL=useChatCompletion.d.ts.map
@@ -1 +0,0 @@
1
- {"version":3,"file":"useChatCompletion.d.ts","sourceRoot":"","sources":["../src/useChatCompletion.ts"],"names":[],"mappings":"AAIA,OAAO,EAAE,MAAM,EAAE,MAAM,2BAA2B,CAAA;AAGlD,UAAU,aAAa;IACrB,QAAQ,EAAE,MAAM,CAAA;IAChB,IAAI,EAAE,MAAM,CAAA;IACZ,OAAO,EAAE,MAAM,CAAA;CAChB;AAED,wBAAgB,iBAAiB;yBA6Be,MAAM,iBAAiB;QACnE,KAAK,EAAE,aAAa,CAAA;QACpB,WAAW,EAAE,MAAM,CAAA;QACnB,IAAI,EAAE,MAAM,CAAA;KACb;;;;;;EAiDF"}
package/dist/useChatCompletion.js DELETED
@@ -1,76 +0,0 @@
1
- // hooks/useChatCompletion.ts
2
- import { createChatCompletion, createStream } from '@fencyai/js';
3
- import { useFency } from './useFency';
4
- import { useCallback, useState } from 'react';
5
- import { useEventSource } from './useEventSource';
6
- export function useChatCompletion() {
7
- const fency = useFency();
8
- const [messages, setMessages] = useState({});
9
- const [isLoading, setIsLoading] = useState(false);
10
- const [error, setError] = useState(null);
11
- const [stream, setStream] = useState(null);
12
- // Create event source URL when stream is available
13
- const eventSource = useEventSource({
14
- onMessage: (params) => {
15
- console.log('Event source message', params);
16
- const existingMessage = messages[params.streamId];
17
- if (existingMessage) {
18
- setMessages(prev => ({ ...prev, [params.streamId]: {
19
- ...existingMessage,
20
- content: existingMessage.content + params.message.data
21
- } }));
22
- }
23
- else {
24
- const streamMessage = {
25
- streamId: params.streamId,
26
- role: 'assistant',
27
- content: params.message.data
28
- };
29
- setMessages(prev => ({ ...prev, [params.streamId]: streamMessage }));
30
- }
31
- }
32
- });
33
- const sendPrompt = useCallback(async (prompt, promptOptions) => {
34
- try {
35
- setIsLoading(true);
36
- setError(null);
37
- // Step 1: Create stream if not exists
38
- let s = stream;
39
- if (!s) {
40
- s = await createStream(fency.fency.publishableKey);
41
- console.log('Stream created', s);
42
- setStream(s);
43
- }
44
- eventSource.connect({
45
- streamId: s.id,
46
- publishableKey: fency.fency.publishableKey
47
- });
48
- // Step 3: Send chat completion
49
- await createChatCompletion(fency.fency.publishableKey, s.id, {
50
- request: {
51
- openai: {
52
- model: promptOptions.model,
53
- messages: [{ role: 'user', content: prompt }],
54
- }
55
- }
56
- });
57
- }
58
- catch (error) {
59
- setError(error instanceof Error ? error : new Error('Unknown error'));
60
- }
61
- finally {
62
- setIsLoading(false);
63
- }
64
- }, [fency, stream, eventSource]);
65
- const clearMessages = useCallback(() => {
66
- setMessages({});
67
- }, []);
68
- return {
69
- sendPrompt,
70
- clearMessages,
71
- messages,
72
- isLoading,
73
- error,
74
- stream,
75
- };
76
- }
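The file above is the removed 0.1.9 implementation; its sendPrompt / clearMessages / messages / isLoading / error / stream surface no longer exists in 0.1.11. A hedged migration sketch that keeps equivalent loading and error state on the caller's side:

```tsx
import { useState } from 'react';
import { useChatCompletion } from '@fencyai/react';

export function PromptWithStatus() {
  // 0.1.11 exposes only createChatCompletion and chatCompletions,
  // so loading/error tracking moves into the calling component.
  const { createChatCompletion, chatCompletions } = useChatCompletion();
  const [isLoading, setIsLoading] = useState(false);
  const [error, setError] = useState<Error | null>(null);

  const send = async (prompt: string) => {
    setIsLoading(true);
    setError(null);
    try {
      // Roughly replaces: await sendPrompt(prompt, { model: 'gpt-4o-mini', ... })
      await createChatCompletion({ prompt, model: 'gpt-4o-mini' });
    } catch (err) {
      setError(err instanceof Error ? err : new Error('Unknown error'));
    } finally {
      setIsLoading(false);
    }
  };

  return (
    <div>
      <button disabled={isLoading} onClick={() => send('Hello, how are you?')}>
        {isLoading ? 'Sending...' : 'Send'}
      </button>
      {error && <p>{error.message}</p>}
      {chatCompletions.map((c) => (
        <p key={c.chatCompletionId}>{c.fullMessage}</p>
      ))}
    </div>
  );
}
```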
package/dist/useEventSource.d.ts DELETED
@@ -1,25 +0,0 @@
1
- type Message = {
2
- data: string;
3
- event?: string;
4
- lastEventId?: string;
5
- };
6
- type EventSourceHook = {
7
- connect: (params: {
8
- streamId: string;
9
- publishableKey: string;
10
- }) => void;
11
- disconnect: () => void;
12
- isConnected: boolean;
13
- latestMessage: Message | null;
14
- allMessages: Message[];
15
- };
16
- export declare function useEventSource(props: {
17
- options?: EventSourceInit;
18
- onMessage?: (params: {
19
- message: Message;
20
- streamId: string;
21
- }) => void;
22
- onError?: (error: Event) => void;
23
- }): EventSourceHook;
24
- export {};
25
- //# sourceMappingURL=useEventSource.d.ts.map
@@ -1 +0,0 @@
1
- {"version":3,"file":"useEventSource.d.ts","sourceRoot":"","sources":["../src/useEventSource.ts"],"names":[],"mappings":"AAEA,KAAK,OAAO,GAAG;IACX,IAAI,EAAE,MAAM,CAAA;IACZ,KAAK,CAAC,EAAE,MAAM,CAAA;IACd,WAAW,CAAC,EAAE,MAAM,CAAA;CACvB,CAAA;AAED,KAAK,eAAe,GAAG;IACnB,OAAO,EAAE,CAAC,MAAM,EAAC;QAAC,QAAQ,EAAE,MAAM,CAAC;QAAC,cAAc,EAAE,MAAM,CAAA;KAAG,KAAK,IAAI,CAAA;IACtE,UAAU,EAAE,MAAM,IAAI,CAAA;IACtB,WAAW,EAAE,OAAO,CAAA;IACpB,aAAa,EAAE,OAAO,GAAG,IAAI,CAAA;IAC7B,WAAW,EAAE,OAAO,EAAE,CAAA;CACzB,CAAA;AAED,wBAAgB,cAAc,CAC1B,KAAK,EAAG;IACJ,OAAO,CAAC,EAAE,eAAe,CAAC;IAC1B,SAAS,CAAC,EAAE,CAAC,MAAM,EAAE;QACjB,OAAO,EAAE,OAAO,CAAA;QAChB,QAAQ,EAAE,MAAM,CAAA;KACnB,KAAK,IAAI,CAAC;IACX,OAAO,CAAC,EAAE,CAAC,KAAK,EAAE,KAAK,KAAK,IAAI,CAAC;CACpC,GACF,eAAe,CA6DjB"}
package/dist/useEventSource.js DELETED
@@ -1,58 +0,0 @@
1
- import { useCallback, useEffect, useRef, useState } from 'react';
2
- export function useEventSource(props) {
3
- const eventSourceRef = useRef(null);
4
- const [isConnected, setIsConnected] = useState(false);
5
- const [latestMessage, setLatestMessage] = useState(null);
6
- const [allMessages, setAllMessages] = useState([]);
7
- const connect = useCallback((params) => {
8
- if (eventSourceRef.current)
9
- return;
10
- const es = new EventSource(`http://localhost:8080/v1/streams/${params.streamId}?pk=${params.publishableKey}`, props.options);
11
- eventSourceRef.current = es;
12
- es.onopen = () => {
13
- setIsConnected(true);
14
- };
15
- es.onmessage = (event) => {
16
- console.log('onmessage', event);
17
- const message = {
18
- data: base64Decode(event.data),
19
- event: event.type,
20
- lastEventId: event.lastEventId,
21
- };
22
- setLatestMessage(message);
23
- setAllMessages((prev) => [...prev, message]);
24
- if (props.onMessage) {
25
- props.onMessage({
26
- message,
27
- streamId: params.streamId
28
- });
29
- }
30
- };
31
- es.onerror = () => {
32
- setIsConnected(false);
33
- // You could auto-reconnect here if needed
34
- };
35
- }, [props.options, props.onMessage]);
36
- const disconnect = useCallback(() => {
37
- if (eventSourceRef.current) {
38
- eventSourceRef.current.close();
39
- eventSourceRef.current = null;
40
- setIsConnected(false);
41
- }
42
- }, []);
43
- useEffect(() => {
44
- return () => {
45
- disconnect();
46
- };
47
- }, [disconnect]);
48
- return {
49
- connect,
50
- disconnect,
51
- isConnected,
52
- latestMessage,
53
- allMessages,
54
- };
55
- }
56
- const base64Decode = (str) => {
57
- return atob(str);
58
- };
package/dist/useFency.d.ts.map DELETED
@@ -1 +0,0 @@
1
- {"version":3,"file":"useFency.d.ts","sourceRoot":"","sources":["../src/useFency.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,SAAS,CAAC;AAG5C;;GAEG;AACH,wBAAgB,QAAQ,IAAI,YAAY,CAMvC"}