@fencyai/react 0.1.39 → 0.1.41
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/index.d.ts +4 -1
- package/lib/index.js +1 -1
- package/lib/useChatCompletions/index.d.ts +35 -0
- package/lib/useChatCompletions/index.js +58 -0
- package/lib/useChatCompletions/types.d.ts +24 -0
- package/lib/useChatCompletions/types.js +1 -0
- package/lib/useChatCompletions/useStreamingChatCompletions.d.ts +36 -0
- package/lib/useChatCompletions/useStreamingChatCompletions.js +189 -0
- package/lib/useChatCompletions/useStructuredChatCompletions.d.ts +32 -0
- package/lib/useChatCompletions/useStructuredChatCompletions.js +99 -0
- package/lib/useChatCompletions/useSynchronousChatCompletions.d.ts +30 -0
- package/lib/useChatCompletions/useSynchronousChatCompletions.js +106 -0
- package/lib/{chat-completions/useEventSource.d.ts → useEventSource.d.ts} +3 -1
- package/lib/{chat-completions/useEventSource.js → useEventSource.js} +13 -4
- package/package.json +4 -4
- package/lib/chat-completions/useChatCompletions.d.ts +0 -53
- package/lib/chat-completions/useChatCompletions.js +0 -192
package/lib/index.d.ts
CHANGED
|
@@ -1,2 +1,5 @@
|
|
|
1
|
-
export { useChatCompletions } from './
|
|
1
|
+
export { useChatCompletions } from './useChatCompletions';
|
|
2
2
|
export { FencyProvider, FencyProviderProps } from './provider/FencyProvider';
|
|
3
|
+
export { CreateStreamingChatCompletionParams } from './useChatCompletions/useStreamingChatCompletions';
|
|
4
|
+
export { CreateStructuredChatCompletionParams } from './useChatCompletions/useStructuredChatCompletions';
|
|
5
|
+
export { CreateSynchronousChatCompletionParams } from './useChatCompletions/useSynchronousChatCompletions';
|
package/lib/index.js
CHANGED
|
@@ -0,0 +1,35 @@
|
|
|
1
|
+
import { ApiError } from '@fencyai/js/lib/types/ApiError';
|
|
2
|
+
import { ZodTypeAny } from 'zod';
|
|
3
|
+
import { CreateStreamingChatCompletionParams, StreamingChatCompletion } from './useStreamingChatCompletions';
|
|
4
|
+
import { CreateStructuredChatCompletionParams, StructuredChatCompletion, StructuredChatCompletionResponse } from './useStructuredChatCompletions';
|
|
5
|
+
import { CreateSynchronousChatCompletionParams, SynchronousChatCompletion, SynchronousChatCompletionResponse } from './useSynchronousChatCompletions';
|
|
6
|
+
type CombinedChatCompletion = {
|
|
7
|
+
type: 'synchronous';
|
|
8
|
+
chatCompletion: SynchronousChatCompletion;
|
|
9
|
+
} | {
|
|
10
|
+
type: 'structured';
|
|
11
|
+
chatCompletion: StructuredChatCompletion<ZodTypeAny>;
|
|
12
|
+
} | {
|
|
13
|
+
type: 'streaming';
|
|
14
|
+
chatCompletion: StreamingChatCompletion;
|
|
15
|
+
};
|
|
16
|
+
interface HookResponse {
|
|
17
|
+
latest: {
|
|
18
|
+
basic: SynchronousChatCompletion | null;
|
|
19
|
+
structured: StructuredChatCompletion<ZodTypeAny> | null;
|
|
20
|
+
streaming: StreamingChatCompletion | null;
|
|
21
|
+
};
|
|
22
|
+
chatCompletions: CombinedChatCompletion[];
|
|
23
|
+
createChatCompletion: (params: CreateSynchronousChatCompletionParams) => Promise<SynchronousChatCompletionResponse>;
|
|
24
|
+
createStructuredChatCompletion: <T extends ZodTypeAny>(params: CreateStructuredChatCompletionParams<T>) => Promise<StructuredChatCompletionResponse<T>>;
|
|
25
|
+
createStreamingChatCompletion: (params: CreateStreamingChatCompletionParams) => Promise<{
|
|
26
|
+
type: 'success';
|
|
27
|
+
chatCompletionStreamId: string;
|
|
28
|
+
chatCompletionId: string;
|
|
29
|
+
} | {
|
|
30
|
+
type: 'error';
|
|
31
|
+
error: ApiError;
|
|
32
|
+
}>;
|
|
33
|
+
}
|
|
34
|
+
export declare function useChatCompletions(): HookResponse;
|
|
35
|
+
export {};
|
|
@@ -0,0 +1,58 @@
|
|
|
1
|
+
import { useMemo } from 'react';
|
|
2
|
+
import { useFencyContext } from '../provider/useFencyContext';
|
|
3
|
+
import { useStreamingChatCompletions, } from './useStreamingChatCompletions';
|
|
4
|
+
import { useStructuredChatCompletions, } from './useStructuredChatCompletions';
|
|
5
|
+
import { useSynchronousChatCompletions, } from './useSynchronousChatCompletions';
|
|
6
|
+
export function useChatCompletions() {
|
|
7
|
+
const context = useFencyContext();
|
|
8
|
+
const synchronousChatCompletions = useSynchronousChatCompletions(context);
|
|
9
|
+
const streamingChatCompletions = useStreamingChatCompletions(context);
|
|
10
|
+
const structuredChatCompletions = useStructuredChatCompletions(context);
|
|
11
|
+
const combinedChatCompletions = useMemo(() => {
|
|
12
|
+
const completions = [];
|
|
13
|
+
for (const chatCompletion of synchronousChatCompletions.chatCompletions) {
|
|
14
|
+
completions.push({
|
|
15
|
+
type: 'synchronous',
|
|
16
|
+
chatCompletion: chatCompletion,
|
|
17
|
+
});
|
|
18
|
+
}
|
|
19
|
+
for (const chatCompletion of structuredChatCompletions.chatCompletions) {
|
|
20
|
+
completions.push({
|
|
21
|
+
type: 'structured',
|
|
22
|
+
chatCompletion: chatCompletion,
|
|
23
|
+
});
|
|
24
|
+
}
|
|
25
|
+
for (const chatCompletion of streamingChatCompletions.chatCompletions) {
|
|
26
|
+
completions.push({
|
|
27
|
+
type: 'streaming',
|
|
28
|
+
chatCompletion: chatCompletion,
|
|
29
|
+
});
|
|
30
|
+
}
|
|
31
|
+
return completions;
|
|
32
|
+
}, [
|
|
33
|
+
synchronousChatCompletions.chatCompletions,
|
|
34
|
+
structuredChatCompletions.chatCompletions,
|
|
35
|
+
streamingChatCompletions.chatCompletions,
|
|
36
|
+
]);
|
|
37
|
+
const latest = useMemo(() => {
|
|
38
|
+
const lastCompletion = combinedChatCompletions[combinedChatCompletions.length - 1];
|
|
39
|
+
return {
|
|
40
|
+
basic: lastCompletion?.type === 'synchronous'
|
|
41
|
+
? lastCompletion.chatCompletion
|
|
42
|
+
: null,
|
|
43
|
+
structured: lastCompletion?.type === 'structured'
|
|
44
|
+
? lastCompletion.chatCompletion
|
|
45
|
+
: null,
|
|
46
|
+
streaming: lastCompletion?.type === 'streaming'
|
|
47
|
+
? lastCompletion.chatCompletion
|
|
48
|
+
: null,
|
|
49
|
+
};
|
|
50
|
+
}, [combinedChatCompletions]);
|
|
51
|
+
return {
|
|
52
|
+
createChatCompletion: synchronousChatCompletions.createSynchronousChatCompletion,
|
|
53
|
+
createStructuredChatCompletion: structuredChatCompletions.createStructuredChatCompletion,
|
|
54
|
+
createStreamingChatCompletion: streamingChatCompletions.createStreamingChatCompletion,
|
|
55
|
+
chatCompletions: combinedChatCompletions,
|
|
56
|
+
latest,
|
|
57
|
+
};
|
|
58
|
+
}
|
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
import { AnthropicModel } from '@fencyai/js/lib/types/AnthropicModel';
|
|
2
|
+
import { ChatCompletionMessage } from '@fencyai/js/lib/types/ChatCompletionMessage';
|
|
3
|
+
import { GeminiModel } from '@fencyai/js/lib/types/GeminiModel';
|
|
4
|
+
import { OpenAiModel } from '@fencyai/js/lib/types/OpenAiModel';
|
|
5
|
+
export interface CreateOpenAiChatCompletionParams {
|
|
6
|
+
model: OpenAiModel;
|
|
7
|
+
messages: Array<ChatCompletionMessage>;
|
|
8
|
+
temperature?: number;
|
|
9
|
+
topP?: number;
|
|
10
|
+
}
|
|
11
|
+
export interface CreateGeminiChatCompletionParams {
|
|
12
|
+
model: GeminiModel;
|
|
13
|
+
content: string;
|
|
14
|
+
temperature?: number;
|
|
15
|
+
topP?: number;
|
|
16
|
+
topK?: number;
|
|
17
|
+
}
|
|
18
|
+
export interface CreateAnthropicChatCompletionParams {
|
|
19
|
+
model: AnthropicModel;
|
|
20
|
+
messages: Array<ChatCompletionMessage>;
|
|
21
|
+
temperature?: number;
|
|
22
|
+
topP?: number;
|
|
23
|
+
topK?: number;
|
|
24
|
+
}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export {};
|
|
@@ -0,0 +1,36 @@
|
|
|
1
|
+
import { ChatCompletionStream } from '@fencyai/js';
|
|
2
|
+
import { ApiError } from '@fencyai/js/lib/types/ApiError';
|
|
3
|
+
import { FencyContext } from '../provider/FencyContext';
|
|
4
|
+
import { ChatCompletionChunk } from '../useEventSource';
|
|
5
|
+
import { CreateAnthropicChatCompletionParams, CreateGeminiChatCompletionParams, CreateOpenAiChatCompletionParams } from './types';
|
|
6
|
+
export interface StreamingChatCompletionData {
|
|
7
|
+
id: string;
|
|
8
|
+
createdAt: string;
|
|
9
|
+
streamId: string;
|
|
10
|
+
}
|
|
11
|
+
export interface StreamingChatCompletion {
|
|
12
|
+
streamId: string;
|
|
13
|
+
data: StreamingChatCompletionData | null;
|
|
14
|
+
error: ApiError | null;
|
|
15
|
+
response: string;
|
|
16
|
+
chunks: ChatCompletionChunk[];
|
|
17
|
+
loading: boolean;
|
|
18
|
+
doneStreaming: boolean;
|
|
19
|
+
}
|
|
20
|
+
export interface CreateStreamingChatCompletionParams {
|
|
21
|
+
openai?: CreateOpenAiChatCompletionParams;
|
|
22
|
+
gemini?: CreateGeminiChatCompletionParams;
|
|
23
|
+
anthropic?: CreateAnthropicChatCompletionParams;
|
|
24
|
+
}
|
|
25
|
+
export declare const useStreamingChatCompletions: (context: FencyContext) => {
|
|
26
|
+
chatCompletions: StreamingChatCompletion[];
|
|
27
|
+
createStreamingChatCompletion: (params: CreateStreamingChatCompletionParams) => Promise<{
|
|
28
|
+
type: "success";
|
|
29
|
+
chatCompletionStreamId: string;
|
|
30
|
+
chatCompletionId: string;
|
|
31
|
+
} | {
|
|
32
|
+
type: "error";
|
|
33
|
+
error: ApiError;
|
|
34
|
+
}>;
|
|
35
|
+
stream: ChatCompletionStream | null;
|
|
36
|
+
};
|
|
@@ -0,0 +1,189 @@
|
|
|
1
|
+
import { createChatCompletion, createChatCompletionStream, } from '@fencyai/js';
|
|
2
|
+
import { useCallback, useEffect, useState } from 'react';
|
|
3
|
+
import { useEventSource } from '../useEventSource';
|
|
4
|
+
export const useStreamingChatCompletions = (context) => {
|
|
5
|
+
const [chatCompletions, setChatCompletions] = useState([]);
|
|
6
|
+
const [stream, setStream] = useState(null);
|
|
7
|
+
const [completedStreamIds, setCompletedStreamIds] = useState([]);
|
|
8
|
+
const [urlToStreamIdMapping, setUrlToStreamIdMapping] = useState({});
|
|
9
|
+
const { chunks, setUrl } = useEventSource({
|
|
10
|
+
onDone: (url) => {
|
|
11
|
+
const streamId = urlToStreamIdMapping[url];
|
|
12
|
+
if (!streamId) {
|
|
13
|
+
throw new Error(`Stream ID not found for URL: ${url}`);
|
|
14
|
+
}
|
|
15
|
+
setCompletedStreamIds((prev) => [...prev, streamId]);
|
|
16
|
+
},
|
|
17
|
+
});
|
|
18
|
+
useEffect(() => {
|
|
19
|
+
setChatCompletions((prev) => {
|
|
20
|
+
return prev.map((chatCompletion) => {
|
|
21
|
+
if (completedStreamIds.includes(chatCompletion.streamId)) {
|
|
22
|
+
return {
|
|
23
|
+
...chatCompletion,
|
|
24
|
+
doneStreaming: true,
|
|
25
|
+
loading: false,
|
|
26
|
+
};
|
|
27
|
+
}
|
|
28
|
+
return chatCompletion;
|
|
29
|
+
});
|
|
30
|
+
});
|
|
31
|
+
}, [completedStreamIds]);
|
|
32
|
+
useEffect(() => {
|
|
33
|
+
if (stream) {
|
|
34
|
+
const url = `${context.fency.baseUrl}/v1/pub/chat-completion-streams/${stream.id}?pk=${context.fency.publishableKey}`;
|
|
35
|
+
setUrl(url);
|
|
36
|
+
setUrlToStreamIdMapping((prev) => ({ ...prev, [url]: stream.id }));
|
|
37
|
+
}
|
|
38
|
+
}, [stream, context.fency.publishableKey, setUrl]);
|
|
39
|
+
useEffect(() => {
|
|
40
|
+
const newChatCompletions = [];
|
|
41
|
+
for (const chatCompletion of chatCompletions) {
|
|
42
|
+
const relevantChunks = chunks
|
|
43
|
+
.filter((chunk) => chunk.chatCompletionId === chatCompletion.data?.id)
|
|
44
|
+
.sort((a, b) => a.timestamp.localeCompare(b.timestamp));
|
|
45
|
+
const fullMessage = relevantChunks
|
|
46
|
+
.map((chunk) => chunk.content)
|
|
47
|
+
.join('');
|
|
48
|
+
newChatCompletions.push({
|
|
49
|
+
data: chatCompletion.data
|
|
50
|
+
? {
|
|
51
|
+
id: chatCompletion.data.id,
|
|
52
|
+
createdAt: chatCompletion.data.createdAt,
|
|
53
|
+
streamId: chatCompletion.data.streamId,
|
|
54
|
+
}
|
|
55
|
+
: null,
|
|
56
|
+
streamId: chatCompletion.streamId,
|
|
57
|
+
error: chatCompletion.error,
|
|
58
|
+
loading: chatCompletion.loading,
|
|
59
|
+
doneStreaming: chatCompletion.doneStreaming,
|
|
60
|
+
response: fullMessage,
|
|
61
|
+
chunks: relevantChunks,
|
|
62
|
+
});
|
|
63
|
+
}
|
|
64
|
+
setChatCompletions(newChatCompletions);
|
|
65
|
+
}, [chunks]);
|
|
66
|
+
const createStreamingChatCompletion = useCallback(async (params) => {
|
|
67
|
+
// Step 1: Create stream if not exists
|
|
68
|
+
const streamResponse = await createChatCompletionStream({
|
|
69
|
+
pk: context.fency.publishableKey,
|
|
70
|
+
baseUrl: context.fency.baseUrl,
|
|
71
|
+
});
|
|
72
|
+
if (streamResponse.type === 'success') {
|
|
73
|
+
setStream(streamResponse.stream);
|
|
74
|
+
setChatCompletions([
|
|
75
|
+
...chatCompletions,
|
|
76
|
+
{
|
|
77
|
+
streamId: streamResponse.stream.id,
|
|
78
|
+
data: null,
|
|
79
|
+
error: null,
|
|
80
|
+
response: '',
|
|
81
|
+
chunks: [],
|
|
82
|
+
loading: true,
|
|
83
|
+
doneStreaming: false,
|
|
84
|
+
},
|
|
85
|
+
]);
|
|
86
|
+
// Step 2: Send chat completion
|
|
87
|
+
const chatCompletion = await createChatCompletion({
|
|
88
|
+
pk: context.fency.publishableKey,
|
|
89
|
+
baseUrl: context.fency.baseUrl,
|
|
90
|
+
request: {
|
|
91
|
+
streamId: streamResponse.stream.id,
|
|
92
|
+
openai: params.openai
|
|
93
|
+
? {
|
|
94
|
+
model: params.openai.model,
|
|
95
|
+
messages: params.openai.messages,
|
|
96
|
+
}
|
|
97
|
+
: undefined,
|
|
98
|
+
gemini: params.gemini
|
|
99
|
+
? {
|
|
100
|
+
model: params.gemini.model,
|
|
101
|
+
content: params.gemini.content,
|
|
102
|
+
}
|
|
103
|
+
: undefined,
|
|
104
|
+
anthropic: params.anthropic
|
|
105
|
+
? {
|
|
106
|
+
model: params.anthropic.model,
|
|
107
|
+
messages: params.anthropic.messages,
|
|
108
|
+
}
|
|
109
|
+
: undefined,
|
|
110
|
+
},
|
|
111
|
+
});
|
|
112
|
+
if (chatCompletion.type === 'success' &&
|
|
113
|
+
chatCompletion.completion) {
|
|
114
|
+
const newCompletion = {
|
|
115
|
+
triggeredAt: new Date().toISOString(),
|
|
116
|
+
streamId: streamResponse.stream.id,
|
|
117
|
+
data: {
|
|
118
|
+
id: chatCompletion.completion.id,
|
|
119
|
+
createdAt: chatCompletion.completion.createdAt,
|
|
120
|
+
streamId: streamResponse.stream.id,
|
|
121
|
+
},
|
|
122
|
+
error: null,
|
|
123
|
+
response: '',
|
|
124
|
+
chunks: [],
|
|
125
|
+
doneStreaming: false,
|
|
126
|
+
loading: true,
|
|
127
|
+
};
|
|
128
|
+
setChatCompletions((prev) => [
|
|
129
|
+
...prev.filter((c) => c.streamId !== streamResponse.stream.id),
|
|
130
|
+
newCompletion,
|
|
131
|
+
]);
|
|
132
|
+
return {
|
|
133
|
+
type: 'success',
|
|
134
|
+
chatCompletionStreamId: streamResponse.stream.id,
|
|
135
|
+
chatCompletionId: chatCompletion.completion.id,
|
|
136
|
+
};
|
|
137
|
+
}
|
|
138
|
+
else if (chatCompletion.type === 'error') {
|
|
139
|
+
setChatCompletions((prev) => [
|
|
140
|
+
...prev.filter((c) => c.streamId !== streamResponse.stream.id),
|
|
141
|
+
{
|
|
142
|
+
streamId: streamResponse.stream.id,
|
|
143
|
+
error: chatCompletion.error,
|
|
144
|
+
response: '',
|
|
145
|
+
chunks: [],
|
|
146
|
+
loading: false,
|
|
147
|
+
doneStreaming: false,
|
|
148
|
+
data: null,
|
|
149
|
+
},
|
|
150
|
+
]);
|
|
151
|
+
return {
|
|
152
|
+
type: 'error',
|
|
153
|
+
error: chatCompletion.error,
|
|
154
|
+
};
|
|
155
|
+
}
|
|
156
|
+
else {
|
|
157
|
+
const error = {
|
|
158
|
+
message: 'No response received',
|
|
159
|
+
code: 'UnknownError',
|
|
160
|
+
};
|
|
161
|
+
setChatCompletions((prev) => [
|
|
162
|
+
...prev.filter((c) => c.streamId !== streamResponse.stream.id),
|
|
163
|
+
{
|
|
164
|
+
streamId: streamResponse.stream.id,
|
|
165
|
+
error: error,
|
|
166
|
+
response: '',
|
|
167
|
+
chunks: [],
|
|
168
|
+
loading: false,
|
|
169
|
+
doneStreaming: false,
|
|
170
|
+
data: null,
|
|
171
|
+
},
|
|
172
|
+
]);
|
|
173
|
+
return {
|
|
174
|
+
type: 'error',
|
|
175
|
+
error: error,
|
|
176
|
+
};
|
|
177
|
+
}
|
|
178
|
+
}
|
|
179
|
+
else {
|
|
180
|
+
console.error(streamResponse.error);
|
|
181
|
+
return streamResponse;
|
|
182
|
+
}
|
|
183
|
+
}, [context]);
|
|
184
|
+
return {
|
|
185
|
+
chatCompletions,
|
|
186
|
+
createStreamingChatCompletion,
|
|
187
|
+
stream,
|
|
188
|
+
};
|
|
189
|
+
};
|
|
@@ -0,0 +1,32 @@
|
|
|
1
|
+
import { ApiError } from '@fencyai/js/lib/types/ApiError';
|
|
2
|
+
import { z, ZodTypeAny } from 'zod';
|
|
3
|
+
import { FencyContext } from '../provider/FencyContext';
|
|
4
|
+
import { CreateGeminiChatCompletionParams, CreateOpenAiChatCompletionParams } from './types';
|
|
5
|
+
export interface StructuredChatCompletionData<T extends ZodTypeAny> {
|
|
6
|
+
id: string;
|
|
7
|
+
createdAt: string;
|
|
8
|
+
response: object;
|
|
9
|
+
structuredResponse: z.infer<T>;
|
|
10
|
+
}
|
|
11
|
+
export interface StructuredChatCompletion<T extends ZodTypeAny> {
|
|
12
|
+
triggeredAt: string;
|
|
13
|
+
data: StructuredChatCompletionData<T> | null;
|
|
14
|
+
error: ApiError | null;
|
|
15
|
+
loading: boolean;
|
|
16
|
+
}
|
|
17
|
+
export type StructuredChatCompletionResponse<T extends ZodTypeAny> = {
|
|
18
|
+
type: 'success';
|
|
19
|
+
data: StructuredChatCompletionData<T>;
|
|
20
|
+
} | {
|
|
21
|
+
type: 'error';
|
|
22
|
+
error: ApiError;
|
|
23
|
+
};
|
|
24
|
+
export interface CreateStructuredChatCompletionParams<T extends ZodTypeAny> {
|
|
25
|
+
openai?: CreateOpenAiChatCompletionParams;
|
|
26
|
+
gemini?: CreateGeminiChatCompletionParams;
|
|
27
|
+
responseFormat: T;
|
|
28
|
+
}
|
|
29
|
+
export declare const useStructuredChatCompletions: (context: FencyContext) => {
|
|
30
|
+
chatCompletions: StructuredChatCompletion<z.ZodType<unknown, unknown, z.core.$ZodTypeInternals<unknown, unknown>>>[];
|
|
31
|
+
createStructuredChatCompletion: <T extends ZodTypeAny>(params: CreateStructuredChatCompletionParams<T>) => Promise<StructuredChatCompletionResponse<T>>;
|
|
32
|
+
};
|
|
@@ -0,0 +1,99 @@
|
|
|
1
|
+
import { createChatCompletion } from '@fencyai/js';
|
|
2
|
+
import { useCallback, useState } from 'react';
|
|
3
|
+
import { z } from 'zod';
|
|
4
|
+
export const useStructuredChatCompletions = (context) => {
|
|
5
|
+
const [chatCompletions, setChatCompletions] = useState([]);
|
|
6
|
+
const createStructuredChatCompletion = useCallback(async (params) => {
|
|
7
|
+
const triggeredAt = new Date().toISOString();
|
|
8
|
+
setChatCompletions((prev) => [
|
|
9
|
+
...prev,
|
|
10
|
+
{
|
|
11
|
+
triggeredAt,
|
|
12
|
+
data: null,
|
|
13
|
+
error: null,
|
|
14
|
+
loading: true,
|
|
15
|
+
},
|
|
16
|
+
]);
|
|
17
|
+
const jsonSchema = z.toJSONSchema(params.responseFormat);
|
|
18
|
+
const parsedJsonSchema = JSON.stringify(jsonSchema);
|
|
19
|
+
const response = await createChatCompletion({
|
|
20
|
+
pk: context.fency.publishableKey,
|
|
21
|
+
baseUrl: context.fency.baseUrl,
|
|
22
|
+
request: {
|
|
23
|
+
openai: params.openai
|
|
24
|
+
? {
|
|
25
|
+
model: params.openai.model,
|
|
26
|
+
responseJsonSchema: parsedJsonSchema,
|
|
27
|
+
messages: params.openai.messages,
|
|
28
|
+
}
|
|
29
|
+
: undefined,
|
|
30
|
+
gemini: params.gemini
|
|
31
|
+
? {
|
|
32
|
+
model: params.gemini.model,
|
|
33
|
+
responseJsonSchema: parsedJsonSchema,
|
|
34
|
+
content: params.gemini.content,
|
|
35
|
+
}
|
|
36
|
+
: undefined,
|
|
37
|
+
},
|
|
38
|
+
});
|
|
39
|
+
if (response.type === 'success') {
|
|
40
|
+
if (response.completion.response) {
|
|
41
|
+
const data = {
|
|
42
|
+
id: response.completion.id,
|
|
43
|
+
createdAt: response.completion.createdAt,
|
|
44
|
+
response: params.responseFormat.parse(JSON.parse(response.completion.response)),
|
|
45
|
+
structuredResponse: params.responseFormat.parse(JSON.parse(response.completion.response)),
|
|
46
|
+
};
|
|
47
|
+
const structuredChatCompletion = {
|
|
48
|
+
triggeredAt,
|
|
49
|
+
data: data,
|
|
50
|
+
error: null,
|
|
51
|
+
loading: false,
|
|
52
|
+
};
|
|
53
|
+
setChatCompletions((prev) => [
|
|
54
|
+
...prev.filter((c) => c.triggeredAt !== triggeredAt),
|
|
55
|
+
structuredChatCompletion,
|
|
56
|
+
]);
|
|
57
|
+
return {
|
|
58
|
+
type: 'success',
|
|
59
|
+
data: data,
|
|
60
|
+
};
|
|
61
|
+
}
|
|
62
|
+
else {
|
|
63
|
+
const error = {
|
|
64
|
+
code: 'NoResponse',
|
|
65
|
+
message: 'No response from chat completion',
|
|
66
|
+
};
|
|
67
|
+
setChatCompletions((prev) => [
|
|
68
|
+
...prev.filter((c) => c.triggeredAt !== triggeredAt),
|
|
69
|
+
{
|
|
70
|
+
triggeredAt,
|
|
71
|
+
data: null,
|
|
72
|
+
error: error,
|
|
73
|
+
loading: false,
|
|
74
|
+
},
|
|
75
|
+
]);
|
|
76
|
+
return {
|
|
77
|
+
type: 'error',
|
|
78
|
+
error: error,
|
|
79
|
+
};
|
|
80
|
+
}
|
|
81
|
+
}
|
|
82
|
+
else {
|
|
83
|
+
setChatCompletions((prev) => [
|
|
84
|
+
...prev.filter((c) => c.triggeredAt !== triggeredAt),
|
|
85
|
+
{
|
|
86
|
+
triggeredAt,
|
|
87
|
+
data: null,
|
|
88
|
+
error: response.error,
|
|
89
|
+
loading: false,
|
|
90
|
+
},
|
|
91
|
+
]);
|
|
92
|
+
return response;
|
|
93
|
+
}
|
|
94
|
+
}, [context]);
|
|
95
|
+
return {
|
|
96
|
+
chatCompletions,
|
|
97
|
+
createStructuredChatCompletion,
|
|
98
|
+
};
|
|
99
|
+
};
|
|
@@ -0,0 +1,30 @@
|
|
|
1
|
+
import { ApiError } from '@fencyai/js/lib/types/ApiError';
|
|
2
|
+
import { FencyContext } from '../provider/FencyContext';
|
|
3
|
+
import { CreateAnthropicChatCompletionParams, CreateGeminiChatCompletionParams, CreateOpenAiChatCompletionParams } from './types';
|
|
4
|
+
export interface SynchronousChatCompletionData {
|
|
5
|
+
id: string;
|
|
6
|
+
createdAt: string;
|
|
7
|
+
response: string;
|
|
8
|
+
}
|
|
9
|
+
export interface SynchronousChatCompletion {
|
|
10
|
+
triggeredAt: string;
|
|
11
|
+
data: SynchronousChatCompletionData | null;
|
|
12
|
+
error: ApiError | null;
|
|
13
|
+
loading: boolean;
|
|
14
|
+
}
|
|
15
|
+
export type SynchronousChatCompletionResponse = {
|
|
16
|
+
type: 'success';
|
|
17
|
+
data: SynchronousChatCompletionData;
|
|
18
|
+
} | {
|
|
19
|
+
type: 'error';
|
|
20
|
+
error: ApiError;
|
|
21
|
+
};
|
|
22
|
+
export interface CreateSynchronousChatCompletionParams {
|
|
23
|
+
openai?: CreateOpenAiChatCompletionParams;
|
|
24
|
+
gemini?: CreateGeminiChatCompletionParams;
|
|
25
|
+
anthropic?: CreateAnthropicChatCompletionParams;
|
|
26
|
+
}
|
|
27
|
+
export declare const useSynchronousChatCompletions: (context: FencyContext) => {
|
|
28
|
+
chatCompletions: SynchronousChatCompletion[];
|
|
29
|
+
createSynchronousChatCompletion: (params: CreateSynchronousChatCompletionParams) => Promise<SynchronousChatCompletionResponse>;
|
|
30
|
+
};
|
|
@@ -0,0 +1,106 @@
|
|
|
1
|
+
import { createChatCompletion } from '@fencyai/js';
|
|
2
|
+
import { useCallback, useState } from 'react';
|
|
3
|
+
export const useSynchronousChatCompletions = (context) => {
|
|
4
|
+
const [chatCompletions, setChatCompletions] = useState([]);
|
|
5
|
+
const createSynchronousChatCompletion = useCallback(async (params) => {
|
|
6
|
+
const triggeredAt = new Date().toISOString();
|
|
7
|
+
setChatCompletions((prev) => [
|
|
8
|
+
...prev,
|
|
9
|
+
{
|
|
10
|
+
triggeredAt,
|
|
11
|
+
data: null,
|
|
12
|
+
error: null,
|
|
13
|
+
loading: true,
|
|
14
|
+
},
|
|
15
|
+
]);
|
|
16
|
+
const chatCompletion = await createChatCompletion({
|
|
17
|
+
pk: context.fency.publishableKey,
|
|
18
|
+
baseUrl: context.fency.baseUrl,
|
|
19
|
+
request: {
|
|
20
|
+
openai: params.openai
|
|
21
|
+
? {
|
|
22
|
+
model: params.openai.model,
|
|
23
|
+
messages: params.openai.messages,
|
|
24
|
+
temperature: params.openai.temperature,
|
|
25
|
+
topP: params.openai.topP,
|
|
26
|
+
}
|
|
27
|
+
: undefined,
|
|
28
|
+
gemini: params.gemini
|
|
29
|
+
? {
|
|
30
|
+
model: params.gemini.model,
|
|
31
|
+
content: params.gemini.content,
|
|
32
|
+
temperature: params.gemini.temperature,
|
|
33
|
+
topP: params.gemini.topP,
|
|
34
|
+
topK: params.gemini.topK,
|
|
35
|
+
}
|
|
36
|
+
: undefined,
|
|
37
|
+
anthropic: params.anthropic
|
|
38
|
+
? {
|
|
39
|
+
model: params.anthropic.model,
|
|
40
|
+
messages: params.anthropic.messages,
|
|
41
|
+
temperature: params.anthropic.temperature,
|
|
42
|
+
topP: params.anthropic.topP,
|
|
43
|
+
topK: params.anthropic.topK,
|
|
44
|
+
}
|
|
45
|
+
: undefined,
|
|
46
|
+
},
|
|
47
|
+
});
|
|
48
|
+
if (chatCompletion.type === 'success' &&
|
|
49
|
+
chatCompletion.completion.response) {
|
|
50
|
+
const updatedCompletion = {
|
|
51
|
+
triggeredAt,
|
|
52
|
+
data: {
|
|
53
|
+
id: chatCompletion.completion.id,
|
|
54
|
+
createdAt: chatCompletion.completion.createdAt,
|
|
55
|
+
response: chatCompletion.completion.response,
|
|
56
|
+
},
|
|
57
|
+
error: null,
|
|
58
|
+
loading: false,
|
|
59
|
+
};
|
|
60
|
+
setChatCompletions((prev) => [
|
|
61
|
+
...prev.filter((c) => c.triggeredAt !== triggeredAt),
|
|
62
|
+
updatedCompletion,
|
|
63
|
+
]);
|
|
64
|
+
return {
|
|
65
|
+
type: 'success',
|
|
66
|
+
data: updatedCompletion.data,
|
|
67
|
+
};
|
|
68
|
+
}
|
|
69
|
+
else if (chatCompletion.type === 'error') {
|
|
70
|
+
const errorCompletion = {
|
|
71
|
+
triggeredAt,
|
|
72
|
+
data: null,
|
|
73
|
+
error: chatCompletion.error,
|
|
74
|
+
loading: false,
|
|
75
|
+
};
|
|
76
|
+
setChatCompletions((prev) => [...prev, errorCompletion]);
|
|
77
|
+
return {
|
|
78
|
+
type: 'error',
|
|
79
|
+
error: chatCompletion.error,
|
|
80
|
+
};
|
|
81
|
+
}
|
|
82
|
+
else {
|
|
83
|
+
const error = {
|
|
84
|
+
message: 'No response received',
|
|
85
|
+
code: 'UnknownError',
|
|
86
|
+
};
|
|
87
|
+
setChatCompletions((prev) => [
|
|
88
|
+
...prev.filter((c) => c.triggeredAt !== triggeredAt),
|
|
89
|
+
{
|
|
90
|
+
triggeredAt,
|
|
91
|
+
data: null,
|
|
92
|
+
error: error,
|
|
93
|
+
loading: false,
|
|
94
|
+
},
|
|
95
|
+
]);
|
|
96
|
+
return {
|
|
97
|
+
type: 'error',
|
|
98
|
+
error: error,
|
|
99
|
+
};
|
|
100
|
+
}
|
|
101
|
+
}, [context]);
|
|
102
|
+
return {
|
|
103
|
+
chatCompletions,
|
|
104
|
+
createSynchronousChatCompletion,
|
|
105
|
+
};
|
|
106
|
+
};
|
|
@@ -1,4 +1,6 @@
|
|
|
1
|
-
export declare function useEventSource(
|
|
1
|
+
export declare function useEventSource(props?: {
|
|
2
|
+
onDone?: (url: string) => void;
|
|
3
|
+
}): {
|
|
2
4
|
chunks: ChatCompletionChunk[];
|
|
3
5
|
setUrl: import("react").Dispatch<import("react").SetStateAction<string | null | undefined>>;
|
|
4
6
|
url: string | null | undefined;
|
|
@@ -1,5 +1,5 @@
|
|
|
1
1
|
import { useEffect, useState } from 'react';
|
|
2
|
-
export function useEventSource() {
|
|
2
|
+
export function useEventSource(props) {
|
|
3
3
|
const [chunks, setChunks] = useState([]);
|
|
4
4
|
const [url, setUrl] = useState();
|
|
5
5
|
useEffect(() => {
|
|
@@ -10,6 +10,7 @@ export function useEventSource() {
|
|
|
10
10
|
if (message.data === '__END_OF_STREAM__') {
|
|
11
11
|
eventSource.close();
|
|
12
12
|
setUrl(null); // Clear URL to prevent reconnection
|
|
13
|
+
props?.onDone?.(url);
|
|
13
14
|
return;
|
|
14
15
|
}
|
|
15
16
|
const chunk = getChatCompletionChunk(message);
|
|
@@ -35,9 +36,17 @@ export function useEventSource() {
|
|
|
35
36
|
url,
|
|
36
37
|
};
|
|
37
38
|
}
|
|
38
|
-
|
|
39
|
-
|
|
40
|
-
|
|
39
|
+
function base64Decode(base64) {
|
|
40
|
+
// Decode Base64 -> binary string
|
|
41
|
+
const binary = atob(base64);
|
|
42
|
+
// Convert binary string -> Uint8Array
|
|
43
|
+
const bytes = new Uint8Array(binary.length);
|
|
44
|
+
for (let i = 0; i < binary.length; i++) {
|
|
45
|
+
bytes[i] = binary.charCodeAt(i);
|
|
46
|
+
}
|
|
47
|
+
// Decode UTF-8 bytes -> proper string
|
|
48
|
+
return new TextDecoder('utf-8').decode(bytes);
|
|
49
|
+
}
|
|
41
50
|
const getChatCompletionChunk = (message) => {
|
|
42
51
|
try {
|
|
43
52
|
const json = JSON.parse(base64Decode(message.data));
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@fencyai/react",
|
|
3
|
-
"version": "0.1.
|
|
3
|
+
"version": "0.1.41",
|
|
4
4
|
"description": "> TODO: description",
|
|
5
5
|
"author": "staklau <steinaageklaussen@gmail.com>",
|
|
6
6
|
"homepage": "",
|
|
@@ -36,7 +36,7 @@
|
|
|
36
36
|
"zod": "^4.0.5"
|
|
37
37
|
},
|
|
38
38
|
"devDependencies": {
|
|
39
|
-
"@fencyai/js": "^0.1.
|
|
39
|
+
"@fencyai/js": "^0.1.41",
|
|
40
40
|
"@types/jest": "^29.5.11",
|
|
41
41
|
"@types/node": "^20.10.5",
|
|
42
42
|
"@types/react": "^18.2.45",
|
|
@@ -45,8 +45,8 @@
|
|
|
45
45
|
"typescript": "^5.3.3"
|
|
46
46
|
},
|
|
47
47
|
"peerDependencies": {
|
|
48
|
-
"@fencyai/js": "^0.1.
|
|
48
|
+
"@fencyai/js": "^0.1.41",
|
|
49
49
|
"react": ">=16.8.0"
|
|
50
50
|
},
|
|
51
|
-
"gitHead": "
|
|
51
|
+
"gitHead": "cfe3e77d5266d35c1d39674bdcd2cb1888789b60"
|
|
52
52
|
}
|
|
@@ -1,53 +0,0 @@
|
|
|
1
|
-
import { ChatCompletion, CreateAnthropicChatCompletionParams, CreateGeminiChatCompletionParams, CreateOpenAiChatCompletionParams } from '@fencyai/js';
|
|
2
|
-
import { ApiError } from '@fencyai/js/lib/types/ApiError';
|
|
3
|
-
import z, { ZodTypeAny } from 'zod';
|
|
4
|
-
import { ChatCompletionChunk } from './useEventSource';
|
|
5
|
-
interface Completions {
|
|
6
|
-
chatCompletion: ChatCompletion & {
|
|
7
|
-
structuredResponse?: any;
|
|
8
|
-
};
|
|
9
|
-
chunks: ChatCompletionChunk[];
|
|
10
|
-
response: string;
|
|
11
|
-
}
|
|
12
|
-
interface HookResponse {
|
|
13
|
-
chatCompletions: Completions[];
|
|
14
|
-
createChatCompletion: (params: {
|
|
15
|
-
openai?: CreateOpenAiChatCompletionParams;
|
|
16
|
-
anthropic?: CreateAnthropicChatCompletionParams;
|
|
17
|
-
gemini?: CreateGeminiChatCompletionParams;
|
|
18
|
-
}) => Promise<{
|
|
19
|
-
type: 'success';
|
|
20
|
-
chatCompletion: ChatCompletion;
|
|
21
|
-
} | {
|
|
22
|
-
type: 'error';
|
|
23
|
-
error: ApiError;
|
|
24
|
-
}>;
|
|
25
|
-
createStructuredChatCompletion: <T extends ZodTypeAny>(params: {
|
|
26
|
-
openai?: CreateOpenAiChatCompletionParams;
|
|
27
|
-
gemini?: CreateGeminiChatCompletionParams;
|
|
28
|
-
responseFormat: T;
|
|
29
|
-
}) => Promise<{
|
|
30
|
-
type: 'success';
|
|
31
|
-
chatCompletion: ChatCompletion & {
|
|
32
|
-
structuredResponse: z.infer<T>;
|
|
33
|
-
};
|
|
34
|
-
} | {
|
|
35
|
-
type: 'error';
|
|
36
|
-
error: ApiError;
|
|
37
|
-
}>;
|
|
38
|
-
createStreamingChatCompletion: (params: {
|
|
39
|
-
openai?: CreateOpenAiChatCompletionParams;
|
|
40
|
-
anthropic?: CreateAnthropicChatCompletionParams;
|
|
41
|
-
gemini?: CreateGeminiChatCompletionParams;
|
|
42
|
-
}) => Promise<{
|
|
43
|
-
type: 'success';
|
|
44
|
-
chatCompletionStreamId: string;
|
|
45
|
-
chatCompletionId: string;
|
|
46
|
-
} | {
|
|
47
|
-
type: 'error';
|
|
48
|
-
error: ApiError;
|
|
49
|
-
}>;
|
|
50
|
-
latest: Completions | null;
|
|
51
|
-
}
|
|
52
|
-
export declare function useChatCompletions(): HookResponse;
|
|
53
|
-
export {};
|
|
@@ -1,192 +0,0 @@
|
|
|
1
|
-
// hooks/useChatCompletion.ts
|
|
2
|
-
import { createChatCompletion, createChatCompletionStream, } from '@fencyai/js';
|
|
3
|
-
import { useCallback, useEffect, useMemo, useState } from 'react';
|
|
4
|
-
import z from 'zod';
|
|
5
|
-
import { useFencyContext } from '../provider/useFencyContext';
|
|
6
|
-
import { useEventSource } from './useEventSource';
|
|
7
|
-
/**
 * React hook exposing helpers to create chat completions against the Fency
 * API — synchronous, structured (zod-validated) and streaming — and
 * accumulating every created completion, together with its streamed chunks,
 * in local state.
 *
 * Returns (see HookResponse in the declaration file):
 * - createChatCompletion: one-shot completion
 * - createStructuredChatCompletion: completion parsed against a zod schema
 * - createStreamingChatCompletion: opens a stream, then attaches a completion
 * - chatCompletions: all completions with their chunks and joined response
 * - latest: most recent completion, or null when none exist yet
 */
export function useChatCompletions() {
    const context = useFencyContext();
    // SSE chunks arrive through a shared EventSource; setUrl (re)connects it.
    const { chunks, setUrl } = useEventSource();
    const [chatCompletions, setChatCompletions] = useState([]);
    const [stream, setStream] = useState(null);
    const createStreamingChatCompletion = useCallback(async (params) => {
        // Step 1: create the stream that chunks will be delivered on.
        const streamResponse = await createChatCompletionStream({
            pk: context.fency.publishableKey,
            baseUrl: context.fency.baseUrl,
        });
        if (streamResponse.type === 'success') {
            setStream(streamResponse.stream);
            // Step 2: request the completion, attached to the new stream.
            const chatCompletion = await createChatCompletion({
                pk: context.fency.publishableKey,
                baseUrl: context.fency.baseUrl,
                request: {
                    streamId: streamResponse.stream.id,
                    openai: params.openai
                        ? {
                            model: params.openai.model,
                            messages: params.openai.messages,
                        }
                        : undefined,
                    gemini: params.gemini
                        ? {
                            model: params.gemini.model,
                            content: params.gemini.content,
                        }
                        : undefined,
                    anthropic: params.anthropic
                        ? {
                            model: params.anthropic.model,
                            messages: params.anthropic.messages,
                        }
                        : undefined,
                },
            });
            if (chatCompletion.type === 'success') {
                setChatCompletions((prev) => [
                    ...prev,
                    chatCompletion.completion,
                ]);
                return {
                    type: 'success',
                    chatCompletionStreamId: streamResponse.stream.id,
                    chatCompletionId: chatCompletion.completion.id,
                };
            }
            else {
                // Propagate the { type: 'error', error } result unchanged.
                return chatCompletion;
            }
        }
        else {
            return streamResponse;
        }
    }, [context]);
    const createSynchronousChatCompletion = useCallback(async (params) => {
        // One-shot completion: no stream, full response in the result.
        const chatCompletion = await createChatCompletion({
            pk: context.fency.publishableKey,
            baseUrl: context.fency.baseUrl,
            request: {
                openai: params.openai
                    ? {
                        model: params.openai.model,
                        messages: params.openai.messages,
                        temperature: params.openai.temperature,
                        topP: params.openai.topP,
                    }
                    : undefined,
                gemini: params.gemini
                    ? {
                        model: params.gemini.model,
                        content: params.gemini.content,
                        temperature: params.gemini.temperature,
                        topP: params.gemini.topP,
                        topK: params.gemini.topK,
                    }
                    : undefined,
                anthropic: params.anthropic
                    ? {
                        model: params.anthropic.model,
                        messages: params.anthropic.messages,
                        temperature: params.anthropic.temperature,
                        topP: params.anthropic.topP,
                        topK: params.anthropic.topK,
                    }
                    : undefined,
            },
        });
        if (chatCompletion.type === 'success') {
            setChatCompletions((prev) => [
                ...prev,
                chatCompletion.completion,
            ]);
            return {
                type: 'success',
                chatCompletion: chatCompletion.completion,
            };
        }
        else {
            return chatCompletion;
        }
    }, [context]);
    const createStructuredChatCompletion = useCallback(async (params) => {
        // Convert the caller's zod schema to a JSON-schema string the API accepts.
        const jsonSchema = z.toJSONSchema(params.responseFormat);
        const parsedJsonSchema = JSON.stringify(jsonSchema);
        const response = await createChatCompletion({
            pk: context.fency.publishableKey,
            baseUrl: context.fency.baseUrl,
            request: {
                openai: params.openai
                    ? {
                        model: params.openai.model,
                        responseJsonSchema: parsedJsonSchema,
                        messages: params.openai.messages,
                    }
                    : undefined,
                gemini: params.gemini
                    ? {
                        model: params.gemini.model,
                        responseJsonSchema: parsedJsonSchema,
                        content: params.gemini.content,
                    }
                    : undefined,
            },
        });
        if (response.type === 'success') {
            setChatCompletions((prev) => [...prev, response.completion]);
            if (response.completion.response) {
                // FIX: JSON.parse / schema.parse used to throw out of the hook
                // on malformed model output; the declared contract is to return
                // { type: 'error' }, so surface failures as an error result.
                try {
                    return {
                        type: 'success',
                        chatCompletion: {
                            ...response.completion,
                            structuredResponse: params.responseFormat.parse(JSON.parse(response.completion.response)),
                        },
                    };
                }
                catch (e) {
                    return {
                        type: 'error',
                        error: {
                            code: 'InvalidStructuredResponse',
                            message: e instanceof Error
                                ? e.message
                                : 'Failed to parse structured response',
                        },
                    };
                }
            }
            else {
                return {
                    type: 'error',
                    error: {
                        code: 'NoResponse',
                        message: 'No response from chat completion',
                    },
                };
            }
        }
        else {
            return response;
        }
    }, [context]);
    const completions = useMemo(() => {
        // Join each completion with its stream chunks (ordered by timestamp)
        // and the concatenated response text.
        const completions = [];
        for (const chatCompletion of chatCompletions) {
            const relevantChunks = chunks
                .filter((chunk) => chunk.chatCompletionId === chatCompletion.id)
                .sort((a, b) => a.timestamp.localeCompare(b.timestamp));
            const fullMessage = relevantChunks
                .map((chunk) => chunk.content)
                .join('');
            completions.push({
                chatCompletion,
                chunks: relevantChunks,
                response: fullMessage,
            });
        }
        return completions;
    }, [chunks, chatCompletions]);
    const latest = useMemo(() => {
        // FIX: indexing an empty array yields undefined, but the declared
        // type is `Completions | null` — coerce so consumers can test === null.
        return completions[completions.length - 1] ?? null;
    }, [completions]);
    useEffect(() => {
        // Connect the EventSource once a stream exists.
        // FIX: baseUrl is read inside the effect but was missing from the
        // dependency array, risking a stale URL if the provider config changes.
        if (stream) {
            setUrl(`${context.fency.baseUrl}/v1/pub/chat-completion-streams/${stream.id}?pk=${context.fency.publishableKey}`);
        }
    }, [stream, context.fency.baseUrl, context.fency.publishableKey, setUrl]);
    return {
        createChatCompletion: createSynchronousChatCompletion,
        createStructuredChatCompletion: createStructuredChatCompletion,
        createStreamingChatCompletion: createStreamingChatCompletion,
        chatCompletions: completions,
        latest,
    };
}
|