@fencyai/react 0.1.82 → 0.1.84
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/assets/index.css +1 -0
- package/dist/chat/Chat.d.ts +8 -0
- package/dist/chat/ChatResponse.d.ts +5 -0
- package/dist/chat/EditableContent.d.ts +11 -0
- package/dist/chat/GhostWrapper.d.ts +10 -0
- package/dist/chat/MentionCategory.d.ts +5 -0
- package/dist/chat/MentionInput.d.ts +20 -0
- package/dist/chat/MentionOption.d.ts +5 -0
- package/dist/chat/MentionPopover.d.ts +10 -0
- package/dist/chat/hooks/useContentEditable.d.ts +10 -0
- package/dist/chat/hooks/useKeyboardNavigation.d.ts +10 -0
- package/dist/chat/hooks/useMentionParser.d.ts +15 -0
- package/dist/chat/hooks/useMentionRenderer.d.ts +22 -0
- package/dist/chat/hooks/useMentionState.d.ts +14 -0
- package/{lib → dist}/hooks/useFencyEventSource/index.d.ts +1 -1
- package/dist/hooks/useListMemoryTypes/index.d.ts +2 -0
- package/dist/hooks/usePaginatedQuery/index.d.ts +51 -0
- package/{lib → dist}/index.d.ts +8 -6
- package/dist/index.js +34858 -0
- package/{lib → dist}/provider/FencyContextValue.d.ts +1 -1
- package/{lib → dist}/provider/useFencyContext.d.ts +1 -1
- package/dist/types/AiModel.d.ts +2 -0
- package/{lib → dist}/types/CreateGeminiChatCompletionParams.d.ts +1 -2
- package/{lib → dist}/types/CreateOpenAiChatCompletionParams.d.ts +1 -2
- package/dist/types/FencyContext.d.ts +12 -0
- package/dist/types/ListMemoryTypesPage.d.ts +7 -0
- package/{lib/types/ListFilesParams.d.ts → dist/types/ListMemoryTypesParams.d.ts} +1 -1
- package/dist/types/ListMemoryTypesResult.d.ts +9 -0
- package/{lib → dist}/types/UseBasicChatCompletions.d.ts +3 -3
- package/dist/types/UseListMemoryTypes.d.ts +10 -0
- package/dist/types/UseListMemoryTypesProps.d.ts +3 -0
- package/{lib → dist}/types/UseStreamingChatCompletions.d.ts +3 -3
- package/{lib → dist}/types/index.d.ts +5 -13
- package/package.json +23 -15
- package/lib/hooks/useBasicChatCompletions/index.js +0 -92
- package/lib/hooks/useCreateFiles/index.d.ts +0 -3
- package/lib/hooks/useCreateFiles/index.js +0 -72
- package/lib/hooks/useFencyEventSource/index.js +0 -39
- package/lib/hooks/useListFiles/index.d.ts +0 -3
- package/lib/hooks/useListFiles/index.js +0 -125
- package/lib/hooks/useSearchFiles/index.d.ts +0 -2
- package/lib/hooks/useSearchFiles/index.js +0 -26
- package/lib/hooks/useStream/index.js +0 -73
- package/lib/hooks/useStream/toStreamData.js +0 -116
- package/lib/hooks/useStreamingChatCompletions/index.js +0 -205
- package/lib/hooks/useStructuredChatCompletions/index.js +0 -124
- package/lib/hooks/useWebsites/index.d.ts +0 -3
- package/lib/hooks/useWebsites/index.js +0 -55
- package/lib/index.js +0 -14
- package/lib/provider/FencyContextValue.js +0 -3
- package/lib/provider/FencyProvider.js +0 -32
- package/lib/provider/useFencyContext.js +0 -12
- package/lib/types/AiModel.d.ts +0 -4
- package/lib/types/AiModel.js +0 -1
- package/lib/types/BasicChatCompletion.js +0 -1
- package/lib/types/BasicChatCompletionData.js +0 -1
- package/lib/types/BasicChatCompletionResponse.js +0 -1
- package/lib/types/CreateBasicChatCompletionParams.js +0 -1
- package/lib/types/CreateClaudeChatCompletionParams.js +0 -1
- package/lib/types/CreateFileParams.d.ts +0 -9
- package/lib/types/CreateFileParams.js +0 -1
- package/lib/types/CreateGeminiChatCompletionParams.js +0 -1
- package/lib/types/CreateGenericChatCompletionParams.js +0 -169
- package/lib/types/CreateOpenAiChatCompletionParams.js +0 -1
- package/lib/types/CreateStreamResponse.js +0 -1
- package/lib/types/CreateStreamingChatCompletionParams.js +0 -1
- package/lib/types/CreateStreamingChatCompletionResponse.js +0 -1
- package/lib/types/CreateStructuredChatCompletionParams.js +0 -1
- package/lib/types/CreateWebsiteParams.d.ts +0 -5
- package/lib/types/CreateWebsiteParams.js +0 -1
- package/lib/types/FencyContext.d.ts +0 -6
- package/lib/types/FencyContext.js +0 -1
- package/lib/types/FencyProviderProps.js +0 -1
- package/lib/types/ListFilesPage.d.ts +0 -8
- package/lib/types/ListFilesPage.js +0 -1
- package/lib/types/ListFilesParams.js +0 -1
- package/lib/types/ListFilesResult.d.ts +0 -9
- package/lib/types/ListFilesResult.js +0 -1
- package/lib/types/SearchFilesParams.d.ts +0 -6
- package/lib/types/SearchFilesParams.js +0 -1
- package/lib/types/StreamData.js +0 -1
- package/lib/types/StreamError.js +0 -1
- package/lib/types/StreamingChatCompletion.js +0 -1
- package/lib/types/StreamingChatCompletionData.js +0 -1
- package/lib/types/StructuredChatCompletion.js +0 -1
- package/lib/types/StructuredChatCompletionData.js +0 -1
- package/lib/types/StructuredChatCompletionResponse.js +0 -1
- package/lib/types/UseBasicChatCompletions.js +0 -1
- package/lib/types/UseCreateFiles.d.ts +0 -6
- package/lib/types/UseCreateFiles.js +0 -1
- package/lib/types/UseCreateFilesProps.d.ts +0 -6
- package/lib/types/UseCreateFilesProps.js +0 -1
- package/lib/types/UseListFiles.d.ts +0 -10
- package/lib/types/UseListFiles.js +0 -1
- package/lib/types/UseListFilesProps.d.ts +0 -3
- package/lib/types/UseListFilesProps.js +0 -1
- package/lib/types/UseSearchFiles.d.ts +0 -6
- package/lib/types/UseSearchFiles.js +0 -1
- package/lib/types/UseStream.js +0 -1
- package/lib/types/UseStreamProps.js +0 -1
- package/lib/types/UseStreamingChatCompletions.js +0 -1
- package/lib/types/UseStreamingChatCompletionsProps.js +0 -1
- package/lib/types/UseStructuredChatCompletions.js +0 -1
- package/lib/types/UseWebsites.d.ts +0 -6
- package/lib/types/UseWebsites.js +0 -1
- package/lib/types/UseWebsitesProps.d.ts +0 -5
- package/lib/types/UseWebsitesProps.js +0 -1
- package/lib/types/index.js +0 -48
- /package/{lib → dist}/hooks/useBasicChatCompletions/index.d.ts +0 -0
- /package/{lib → dist}/hooks/useStream/index.d.ts +0 -0
- /package/{lib → dist}/hooks/useStream/toStreamData.d.ts +0 -0
- /package/{lib → dist}/hooks/useStreamingChatCompletions/index.d.ts +0 -0
- /package/{lib → dist}/hooks/useStructuredChatCompletions/index.d.ts +0 -0
- /package/{lib → dist}/provider/FencyProvider.d.ts +0 -0
- /package/{lib → dist}/types/BasicChatCompletion.d.ts +0 -0
- /package/{lib → dist}/types/BasicChatCompletionData.d.ts +0 -0
- /package/{lib → dist}/types/BasicChatCompletionResponse.d.ts +0 -0
- /package/{lib → dist}/types/CreateBasicChatCompletionParams.d.ts +0 -0
- /package/{lib → dist}/types/CreateClaudeChatCompletionParams.d.ts +0 -0
- /package/{lib → dist}/types/CreateGenericChatCompletionParams.d.ts +0 -0
- /package/{lib → dist}/types/CreateStreamResponse.d.ts +0 -0
- /package/{lib → dist}/types/CreateStreamingChatCompletionParams.d.ts +0 -0
- /package/{lib → dist}/types/CreateStreamingChatCompletionResponse.d.ts +0 -0
- /package/{lib → dist}/types/CreateStructuredChatCompletionParams.d.ts +0 -0
- /package/{lib → dist}/types/FencyProviderProps.d.ts +0 -0
- /package/{lib → dist}/types/StreamData.d.ts +0 -0
- /package/{lib → dist}/types/StreamError.d.ts +0 -0
- /package/{lib → dist}/types/StreamingChatCompletion.d.ts +0 -0
- /package/{lib → dist}/types/StreamingChatCompletionData.d.ts +0 -0
- /package/{lib → dist}/types/StructuredChatCompletion.d.ts +0 -0
- /package/{lib → dist}/types/StructuredChatCompletionData.d.ts +0 -0
- /package/{lib → dist}/types/StructuredChatCompletionResponse.d.ts +0 -0
- /package/{lib → dist}/types/UseStream.d.ts +0 -0
- /package/{lib → dist}/types/UseStreamProps.d.ts +0 -0
- /package/{lib → dist}/types/UseStreamingChatCompletionsProps.d.ts +0 -0
- /package/{lib → dist}/types/UseStructuredChatCompletions.d.ts +0 -0
package/{lib → dist}/provider/FencyContextValue.d.ts
CHANGED

@@ -1,2 +1,2 @@
 import { FencyContext } from '../types/FencyContext';
-export declare const FencyContextValue: import(
+export declare const FencyContextValue: import('react').Context<FencyContext | undefined>;

package/{lib → dist}/types/CreateGeminiChatCompletionParams.d.ts
CHANGED

@@ -1,5 +1,4 @@
-import { GeminiModel } from '@fencyai/js';
-import { GeminiChatCompletionMessage } from '@fencyai/js';
+import { GeminiModel, GeminiChatCompletionMessage } from '@fencyai/js';
 export interface CreateGeminiChatCompletionParams {
     model: GeminiModel;
     messages: Array<GeminiChatCompletionMessage>;

package/{lib → dist}/types/CreateOpenAiChatCompletionParams.d.ts
CHANGED

@@ -1,5 +1,4 @@
-import { OpenAiModel } from '@fencyai/js';
-import { ChatCompletionMessage } from '@fencyai/js';
+import { OpenAiModel, ChatCompletionMessage } from '@fencyai/js';
 export interface CreateOpenAiChatCompletionParams {
     model: OpenAiModel;
     messages: Array<ChatCompletionMessage>;

package/dist/types/FencyContext.d.ts
ADDED

@@ -0,0 +1,12 @@
+import { FencyInstance, Stream, CreateStreamRequest } from '@fencyai/js';
+export interface StreamCache {
+    stream: Stream;
+    createdAt: number;
+}
+export interface FencyContext {
+    fency: FencyInstance;
+    loading: boolean;
+    error: Error | null;
+    activeStream: StreamCache | null;
+    getOrCreateStream: (params: CreateStreamRequest, maxAge?: number) => Promise<Stream>;
+}

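The new context value centralises stream handling: getOrCreateStream reuses activeStream while it is younger than maxAge and creates a fresh stream otherwise. A minimal consumer sketch, assuming the request shape matches the { type: 'FileStream' } request the removed useCreateFiles hook sent, and using an illustrative relative import from the new dist/types layout:

// Hypothetical usage, not part of the package.
import type { FencyContext } from './FencyContext';

async function openFileStream(ctx: FencyContext): Promise<string> {
    // Reuse a cached stream no older than 60 seconds; otherwise the provider creates one.
    const stream = await ctx.getOrCreateStream({ type: 'FileStream' }, 60_000);
    return stream.id;
}
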
package/{lib → dist}/types/UseBasicChatCompletions.d.ts
CHANGED

@@ -1,6 +1,6 @@
-import { BasicChatCompletion } from
-import { CreateBasicChatCompletionParams } from
-import { BasicChatCompletionResponse } from
+import { BasicChatCompletion } from './BasicChatCompletion';
+import { CreateBasicChatCompletionParams } from './CreateBasicChatCompletionParams';
+import { BasicChatCompletionResponse } from './BasicChatCompletionResponse';
 export interface UseBasicChatCompletions {
     chatCompletions: BasicChatCompletion[];
     createBasicChatCompletion: (params: CreateBasicChatCompletionParams) => Promise<BasicChatCompletionResponse>;

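Only the import style changes here; the hook contract is unchanged. Judging by the removed lib/hooks/useBasicChatCompletions implementation further down, BasicChatCompletionResponse is a success/error union, so a caller can branch on it. An illustrative sketch, with field names taken from that removed implementation rather than from the new .d.ts (which this diff does not show):

// Hypothetical usage, assuming the response union is unchanged.
import type { UseBasicChatCompletions } from './UseBasicChatCompletions';

async function ask(api: UseBasicChatCompletions, params: Parameters<UseBasicChatCompletions['createBasicChatCompletion']>[0]) {
    const response = await api.createBasicChatCompletion(params);
    if (response.type === 'success') {
        return response.data.response; // the model response payload
    }
    throw new Error(response.error.message);
}
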
package/dist/types/UseListMemoryTypes.d.ts
ADDED

@@ -0,0 +1,10 @@
+import { ListMemoryTypesPage } from './ListMemoryTypesPage';
+import { ListMemoryTypesParams } from './ListMemoryTypesParams';
+import { ListMemoryTypesResult } from './ListMemoryTypesResult';
+export interface UseListMemoryTypes {
+    listFirstPage: (params: ListMemoryTypesParams) => Promise<ListMemoryTypesResult>;
+    listNextPage: (params: ListMemoryTypesParams) => Promise<ListMemoryTypesResult>;
+    listPreviousPage: (params: ListMemoryTypesParams) => Promise<ListMemoryTypesResult>;
+    refetchCurrentPage: () => Promise<ListMemoryTypesResult>;
+    currentPage: ListMemoryTypesPage | null;
+}

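This mirrors the removed useListFiles pagination surface: the hook tracks the current page and its tokens internally, so callers only pick a direction. A sketch under the assumption that ListMemoryTypesResult keeps the { type: 'success', page } / { type: 'error', error } union and the hasNextPage flag the old list-files types used (neither is spelled out for memory types in this diff):

// Hypothetical usage only.
import type { UseListMemoryTypes } from './UseListMemoryTypes';
import type { ListMemoryTypesParams } from './ListMemoryTypesParams';

async function loadFirstTwoPages(api: UseListMemoryTypes, params: ListMemoryTypesParams) {
    const first = await api.listFirstPage(params);
    if (first.type !== 'success' || !first.page.hasNextPage) {
        return first;
    }
    return api.listNextPage(params); // page-token bookkeeping happens inside the hook
}
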
package/{lib → dist}/types/UseStreamingChatCompletions.d.ts
CHANGED

@@ -1,6 +1,6 @@
-import { CreateStreamingChatCompletionParams } from
-import { CreateStreamingChatCompletionResponse } from
-import { StreamingChatCompletion } from
+import { CreateStreamingChatCompletionParams } from './CreateStreamingChatCompletionParams';
+import { CreateStreamingChatCompletionResponse } from './CreateStreamingChatCompletionResponse';
+import { StreamingChatCompletion } from './StreamingChatCompletion';
 export interface UseStreamingChatCompletions {
     chatCompletions: StreamingChatCompletion[];
     createStreamingChatCompletion: (params: CreateStreamingChatCompletionParams) => Promise<CreateStreamingChatCompletionResponse>;

package/{lib → dist}/types/index.d.ts
CHANGED

@@ -23,16 +23,8 @@ export * from './CreateStreamResponse';
 export * from './StreamError';
 export * from './UseStream';
 export * from './UseStreamProps';
-export * from './
-export * from './
-export * from './
-export * from './
-export * from './
-export * from './UseWebsitesProps';
-export * from './ListFilesPage';
-export * from './ListFilesParams';
-export * from './ListFilesResult';
-export * from './UseListFiles';
-export * from './UseListFilesProps';
-export * from './SearchFilesParams';
-export * from './UseSearchFiles';
+export * from './ListMemoryTypesPage';
+export * from './ListMemoryTypesParams';
+export * from './ListMemoryTypesResult';
+export * from './UseListMemoryTypes';
+export * from './UseListMemoryTypesProps';

package/package.json
CHANGED
@@ -1,50 +1,58 @@
 {
   "name": "@fencyai/react",
-  "version": "0.1.
+  "version": "0.1.84",
   "description": "> TODO: description",
   "author": "staklau <steinaageklaussen@gmail.com>",
   "homepage": "",
   "license": "MIT",
   "type": "module",
-  "main": "
-  "types": "
+  "main": "dist/index.js",
+  "types": "dist/index.d.ts",
   "exports": {
     ".": {
-      "import": "./
-      "types": "./
-      "default": "./lib/index.js"
+      "import": "./dist/index.js",
+      "types": "./dist/index.d.ts"
     }
   },
   "directories": {
-    "lib": "
+    "lib": "dist",
     "test": "__tests__"
   },
   "files": [
-    "
+    "dist"
   ],
   "publishConfig": {
     "access": "public"
   },
   "scripts": {
-    "build": "
-    "build:watch": "tsc --watch",
+    "build": "vite build",
     "test": "jest",
     "test:watch": "jest --watch",
-    "dev": "
+    "dev": "vite build --watch",
     "prepublishOnly": "npm run build"
   },
   "devDependencies": {
-    "@fencyai/js": "^0.1.
+    "@fencyai/js": "^0.1.84",
     "@types/jest": "^29.5.11",
     "@types/node": "^20.10.5",
     "@types/react": "^18.2.45",
+    "@types/react-syntax-highlighter": "^15.5.13",
+    "@vitejs/plugin-react": "^4.3.4",
     "jest": "^29.7.0",
     "ts-jest": "^29.1.1",
-    "typescript": "^5.3.3"
+    "typescript": "^5.3.3",
+    "vite": "^5.4.11",
+    "vite-plugin-dts": "^4.3.0",
+    "vite-plugin-lib-inject-css": "^2.1.1"
   },
   "peerDependencies": {
-    "@fencyai/js": "^0.1.
+    "@fencyai/js": "^0.1.84",
+    "@radix-ui/react-popover": "^1.1.15",
     "react": ">=16.8.0",
+    "react-markdown": "^10.1.0",
+    "react-syntax-highlighter": "^16.1.0",
+    "remark-gfm": "^4.0.1",
+    "streamdown": "^1.6.10",
     "zod": "^4.0.5"
   },
   "peerDependenciesMeta": {
@@ -52,5 +60,5 @@
       "optional": false
     }
   },
-  "gitHead": "
+  "gitHead": "2097d91c002c61a2dcfcaafbe0407458dbb88739"
 }

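The packaging change behind the rest of this diff: the build moves from tsc output in lib/ to a Vite library build in dist/, the exports map drops its "default" condition, and several rendering libraries (@radix-ui/react-popover, react-markdown, react-syntax-highlighter, remark-gfm, streamdown) become peer dependencies that consuming apps now have to install themselves. Resolution for consumers changes only in target, not in shape; an illustrative check, since this diff does not list the root re-exports:

// "@fencyai/react" now resolves through the exports map to ./dist/index.js.
import * as fencyReact from '@fencyai/react';

console.log(Object.keys(fencyReact)); // whatever the rebuilt dist entry point re-exports
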
package/lib/hooks/useBasicChatCompletions/index.js
REMOVED

@@ -1,92 +0,0 @@
-import { createChatCompletion } from '@fencyai/js';
-import { useCallback, useMemo, useState } from 'react';
-import { useFencyContext } from '../../provider/useFencyContext';
-import { toSpecificChatCompletionParams } from '../../types/CreateGenericChatCompletionParams';
-export const useBasicChatCompletions = () => {
-    const context = useFencyContext();
-    const [chatCompletions, setChatCompletions] = useState([]);
-    const createBasicChatCompletion = useCallback(async (params) => {
-        const triggeredAt = new Date().toISOString();
-        setChatCompletions((prev) => [
-            ...prev,
-            {
-                triggeredAt,
-                data: null,
-                error: null,
-                loading: true,
-            },
-        ]);
-        const specificParams = toSpecificChatCompletionParams(params.openai, params.gemini, params.claude, params.generic);
-        const chatCompletion = await createChatCompletion({
-            pk: context.fency.publishableKey,
-            baseUrl: context.fency.baseUrl,
-            request: {
-                ...specificParams,
-            },
-        });
-        if (chatCompletion.type === 'success' &&
-            chatCompletion.completion.response) {
-            const updatedCompletion = {
-                triggeredAt,
-                data: {
-                    id: chatCompletion.completion.id,
-                    createdAt: chatCompletion.completion.createdAt,
-                    response: chatCompletion.completion.response,
-                },
-                error: null,
-                loading: false,
-            };
-            setChatCompletions((prev) => [
-                ...prev.filter((c) => c.triggeredAt !== triggeredAt),
-                updatedCompletion,
-            ]);
-            return {
-                type: 'success',
-                data: updatedCompletion.data,
-            };
-        }
-        else if (chatCompletion.type === 'error') {
-            const errorCompletion = {
-                triggeredAt,
-                data: null,
-                error: chatCompletion.error,
-                loading: false,
-            };
-            setChatCompletions((prev) => [...prev, errorCompletion]);
-            return {
-                type: 'error',
-                error: chatCompletion.error,
-            };
-        }
-        else {
-            const error = {
-                message: 'No response received',
-                code: 'UnknownError',
-            };
-            setChatCompletions((prev) => [
-                ...prev.filter((c) => c.triggeredAt !== triggeredAt),
-                {
-                    triggeredAt,
-                    data: null,
-                    error: error,
-                    loading: false,
-                },
-            ]);
-            return {
-                type: 'error',
-                error: error,
-            };
-        }
-    }, [context]);
-    const latest = useMemo(() => {
-        return chatCompletions.sort((a, b) => {
-            return (new Date(b.triggeredAt).getTime() -
-                new Date(a.triggeredAt).getTime());
-        })[0];
-    }, [chatCompletions]);
-    return {
-        chatCompletions,
-        createBasicChatCompletion,
-        latest,
-    };
-};

package/lib/hooks/useCreateFiles/index.js
REMOVED

@@ -1,72 +0,0 @@
-import { createFile as createFileApi, } from '@fencyai/js';
-import { useState } from 'react';
-import { useFencyContext } from '../../provider/useFencyContext';
-import { useStream } from '../useStream';
-export function useCreateFiles(props) {
-    const [files, setFiles] = useState([]);
-    const context = useFencyContext();
-    const { createStream } = useStream({
-        onFileUploadCompleted: (streamData) => {
-            props?.onUploadCompleted?.(streamData);
-            setFiles((prev) => prev.map((fileUpload) => {
-                return fileUpload.id === streamData.uploadId
-                    ? { ...fileUpload, status: 'upload_complete' }
-                    : fileUpload;
-            }));
-        },
-        onFileTextContentReady: (streamData) => {
-            props?.onTextContentReady?.(streamData);
-            setFiles((prev) => prev.map((fileUpload) => fileUpload.id === streamData.fileId
-                ? { ...fileUpload, textContent: streamData.textContent }
-                : fileUpload));
-        },
-        onFileSearchIndexReady: (streamData) => {
-            props?.onFileSearchIndexReady?.(streamData);
-            setFiles((prev) => prev.map((fileUpload) => fileUpload.id === streamData.fileId
-                ? { ...fileUpload, searchIndexReady: true }
-                : fileUpload));
-        },
-    });
-    const createFile = async (params) => {
-        const clientSecret = params.fetchClientSecret
-            ? await params.fetchClientSecret()
-            : undefined;
-        const streamResponse = await createStream({
-            type: 'FileStream',
-        });
-        if (streamResponse.type === 'success') {
-            const fileResponse = await createFileApi({
-                pk: context.fency.publishableKey,
-                request: {
-                    streamId: streamResponse.stream.id,
-                    fileName: params.fileName,
-                    fileType: params.fileType,
-                    fileSize: params.fileSize,
-                    extractTextContent: params.extractTextContent || true,
-                    clientSecret: clientSecret?.clientSecret,
-                },
-                baseUrl: context.fency.baseUrl,
-            });
-            if (fileResponse.type === 'success') {
-                setFiles([...files, fileResponse.file]);
-                return fileResponse;
-            }
-            else {
-                return {
-                    type: 'error',
-                    error: fileResponse.error,
-                };
-            }
-        }
-        else {
-            return {
-                type: 'error',
-                error: streamResponse.error,
-            };
-        }
-    };
-    return {
-        createFile,
-        files,
-    };
-}

package/lib/hooks/useFencyEventSource/index.js
REMOVED

@@ -1,39 +0,0 @@
-import { useEffect, useState } from 'react';
-export function useFencyEventSource(props) {
-    const [sourceUrl, setSourceUrl] = useState(null);
-    useEffect(() => {
-        if (!sourceUrl) {
-            return;
-        }
-        const eventSource = new EventSource(sourceUrl);
-        eventSource.onmessage = (message) => {
-            props?.onMessage({
-                url: sourceUrl,
-                data: base64Decode(message.data),
-            });
-        };
-        eventSource.onerror = (error) => {
-            console.error('EventSource error:', error);
-            props?.onError(sourceUrl);
-            setSourceUrl(null);
-        };
-        return () => {
-            eventSource.close();
-        };
-    }, [sourceUrl]);
-    return {
-        setSourceUrl,
-        sourceUrl,
-    };
-}
-export function base64Decode(base64) {
-    // Decode Base64 -> binary string
-    const binary = atob(base64);
-    // Convert binary string -> Uint8Array
-    const bytes = new Uint8Array(binary.length);
-    for (let i = 0; i < binary.length; i++) {
-        bytes[i] = binary.charCodeAt(i);
-    }
-    // Decode UTF-8 bytes -> proper string
-    return new TextDecoder('utf-8').decode(bytes);
-}

package/lib/hooks/useListFiles/index.js
REMOVED

@@ -1,125 +0,0 @@
-import { listFiles as listFilesApi } from '@fencyai/js';
-import { useState } from 'react';
-import { useFencyContext } from '../../provider/useFencyContext';
-export function useListFiles(props) {
-    const [currentPageParams, setCurrentPageParams] = useState(null);
-    const [currentPage, setCurrentPage] = useState(null);
-    const context = useFencyContext();
-    const listFilesInternal = async (params) => {
-        const clientSecret = await params.fetchClientSecret();
-        const response = await listFilesApi({
-            pk: context.fency.publishableKey,
-            request: {
-                limit: props?.pageSize ?? 50,
-                pagination: {
-                    nextPageToken: params.nextPageToken,
-                    previousPageToken: params.previousPageToken,
-                },
-                clientSecret: clientSecret.clientSecret,
-            },
-            baseUrl: context.fency.baseUrl,
-        });
-        if (response.type === 'success') {
-            return {
-                type: 'success',
-                page: {
-                    items: response.items,
-                    hasNextPage: response.pagination.nextPageToken != null,
-                    hasPreviousPage: response.pagination.previousPageToken != null,
-                    pagination: response.pagination,
-                },
-            };
-        }
-        return response;
-    };
-    const listFirstPage = async (params) => {
-        const result = await listFilesInternal({
-            ...params,
-            nextPageToken: undefined,
-            previousPageToken: undefined,
-        });
-        if (result.type === 'success') {
-            setCurrentPage(result.page);
-            setCurrentPageParams(params);
-        }
-        return result;
-    };
-    const listNextPage = async (params) => {
-        if (currentPage == null) {
-            return {
-                type: 'error',
-                error: {
-                    code: 'NO_CURRENT_PAGE',
-                    message: 'No current page',
-                },
-            };
-        }
-        if (currentPage.pagination.nextPageToken == null) {
-            return {
-                type: 'error',
-                error: {
-                    code: 'NO_NEXT_PAGE',
-                    message: 'No next page',
-                },
-            };
-        }
-        const result = await listFilesInternal({
-            ...params,
-            nextPageToken: currentPage.pagination.nextPageToken,
-            previousPageToken: undefined,
-        });
-        if (result.type === 'success') {
-            setCurrentPage(result.page);
-            setCurrentPageParams(params);
-        }
-        return result;
-    };
-    const listPreviousPage = async (params) => {
-        if (currentPage == null) {
-            return {
-                type: 'error',
-                error: {
-                    code: 'NO_CURRENT_PAGE',
-                    message: 'No current page',
-                },
-            };
-        }
-        if (currentPage.pagination.previousPageToken == null) {
-            return {
-                type: 'error',
-                error: {
-                    code: 'NO_PREVIOUS_PAGE',
-                    message: 'No previous page',
-                },
-            };
-        }
-        const result = await listFilesInternal({
-            ...params,
-            nextPageToken: undefined,
-            previousPageToken: currentPage.pagination.previousPageToken,
-        });
-        if (result.type === 'success') {
-            setCurrentPage(result.page);
-            setCurrentPageParams(params);
-        }
-        return result;
-    };
-    return {
-        listFirstPage,
-        listNextPage,
-        listPreviousPage,
-        refetchCurrentPage: async () => {
-            if (currentPageParams == null) {
-                return {
-                    type: 'error',
-                    error: {
-                        code: 'NO_CURRENT_PAGE',
-                        message: 'No current page, please call listFirstPage first.',
-                    },
-                };
-            }
-            return listFilesInternal(currentPageParams);
-        },
-        currentPage,
-    };
-}

package/lib/hooks/useSearchFiles/index.js
REMOVED

@@ -1,26 +0,0 @@
-import { searchFiles as searchFilesApi, } from '@fencyai/js';
-import { useState } from 'react';
-import { useFencyContext } from '../../provider/useFencyContext';
-export function useSearchFiles() {
-    const [searchResults, setSearchResults] = useState(undefined);
-    const context = useFencyContext();
-    const searchFiles = async (params) => {
-        const clientSecret = await params.fetchClientSecret();
-        const response = await searchFilesApi({
-            pk: context.fency.publishableKey,
-            request: {
-                text: params.text,
-                clientSecret: clientSecret.clientSecret,
-            },
-            baseUrl: context.fency.baseUrl,
-        });
-        if (response.type === 'success') {
-            setSearchResults(response.results);
-        }
-        return response;
-    };
-    return {
-        searchFiles,
-        searchResults,
-    };
-}

package/lib/hooks/useStream/index.js
REMOVED

@@ -1,73 +0,0 @@
-import { createStream as createStreamApi, } from '@fencyai/js';
-import { useState } from 'react';
-import { useFencyContext } from '../../provider/useFencyContext';
-import { useFencyEventSource } from '../useFencyEventSource';
-import { toStreamData } from './toStreamData';
-export const useStream = (props) => {
-    const context = useFencyContext();
-    const [stream, setStream] = useState(null);
-    const es = useFencyEventSource({
-        onError: (url) => {
-            console.error('Stream error:', url);
-            props?.onStreamError?.({
-                streamId: url,
-                error: {
-                    code: 'UnknownError',
-                    message: 'Unknown error in useStream',
-                },
-            });
-        },
-        onMessage: (message) => {
-            const streamData = toStreamData(message.data);
-            if (!streamData) {
-                return false;
-            }
-            switch (streamData.type) {
-                case 'NewChatCompletionStreamChunk':
-                    props?.onNewChatCompletionStreamChunk?.(streamData);
-                    return true;
-                case 'ChatCompletionStreamCompleted':
-                    props?.onChatCompletionStreamCompleted?.(streamData);
-                    return true;
-                case 'StreamTimeout':
-                    props?.onStreamTimeout?.(streamData);
-                    return true;
-                case 'StreamNotFound':
-                    props?.onStreamNotFound?.(streamData);
-                    return true;
-                case 'FileUploadCompleted':
-                    props?.onFileUploadCompleted?.(streamData);
-                    return true;
-                case 'FileTextContentReady':
-                    props?.onFileTextContentReady?.(streamData);
-                    return true;
-                case 'WebsiteHtmlContentReady':
-                    props?.onWebsiteHtmlContentReady?.(streamData);
-                    return true;
-                case 'WebsiteTextContentReady':
-                    props?.onWebsiteTextContentReady?.(streamData);
-                    return true;
-                case 'FileSearchIndexReady':
-                    props?.onFileSearchIndexReady?.(streamData);
-                    return true;
-            }
-        },
-    });
-    const createStream = async (params) => {
-        const response = await createStreamApi({
-            pk: context.fency.publishableKey,
-            baseUrl: context.fency.baseUrl,
-            request: params,
-        });
-        if (response.type === 'success') {
-            setStream(response.stream);
-            const url = `${context.fency.baseUrl}/v1/pub/streams/${response.stream.id}?pk=${context.fency.publishableKey}`;
-            es.setSourceUrl(url);
-        }
-        return response;
-    };
-    return {
-        createStream: createStream,
-        stream,
-    };
-};