@geenius/ai 0.1.0 → 0.3.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +15 -2
- package/packages/convex/dist/index.d.ts +1 -0
- package/packages/convex/dist/index.js +42 -0
- package/packages/convex/dist/index.js.map +1 -0
- package/packages/react/README.md +1 -1
- package/packages/react-css/README.md +1 -1
- package/packages/react-css/dist/index.cjs +1544 -0
- package/packages/react-css/dist/index.cjs.map +1 -0
- package/packages/react-css/dist/index.d.cts +454 -0
- package/packages/react-css/dist/index.d.ts +454 -0
- package/packages/react-css/dist/index.js +1495 -0
- package/packages/react-css/dist/index.js.map +1 -0
- package/packages/shared/README.md +1 -1
- package/packages/solidjs/README.md +1 -1
- package/packages/solidjs-css/README.md +1 -1
- package/packages/solidjs-css/dist/index.cjs +674 -0
- package/packages/solidjs-css/dist/index.cjs.map +1 -0
- package/packages/solidjs-css/dist/index.d.cts +254 -0
- package/packages/solidjs-css/dist/index.d.ts +254 -0
- package/packages/solidjs-css/dist/index.js +634 -0
- package/packages/solidjs-css/dist/index.js.map +1 -0
- package/.changeset/config.json +0 -11
- package/.env.example +0 -2
- package/.github/CODEOWNERS +0 -1
- package/.github/ISSUE_TEMPLATE/bug_report.md +0 -16
- package/.github/ISSUE_TEMPLATE/feature_request.md +0 -11
- package/.github/PULL_REQUEST_TEMPLATE.md +0 -10
- package/.github/dependabot.yml +0 -11
- package/.github/workflows/ci.yml +0 -23
- package/.github/workflows/release.yml +0 -29
- package/.node-version +0 -1
- package/.nvmrc +0 -1
- package/.prettierrc +0 -7
- package/.project/ACCOUNT.yaml +0 -4
- package/.project/IDEAS.yaml +0 -7
- package/.project/PROJECT.yaml +0 -11
- package/.project/ROADMAP.yaml +0 -15
- package/CODE_OF_CONDUCT.md +0 -26
- package/CONTRIBUTING.md +0 -61
- package/SECURITY.md +0 -18
- package/SUPPORT.md +0 -14
- package/packages/convex/package.json +0 -42
- package/packages/convex/src/index.ts +0 -8
- package/packages/convex/src/mutations/messages.ts +0 -29
- package/packages/convex/src/queries/messages.ts +0 -24
- package/packages/convex/src/schema.ts +0 -20
- package/packages/convex/tsconfig.json +0 -11
- package/packages/convex/tsup.config.ts +0 -17
- package/packages/react/package.json +0 -60
- package/packages/react/src/components/AILogTable.tsx +0 -90
- package/packages/react/src/components/ChatWindow.tsx +0 -118
- package/packages/react/src/components/GenerationCard.tsx +0 -73
- package/packages/react/src/components/ImageGenerator.tsx +0 -103
- package/packages/react/src/components/ModelSelector.tsx +0 -44
- package/packages/react/src/components/ModelTestRunner.tsx +0 -148
- package/packages/react/src/components/VoiceSelector.tsx +0 -51
- package/packages/react/src/components/index.ts +0 -9
- package/packages/react/src/hooks/index.ts +0 -12
- package/packages/react/src/hooks/useAI.ts +0 -158
- package/packages/react/src/hooks/useAILogs.ts +0 -40
- package/packages/react/src/hooks/useAIModels.ts +0 -53
- package/packages/react/src/hooks/useChat.ts +0 -141
- package/packages/react/src/hooks/useContentManager.ts +0 -108
- package/packages/react/src/hooks/useImageGeneration.ts +0 -82
- package/packages/react/src/hooks/useMemory.ts +0 -161
- package/packages/react/src/hooks/useModelTest.ts +0 -126
- package/packages/react/src/hooks/useRealtimeAudio.ts +0 -203
- package/packages/react/src/hooks/useSkills.ts +0 -114
- package/packages/react/src/hooks/useTextToSpeech.ts +0 -99
- package/packages/react/src/hooks/useTranscription.ts +0 -119
- package/packages/react/src/hooks/useVideoGeneration.ts +0 -79
- package/packages/react/src/index.ts +0 -42
- package/packages/react/src/pages/AILogsPage.tsx +0 -98
- package/packages/react/src/pages/ChatPage.tsx +0 -42
- package/packages/react/src/pages/ModelTestPage.tsx +0 -33
- package/packages/react/src/pages/index.ts +0 -5
- package/packages/react/tsconfig.json +0 -26
- package/packages/react/tsup.config.ts +0 -22
- package/packages/react-css/package.json +0 -45
- package/packages/react-css/src/ai.css +0 -857
- package/packages/react-css/src/components/AILogTable.tsx +0 -90
- package/packages/react-css/src/components/ChatWindow.tsx +0 -118
- package/packages/react-css/src/components/GenerationCard.tsx +0 -73
- package/packages/react-css/src/components/ImageGenerator.tsx +0 -103
- package/packages/react-css/src/components/ModelSelector.tsx +0 -44
- package/packages/react-css/src/components/ModelTestRunner.tsx +0 -148
- package/packages/react-css/src/components/VoiceSelector.tsx +0 -51
- package/packages/react-css/src/components/index.ts +0 -9
- package/packages/react-css/src/hooks/index.ts +0 -12
- package/packages/react-css/src/hooks/useAI.ts +0 -153
- package/packages/react-css/src/hooks/useAILogs.ts +0 -40
- package/packages/react-css/src/hooks/useAIModels.ts +0 -51
- package/packages/react-css/src/hooks/useChat.ts +0 -145
- package/packages/react-css/src/hooks/useContentManager.ts +0 -108
- package/packages/react-css/src/hooks/useImageGeneration.ts +0 -82
- package/packages/react-css/src/hooks/useMemory.ts +0 -161
- package/packages/react-css/src/hooks/useModelTest.ts +0 -122
- package/packages/react-css/src/hooks/useRealtimeAudio.ts +0 -203
- package/packages/react-css/src/hooks/useSkills.ts +0 -114
- package/packages/react-css/src/hooks/useTextToSpeech.ts +0 -99
- package/packages/react-css/src/hooks/useTranscription.ts +0 -119
- package/packages/react-css/src/hooks/useVideoGeneration.ts +0 -79
- package/packages/react-css/src/index.ts +0 -35
- package/packages/react-css/src/pages/AILogsPage.tsx +0 -98
- package/packages/react-css/src/pages/ChatPage.tsx +0 -42
- package/packages/react-css/src/pages/ModelTestPage.tsx +0 -33
- package/packages/react-css/src/pages/index.ts +0 -5
- package/packages/react-css/src/styles.css +0 -127
- package/packages/react-css/tsconfig.json +0 -26
- package/packages/react-css/tsup.config.ts +0 -2
- package/packages/shared/package.json +0 -71
- package/packages/shared/src/__tests__/ai.test.ts +0 -67
- package/packages/shared/src/ai-client.ts +0 -243
- package/packages/shared/src/config.ts +0 -235
- package/packages/shared/src/content.ts +0 -249
- package/packages/shared/src/convex/helpers.ts +0 -163
- package/packages/shared/src/convex/index.ts +0 -16
- package/packages/shared/src/convex/schemas.ts +0 -146
- package/packages/shared/src/convex/validators.ts +0 -136
- package/packages/shared/src/index.ts +0 -107
- package/packages/shared/src/memory.ts +0 -197
- package/packages/shared/src/providers/base.ts +0 -103
- package/packages/shared/src/providers/elevenlabs.ts +0 -155
- package/packages/shared/src/providers/index.ts +0 -28
- package/packages/shared/src/providers/openai-compatible.ts +0 -286
- package/packages/shared/src/providers/registry.ts +0 -113
- package/packages/shared/src/providers/replicate-fal.ts +0 -230
- package/packages/shared/src/skills.ts +0 -273
- package/packages/shared/src/types.ts +0 -501
- package/packages/shared/tsconfig.json +0 -25
- package/packages/shared/tsup.config.ts +0 -22
- package/packages/shared/vitest.config.ts +0 -4
- package/packages/solidjs/package.json +0 -59
- package/packages/solidjs/src/components/ChatWindow.tsx +0 -78
- package/packages/solidjs/src/components/GenerationCard.tsx +0 -62
- package/packages/solidjs/src/components/ModelTestRunner.tsx +0 -119
- package/packages/solidjs/src/components/index.ts +0 -5
- package/packages/solidjs/src/index.ts +0 -32
- package/packages/solidjs/src/pages/ChatPage.tsx +0 -22
- package/packages/solidjs/src/pages/ModelTestPage.tsx +0 -22
- package/packages/solidjs/src/pages/index.ts +0 -4
- package/packages/solidjs/src/primitives/createAI.ts +0 -79
- package/packages/solidjs/src/primitives/createChat.ts +0 -100
- package/packages/solidjs/src/primitives/createContentManager.ts +0 -61
- package/packages/solidjs/src/primitives/createImageGeneration.ts +0 -46
- package/packages/solidjs/src/primitives/createMemory.ts +0 -127
- package/packages/solidjs/src/primitives/createModelTest.ts +0 -89
- package/packages/solidjs/src/primitives/createSkills.ts +0 -83
- package/packages/solidjs/src/primitives/createTextToSpeech.ts +0 -56
- package/packages/solidjs/src/primitives/createVideoGeneration.ts +0 -46
- package/packages/solidjs/src/primitives/index.ts +0 -8
- package/packages/solidjs/tsconfig.json +0 -27
- package/packages/solidjs/tsup.config.ts +0 -21
- package/packages/solidjs-css/package.json +0 -44
- package/packages/solidjs-css/src/ai.css +0 -857
- package/packages/solidjs-css/src/components/ChatWindow.tsx +0 -78
- package/packages/solidjs-css/src/components/GenerationCard.tsx +0 -62
- package/packages/solidjs-css/src/components/ModelTestRunner.tsx +0 -119
- package/packages/solidjs-css/src/components/index.ts +0 -5
- package/packages/solidjs-css/src/index.ts +0 -26
- package/packages/solidjs-css/src/pages/ChatPage.tsx +0 -22
- package/packages/solidjs-css/src/pages/ModelTestPage.tsx +0 -22
- package/packages/solidjs-css/src/pages/index.ts +0 -4
- package/packages/solidjs-css/src/primitives/createAI.ts +0 -79
- package/packages/solidjs-css/src/primitives/createChat.ts +0 -100
- package/packages/solidjs-css/src/primitives/createContentManager.ts +0 -61
- package/packages/solidjs-css/src/primitives/createImageGeneration.ts +0 -46
- package/packages/solidjs-css/src/primitives/createMemory.ts +0 -127
- package/packages/solidjs-css/src/primitives/createModelTest.ts +0 -89
- package/packages/solidjs-css/src/primitives/createSkills.ts +0 -83
- package/packages/solidjs-css/src/primitives/createTextToSpeech.ts +0 -56
- package/packages/solidjs-css/src/primitives/createVideoGeneration.ts +0 -46
- package/packages/solidjs-css/src/primitives/index.ts +0 -1
- package/packages/solidjs-css/src/styles.css +0 -127
- package/packages/solidjs-css/tsconfig.json +0 -27
- package/packages/solidjs-css/tsup.config.ts +0 -2
- package/pnpm-workspace.yaml +0 -2
|
@@ -0,0 +1,1544 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __defProp = Object.defineProperty;
|
|
3
|
+
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
|
4
|
+
var __getOwnPropNames = Object.getOwnPropertyNames;
|
|
5
|
+
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
|
6
|
+
var __export = (target, all) => {
|
|
7
|
+
for (var name in all)
|
|
8
|
+
__defProp(target, name, { get: all[name], enumerable: true });
|
|
9
|
+
};
|
|
10
|
+
var __copyProps = (to, from, except, desc) => {
|
|
11
|
+
if (from && typeof from === "object" || typeof from === "function") {
|
|
12
|
+
for (let key of __getOwnPropNames(from))
|
|
13
|
+
if (!__hasOwnProp.call(to, key) && key !== except)
|
|
14
|
+
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
|
15
|
+
}
|
|
16
|
+
return to;
|
|
17
|
+
};
|
|
18
|
+
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
|
19
|
+
|
|
20
|
+
// src/index.ts
|
|
21
|
+
var index_exports = {};
|
|
22
|
+
__export(index_exports, {
|
|
23
|
+
AILogTable: () => AILogTable,
|
|
24
|
+
AILogsPage: () => AILogsPage,
|
|
25
|
+
ChatPage: () => ChatPage,
|
|
26
|
+
ChatWindow: () => ChatWindow,
|
|
27
|
+
GenerationCard: () => GenerationCard,
|
|
28
|
+
ImageGenerator: () => ImageGenerator,
|
|
29
|
+
ModelSelector: () => ModelSelector,
|
|
30
|
+
ModelTestPage: () => ModelTestPage,
|
|
31
|
+
ModelTestRunner: () => ModelTestRunner,
|
|
32
|
+
VoiceSelector: () => VoiceSelector,
|
|
33
|
+
useAI: () => useAI,
|
|
34
|
+
useAILogs: () => useAILogs,
|
|
35
|
+
useAIModels: () => useAIModels,
|
|
36
|
+
useChat: () => useChat,
|
|
37
|
+
useContentManager: () => useContentManager,
|
|
38
|
+
useImageGeneration: () => useImageGeneration,
|
|
39
|
+
useMemory: () => useMemory,
|
|
40
|
+
useModelTest: () => useModelTest,
|
|
41
|
+
useRealtimeAudio: () => useRealtimeAudio,
|
|
42
|
+
useSkills: () => useSkills,
|
|
43
|
+
useTextToSpeech: () => useTextToSpeech,
|
|
44
|
+
useTranscription: () => useTranscription,
|
|
45
|
+
useVideoGeneration: () => useVideoGeneration
|
|
46
|
+
});
|
|
47
|
+
module.exports = __toCommonJS(index_exports);
|
|
48
|
+
|
|
49
|
+
// src/hooks/useAI.ts
|
|
50
|
+
var import_react = require("react");
|
|
51
|
+
var import_react2 = require("convex/react");
|
|
52
|
+
function useAI(options = {}) {
|
|
53
|
+
const [isLoading, setIsLoading] = (0, import_react.useState)(false);
|
|
54
|
+
const [error, setError] = (0, import_react.useState)(null);
|
|
55
|
+
const [lastResult, setLastResult] = (0, import_react.useState)(null);
|
|
56
|
+
const [lastType, setLastType] = (0, import_react.useState)(null);
|
|
57
|
+
const textAction = options.generateTextAction ? (0, import_react2.useAction)(options.generateTextAction) : null;
|
|
58
|
+
const imageAction = options.generateImageAction ? (0, import_react2.useAction)(options.generateImageAction) : null;
|
|
59
|
+
const audioAction = options.generateAudioAction ? (0, import_react2.useAction)(options.generateAudioAction) : null;
|
|
60
|
+
const transcribeAction = options.transcribeAudioAction ? (0, import_react2.useAction)(options.transcribeAudioAction) : null;
|
|
61
|
+
const videoAction = options.generateVideoAction ? (0, import_react2.useAction)(options.generateVideoAction) : null;
|
|
62
|
+
const generateText = (0, import_react.useCallback)(async (args) => {
|
|
63
|
+
if (!textAction) throw new Error("generateTextAction not provided");
|
|
64
|
+
setIsLoading(true);
|
|
65
|
+
setError(null);
|
|
66
|
+
try {
|
|
67
|
+
const result = await textAction(args);
|
|
68
|
+
setLastResult(result);
|
|
69
|
+
setLastType("text");
|
|
70
|
+
return result;
|
|
71
|
+
} catch (err) {
|
|
72
|
+
const msg = err instanceof Error ? err.message : "Text generation failed";
|
|
73
|
+
setError(msg);
|
|
74
|
+
throw err;
|
|
75
|
+
} finally {
|
|
76
|
+
setIsLoading(false);
|
|
77
|
+
}
|
|
78
|
+
}, [textAction]);
|
|
79
|
+
const generateImage = (0, import_react.useCallback)(async (prompt, model) => {
|
|
80
|
+
if (!imageAction) throw new Error("generateImageAction not provided");
|
|
81
|
+
setIsLoading(true);
|
|
82
|
+
setError(null);
|
|
83
|
+
try {
|
|
84
|
+
const result = await imageAction({ prompt, model });
|
|
85
|
+
setLastResult(result);
|
|
86
|
+
setLastType("image");
|
|
87
|
+
return result;
|
|
88
|
+
} catch (err) {
|
|
89
|
+
const msg = err instanceof Error ? err.message : "Image generation failed";
|
|
90
|
+
setError(msg);
|
|
91
|
+
throw err;
|
|
92
|
+
} finally {
|
|
93
|
+
setIsLoading(false);
|
|
94
|
+
}
|
|
95
|
+
}, [imageAction]);
|
|
96
|
+
const generateAudio = (0, import_react.useCallback)(async (prompt, voice) => {
|
|
97
|
+
if (!audioAction) throw new Error("generateAudioAction not provided");
|
|
98
|
+
setIsLoading(true);
|
|
99
|
+
setError(null);
|
|
100
|
+
try {
|
|
101
|
+
const result = await audioAction({ prompt, voice });
|
|
102
|
+
setLastResult(result);
|
|
103
|
+
setLastType("audio");
|
|
104
|
+
return result;
|
|
105
|
+
} catch (err) {
|
|
106
|
+
const msg = err instanceof Error ? err.message : "Audio generation failed";
|
|
107
|
+
setError(msg);
|
|
108
|
+
throw err;
|
|
109
|
+
} finally {
|
|
110
|
+
setIsLoading(false);
|
|
111
|
+
}
|
|
112
|
+
}, [audioAction]);
|
|
113
|
+
const transcribeAudio = (0, import_react.useCallback)(async (audio) => {
|
|
114
|
+
if (!transcribeAction) throw new Error("transcribeAudioAction not provided");
|
|
115
|
+
setIsLoading(true);
|
|
116
|
+
setError(null);
|
|
117
|
+
try {
|
|
118
|
+
const result = await transcribeAction({ audio });
|
|
119
|
+
setLastResult(result);
|
|
120
|
+
setLastType("transcription");
|
|
121
|
+
return result;
|
|
122
|
+
} catch (err) {
|
|
123
|
+
const msg = err instanceof Error ? err.message : "Transcription failed";
|
|
124
|
+
setError(msg);
|
|
125
|
+
throw err;
|
|
126
|
+
} finally {
|
|
127
|
+
setIsLoading(false);
|
|
128
|
+
}
|
|
129
|
+
}, [transcribeAction]);
|
|
130
|
+
const generateVideo = (0, import_react.useCallback)(async (prompt) => {
|
|
131
|
+
if (!videoAction) throw new Error("generateVideoAction not provided");
|
|
132
|
+
setIsLoading(true);
|
|
133
|
+
setError(null);
|
|
134
|
+
try {
|
|
135
|
+
const result = await videoAction({ prompt });
|
|
136
|
+
setLastResult(result);
|
|
137
|
+
setLastType("video");
|
|
138
|
+
return result;
|
|
139
|
+
} catch (err) {
|
|
140
|
+
const msg = err instanceof Error ? err.message : "Video generation failed";
|
|
141
|
+
setError(msg);
|
|
142
|
+
throw err;
|
|
143
|
+
} finally {
|
|
144
|
+
setIsLoading(false);
|
|
145
|
+
}
|
|
146
|
+
}, [videoAction]);
|
|
147
|
+
const clearError = (0, import_react.useCallback)(() => setError(null), []);
|
|
148
|
+
return {
|
|
149
|
+
generateText,
|
|
150
|
+
generateImage,
|
|
151
|
+
generateAudio,
|
|
152
|
+
transcribeAudio,
|
|
153
|
+
generateVideo,
|
|
154
|
+
isLoading,
|
|
155
|
+
error,
|
|
156
|
+
lastResult,
|
|
157
|
+
lastType,
|
|
158
|
+
clearError
|
|
159
|
+
};
|
|
160
|
+
}
|
|
161
|
+
|
|
162
|
+
// src/hooks/useChat.ts
|
|
163
|
+
var import_react3 = require("react");
|
|
164
|
+
var import_react4 = require("convex/react");
|
|
165
|
+
function useChat(options = {}) {
|
|
166
|
+
const [localMessages, setLocalMessages] = (0, import_react3.useState)([]);
|
|
167
|
+
const [isSending, setIsSending] = (0, import_react3.useState)(false);
|
|
168
|
+
const [error, setError] = (0, import_react3.useState)(null);
|
|
169
|
+
const [conversationId, setConversationId] = (0, import_react3.useState)(options.conversationId ?? null);
|
|
170
|
+
const convexMessages = options.listMessagesQuery && conversationId ? (0, import_react4.useQuery)(options.listMessagesQuery, { conversationId }) : void 0;
|
|
171
|
+
const sendMutation = options.sendMessageMutation ? (0, import_react4.useMutation)(options.sendMessageMutation) : null;
|
|
172
|
+
const createConversation = options.createConversationMutation ? (0, import_react4.useMutation)(options.createConversationMutation) : null;
|
|
173
|
+
const textAction = options.generateTextAction ? (0, import_react4.useAction)(options.generateTextAction) : null;
|
|
174
|
+
const messages = convexMessages ?? localMessages;
|
|
175
|
+
const sendMessage = (0, import_react3.useCallback)(async (content) => {
|
|
176
|
+
setIsSending(true);
|
|
177
|
+
setError(null);
|
|
178
|
+
try {
|
|
179
|
+
let activeConversationId = conversationId;
|
|
180
|
+
if (!activeConversationId && createConversation) {
|
|
181
|
+
activeConversationId = await createConversation({
|
|
182
|
+
title: content.substring(0, 100),
|
|
183
|
+
model: options.model ?? "gpt-4o",
|
|
184
|
+
systemPrompt: options.systemPrompt
|
|
185
|
+
});
|
|
186
|
+
setConversationId(activeConversationId);
|
|
187
|
+
if (activeConversationId) options.onNewConversation?.(activeConversationId);
|
|
188
|
+
}
|
|
189
|
+
if (sendMutation && activeConversationId) {
|
|
190
|
+
await sendMutation({ conversationId: activeConversationId, content });
|
|
191
|
+
} else {
|
|
192
|
+
const userMsg = {
|
|
193
|
+
id: `msg-${Date.now()}`,
|
|
194
|
+
conversationId: activeConversationId ?? "local",
|
|
195
|
+
userId: "local",
|
|
196
|
+
role: "user",
|
|
197
|
+
content,
|
|
198
|
+
createdAt: Date.now()
|
|
199
|
+
};
|
|
200
|
+
setLocalMessages((prev) => [...prev, userMsg]);
|
|
201
|
+
}
|
|
202
|
+
if (textAction) {
|
|
203
|
+
const aiResponse = await textAction({
|
|
204
|
+
model: options.model ?? "gpt-4o",
|
|
205
|
+
messages: [
|
|
206
|
+
...options.systemPrompt ? [{ role: "system", content: options.systemPrompt }] : [],
|
|
207
|
+
...messages.map((m) => ({ role: m.role, content: m.content })),
|
|
208
|
+
{ role: "user", content }
|
|
209
|
+
],
|
|
210
|
+
caller: "chat"
|
|
211
|
+
});
|
|
212
|
+
if (!sendMutation) {
|
|
213
|
+
const assistantMsg = {
|
|
214
|
+
id: `msg-${Date.now()}-ai`,
|
|
215
|
+
conversationId: activeConversationId ?? "local",
|
|
216
|
+
userId: "ai",
|
|
217
|
+
role: "assistant",
|
|
218
|
+
content: aiResponse,
|
|
219
|
+
model: options.model ?? "gpt-4o",
|
|
220
|
+
createdAt: Date.now()
|
|
221
|
+
};
|
|
222
|
+
setLocalMessages((prev) => [...prev, assistantMsg]);
|
|
223
|
+
}
|
|
224
|
+
}
|
|
225
|
+
} catch (err) {
|
|
226
|
+
const msg = err instanceof Error ? err.message : "Failed to send message";
|
|
227
|
+
setError(msg);
|
|
228
|
+
} finally {
|
|
229
|
+
setIsSending(false);
|
|
230
|
+
}
|
|
231
|
+
}, [conversationId, sendMutation, createConversation, textAction, messages, options]);
|
|
232
|
+
const clearError = (0, import_react3.useCallback)(() => setError(null), []);
|
|
233
|
+
return {
|
|
234
|
+
messages,
|
|
235
|
+
isLoading: convexMessages === void 0 && !!options.listMessagesQuery,
|
|
236
|
+
isSending,
|
|
237
|
+
error,
|
|
238
|
+
sendMessage,
|
|
239
|
+
conversationId,
|
|
240
|
+
clearError
|
|
241
|
+
};
|
|
242
|
+
}
|
|
243
|
+
|
|
244
|
+
// src/hooks/useAILogs.ts
|
|
245
|
+
var import_react5 = require("convex/react");
|
|
246
|
+
function useAILogs(options) {
|
|
247
|
+
const logs = (0, import_react5.useQuery)(options.listLogsQuery, {
|
|
248
|
+
model: options.filters?.model,
|
|
249
|
+
provider: options.filters?.provider,
|
|
250
|
+
status: options.filters?.status,
|
|
251
|
+
caller: options.filters?.caller,
|
|
252
|
+
limit: options.limit ?? 50
|
|
253
|
+
});
|
|
254
|
+
return {
|
|
255
|
+
logs: logs ?? [],
|
|
256
|
+
isLoading: logs === void 0
|
|
257
|
+
};
|
|
258
|
+
}
|
|
259
|
+
|
|
260
|
+
// src/hooks/useModelTest.ts
|
|
261
|
+
var import_react6 = require("react");
|
|
262
|
+
var import_react7 = require("convex/react");
|
|
263
|
+
function useModelTest(options = {}) {
|
|
264
|
+
const [results, setResults] = (0, import_react6.useState)([]);
|
|
265
|
+
const [isRunning, setIsRunning] = (0, import_react6.useState)(false);
|
|
266
|
+
const textAction = options.generateTextAction ? (0, import_react7.useAction)(options.generateTextAction) : null;
|
|
267
|
+
const imageAction = options.generateImageAction ? (0, import_react7.useAction)(options.generateImageAction) : null;
|
|
268
|
+
const audioAction = options.generateAudioAction ? (0, import_react7.useAction)(options.generateAudioAction) : null;
|
|
269
|
+
const transcribeAction = options.transcribeAudioAction ? (0, import_react7.useAction)(options.transcribeAudioAction) : null;
|
|
270
|
+
const videoAction = options.generateVideoAction ? (0, import_react7.useAction)(options.generateVideoAction) : null;
|
|
271
|
+
const runTest = (0, import_react6.useCallback)(async (model, prompt, type = "text") => {
|
|
272
|
+
setIsRunning(true);
|
|
273
|
+
const start = Date.now();
|
|
274
|
+
try {
|
|
275
|
+
let result = "";
|
|
276
|
+
switch (type) {
|
|
277
|
+
case "text":
|
|
278
|
+
if (!textAction) throw new Error("generateTextAction not provided");
|
|
279
|
+
result = await textAction({
|
|
280
|
+
model,
|
|
281
|
+
messages: [{ role: "user", content: prompt }],
|
|
282
|
+
caller: "model-test"
|
|
283
|
+
});
|
|
284
|
+
break;
|
|
285
|
+
case "image":
|
|
286
|
+
if (!imageAction) throw new Error("generateImageAction not provided");
|
|
287
|
+
result = await imageAction({ prompt, model });
|
|
288
|
+
break;
|
|
289
|
+
case "audio":
|
|
290
|
+
if (!audioAction) throw new Error("generateAudioAction not provided");
|
|
291
|
+
result = await audioAction({ prompt });
|
|
292
|
+
break;
|
|
293
|
+
case "video":
|
|
294
|
+
if (!videoAction) throw new Error("generateVideoAction not provided");
|
|
295
|
+
result = await videoAction({ prompt });
|
|
296
|
+
break;
|
|
297
|
+
default:
|
|
298
|
+
throw new Error(`Unsupported test type: ${type}`);
|
|
299
|
+
}
|
|
300
|
+
const testResult = {
|
|
301
|
+
model,
|
|
302
|
+
type,
|
|
303
|
+
result,
|
|
304
|
+
durationMs: Date.now() - start,
|
|
305
|
+
timestamp: Date.now()
|
|
306
|
+
};
|
|
307
|
+
setResults((prev) => [...prev, testResult]);
|
|
308
|
+
return testResult;
|
|
309
|
+
} catch (err) {
|
|
310
|
+
const testResult = {
|
|
311
|
+
model,
|
|
312
|
+
type,
|
|
313
|
+
result: "",
|
|
314
|
+
durationMs: Date.now() - start,
|
|
315
|
+
timestamp: Date.now(),
|
|
316
|
+
error: err instanceof Error ? err.message : "Test failed"
|
|
317
|
+
};
|
|
318
|
+
setResults((prev) => [...prev, testResult]);
|
|
319
|
+
return testResult;
|
|
320
|
+
} finally {
|
|
321
|
+
setIsRunning(false);
|
|
322
|
+
}
|
|
323
|
+
}, [textAction, imageAction, audioAction, videoAction]);
|
|
324
|
+
const runBatchTest = (0, import_react6.useCallback)(async (models, prompt) => {
|
|
325
|
+
setIsRunning(true);
|
|
326
|
+
const batchResults = [];
|
|
327
|
+
for (const model of models) {
|
|
328
|
+
const result = await runTest(model, prompt);
|
|
329
|
+
batchResults.push(result);
|
|
330
|
+
}
|
|
331
|
+
setIsRunning(false);
|
|
332
|
+
return batchResults;
|
|
333
|
+
}, [runTest]);
|
|
334
|
+
const clearResults = (0, import_react6.useCallback)(() => setResults([]), []);
|
|
335
|
+
return { runTest, runBatchTest, results, isRunning, clearResults };
|
|
336
|
+
}
|
|
337
|
+
|
|
338
|
+
// src/hooks/useAIModels.ts
|
|
339
|
+
var import_react8 = require("convex/react");
|
|
340
|
+
var import_react9 = require("react");
|
|
341
|
+
function useAIModels(options) {
|
|
342
|
+
const models = (0, import_react8.useQuery)(options.listModelsQuery, {});
|
|
343
|
+
const upsertMutation = options.upsertModelMutation ? (0, import_react8.useMutation)(options.upsertModelMutation) : null;
|
|
344
|
+
const upsertModel = (0, import_react9.useCallback)(async (model) => {
|
|
345
|
+
if (!upsertMutation) throw new Error("upsertModelMutation not provided");
|
|
346
|
+
await upsertMutation({
|
|
347
|
+
model: model.id,
|
|
348
|
+
provider: model.provider,
|
|
349
|
+
displayName: model.displayName,
|
|
350
|
+
inputCostPer1k: model.inputCostPer1k ?? 0,
|
|
351
|
+
outputCostPer1k: model.outputCostPer1k ?? 0,
|
|
352
|
+
capabilities: model.capabilities,
|
|
353
|
+
contextWindow: model.contextWindow,
|
|
354
|
+
isActive: model.isActive
|
|
355
|
+
});
|
|
356
|
+
}, [upsertMutation]);
|
|
357
|
+
return {
|
|
358
|
+
models: models ?? [],
|
|
359
|
+
isLoading: models === void 0,
|
|
360
|
+
upsertModel
|
|
361
|
+
};
|
|
362
|
+
}
|
|
363
|
+
|
|
364
|
+
// src/hooks/useImageGeneration.ts
|
|
365
|
+
var import_react10 = require("react");
|
|
366
|
+
var import_react11 = require("convex/react");
|
|
367
|
+
function useImageGeneration(options) {
|
|
368
|
+
const [images, setImages] = (0, import_react10.useState)([]);
|
|
369
|
+
const [isGenerating, setIsGenerating] = (0, import_react10.useState)(false);
|
|
370
|
+
const [error, setError] = (0, import_react10.useState)(null);
|
|
371
|
+
const action = (0, import_react11.useAction)(options.generateImageAction);
|
|
372
|
+
const generate = (0, import_react10.useCallback)(async (prompt, opts) => {
|
|
373
|
+
setIsGenerating(true);
|
|
374
|
+
setError(null);
|
|
375
|
+
try {
|
|
376
|
+
const model = opts?.model ?? options.defaultModel ?? "dall-e-3";
|
|
377
|
+
const result = await action({
|
|
378
|
+
prompt,
|
|
379
|
+
model,
|
|
380
|
+
negativePrompt: opts?.negativePrompt,
|
|
381
|
+
size: opts?.size ?? "1024x1024",
|
|
382
|
+
quality: opts?.quality,
|
|
383
|
+
style: opts?.style,
|
|
384
|
+
n: opts?.n ?? 1,
|
|
385
|
+
seed: opts?.seed
|
|
386
|
+
});
|
|
387
|
+
const img = {
|
|
388
|
+
url: result.startsWith("http") ? result : `data:image/png;base64,${result}`,
|
|
389
|
+
prompt,
|
|
390
|
+
model,
|
|
391
|
+
timestamp: Date.now()
|
|
392
|
+
};
|
|
393
|
+
setImages((prev) => [img, ...prev]);
|
|
394
|
+
return img.url;
|
|
395
|
+
} catch (err) {
|
|
396
|
+
const msg = err instanceof Error ? err.message : "Image generation failed";
|
|
397
|
+
setError(msg);
|
|
398
|
+
throw err;
|
|
399
|
+
} finally {
|
|
400
|
+
setIsGenerating(false);
|
|
401
|
+
}
|
|
402
|
+
}, [action, options.defaultModel]);
|
|
403
|
+
return {
|
|
404
|
+
generate,
|
|
405
|
+
images,
|
|
406
|
+
isGenerating,
|
|
407
|
+
error,
|
|
408
|
+
clearImages: () => setImages([]),
|
|
409
|
+
clearError: () => setError(null)
|
|
410
|
+
};
|
|
411
|
+
}
|
|
412
|
+
|
|
413
|
+
// src/hooks/useTextToSpeech.ts
|
|
414
|
+
var import_react12 = require("react");
|
|
415
|
+
var import_react13 = require("convex/react");
|
|
416
|
+
function useTextToSpeech(options) {
|
|
417
|
+
const [isGenerating, setIsGenerating] = (0, import_react12.useState)(false);
|
|
418
|
+
const [isSpeaking, setIsSpeaking] = (0, import_react12.useState)(false);
|
|
419
|
+
const [error, setError] = (0, import_react12.useState)(null);
|
|
420
|
+
const [audioUrl, setAudioUrl] = (0, import_react12.useState)(null);
|
|
421
|
+
const audioRef = (0, import_react12.useRef)(null);
|
|
422
|
+
const action = (0, import_react13.useAction)(options.generateAudioAction);
|
|
423
|
+
const speak = (0, import_react12.useCallback)(async (text, opts) => {
|
|
424
|
+
setIsGenerating(true);
|
|
425
|
+
setError(null);
|
|
426
|
+
try {
|
|
427
|
+
const base64 = await action({
|
|
428
|
+
prompt: text,
|
|
429
|
+
voice: opts?.voice ?? options.defaultVoice ?? "alloy",
|
|
430
|
+
model: opts?.model ?? options.defaultModel,
|
|
431
|
+
speed: opts?.speed,
|
|
432
|
+
voiceSettings: opts?.voiceSettings
|
|
433
|
+
});
|
|
434
|
+
const url = base64.startsWith("http") ? base64 : `data:audio/mp3;base64,${base64}`;
|
|
435
|
+
setAudioUrl(url);
|
|
436
|
+
if (options.autoPlay !== false) {
|
|
437
|
+
if (audioRef.current) {
|
|
438
|
+
audioRef.current.pause();
|
|
439
|
+
}
|
|
440
|
+
const audio = new Audio(url);
|
|
441
|
+
audioRef.current = audio;
|
|
442
|
+
setIsSpeaking(true);
|
|
443
|
+
audio.onended = () => setIsSpeaking(false);
|
|
444
|
+
audio.onerror = () => {
|
|
445
|
+
setIsSpeaking(false);
|
|
446
|
+
setError("Audio playback failed");
|
|
447
|
+
};
|
|
448
|
+
await audio.play();
|
|
449
|
+
}
|
|
450
|
+
return url;
|
|
451
|
+
} catch (err) {
|
|
452
|
+
const msg = err instanceof Error ? err.message : "TTS generation failed";
|
|
453
|
+
setError(msg);
|
|
454
|
+
throw err;
|
|
455
|
+
} finally {
|
|
456
|
+
setIsGenerating(false);
|
|
457
|
+
}
|
|
458
|
+
}, [action, options]);
|
|
459
|
+
const stop = (0, import_react12.useCallback)(() => {
|
|
460
|
+
if (audioRef.current) {
|
|
461
|
+
audioRef.current.pause();
|
|
462
|
+
audioRef.current = null;
|
|
463
|
+
}
|
|
464
|
+
setIsSpeaking(false);
|
|
465
|
+
}, []);
|
|
466
|
+
return {
|
|
467
|
+
speak,
|
|
468
|
+
stop,
|
|
469
|
+
isSpeaking,
|
|
470
|
+
isGenerating,
|
|
471
|
+
error,
|
|
472
|
+
audioUrl,
|
|
473
|
+
clearError: () => setError(null)
|
|
474
|
+
};
|
|
475
|
+
}
|
|
476
|
+
|
|
477
|
+
// src/hooks/useVideoGeneration.ts
|
|
478
|
+
var import_react14 = require("react");
|
|
479
|
+
var import_react15 = require("convex/react");
|
|
480
|
+
/**
 * React hook for generating videos through a Convex action.
 *
 * options:
 *   - generateVideoAction: Convex action reference; receives the prompt plus
 *     generation parameters and resolves to the video URL.
 *   - defaultModel: model used when a call does not specify one
 *     (falls back to "minimax/video-01").
 */
function useVideoGeneration(options) {
  // Generated clips, newest first.
  const [videos, setVideos] = (0, import_react14.useState)([]);
  const [isGenerating, setIsGenerating] = (0, import_react14.useState)(false);
  const [error, setError] = (0, import_react14.useState)(null);
  const action = (0, import_react15.useAction)(options.generateVideoAction);
  // Run one generation; resolves to the video URL and records it in `videos`.
  const generate = (0, import_react14.useCallback)(async (prompt, opts) => {
    setIsGenerating(true);
    setError(null);
    try {
      const chosenModel = opts?.model ?? options.defaultModel ?? "minimax/video-01";
      const url = await action({
        prompt,
        model: chosenModel,
        duration: opts?.duration,
        aspectRatio: opts?.aspectRatio,
        resolution: opts?.resolution,
        startImage: opts?.startImage,
        endImage: opts?.endImage
      });
      // Build the history entry outside the updater so Date.now() runs once.
      const entry = { url, prompt, model: chosenModel, timestamp: Date.now() };
      setVideos((existing) => [entry, ...existing]);
      return url;
    } catch (err) {
      setError(err instanceof Error ? err.message : "Video generation failed");
      throw err;
    } finally {
      setIsGenerating(false);
    }
  }, [action, options.defaultModel]);
  return {
    generate,
    videos,
    isGenerating,
    error,
    clearVideos: () => setVideos([]),
    clearError: () => setError(null)
  };
}
|
|
519
|
+
|
|
520
|
+
// src/hooks/useTranscription.ts
|
|
521
|
+
var import_react16 = require("react");
|
|
522
|
+
var import_react17 = require("convex/react");
|
|
523
|
+
/**
 * React hook for audio transcription via a Convex action, with optional
 * in-browser microphone capture (MediaRecorder -> base64-encoded webm).
 *
 * options:
 *   - transcribeAction: Convex action reference accepting
 *     { audio, model, language, prompt } and resolving to the transcript text.
 *   - defaultModel: model used when a call does not specify one.
 *   - enableMicrophone: must be truthy before startRecording may be used.
 */
function useTranscription(options) {
  const [isTranscribing, setIsTranscribing] = (0, import_react16.useState)(false);
  const [isRecording, setIsRecording] = (0, import_react16.useState)(false);
  const [lastResult, setLastResult] = (0, import_react16.useState)(null);
  const [error, setError] = (0, import_react16.useState)(null);
  // Active recorder while a capture session is in progress, null otherwise.
  const mediaRecorderRef = (0, import_react16.useRef)(null);
  // Audio chunks accumulated by the current capture session.
  const chunksRef = (0, import_react16.useRef)([]);
  const action = (0, import_react17.useAction)(options.transcribeAction);
  // Send base64-encoded audio to the backend; resolves to the transcript text.
  const transcribe = (0, import_react16.useCallback)(async (audioBase64, opts) => {
    setIsTranscribing(true);
    setError(null);
    const start = Date.now();
    try {
      const text = await action({
        audio: audioBase64,
        model: opts?.model ?? options.defaultModel,
        language: opts?.language,
        prompt: opts?.prompt
      });
      setLastResult({ text, timestamp: Date.now(), durationMs: Date.now() - start });
      return text;
    } catch (err) {
      const msg = err instanceof Error ? err.message : "Transcription failed";
      setError(msg);
      throw err;
    } finally {
      setIsTranscribing(false);
    }
  }, [action, options.defaultModel]);
  // Begin capturing microphone audio. Throws if the microphone is disabled or
  // a capture session is already active (previously a second call silently
  // orphaned the first recorder and leaked its media stream).
  const startRecording = (0, import_react16.useCallback)(async () => {
    if (!options.enableMicrophone) throw new Error("Microphone not enabled");
    if (mediaRecorderRef.current) throw new Error("Already recording");
    const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
    const mediaRecorder = new MediaRecorder(stream, { mimeType: "audio/webm" });
    chunksRef.current = [];
    mediaRecorder.ondataavailable = (e) => {
      if (e.data.size > 0) chunksRef.current.push(e.data);
    };
    mediaRecorder.start();
    mediaRecorderRef.current = mediaRecorder;
    setIsRecording(true);
  }, [options.enableMicrophone]);
  // Stop capturing, encode the recording as base64, and transcribe it.
  // Resolves with the transcript text; rejects if nothing is being recorded.
  const stopRecording = (0, import_react16.useCallback)(async () => {
    return new Promise((resolve, reject) => {
      const recorder = mediaRecorderRef.current;
      if (!recorder) {
        reject(new Error("Not recording"));
        return;
      }
      // Clear the ref immediately so a concurrent stopRecording call rejects
      // with "Not recording" instead of calling stop() on an inactive
      // recorder (which throws InvalidStateError).
      mediaRecorderRef.current = null;
      recorder.onstop = async () => {
        setIsRecording(false);
        const blob = new Blob(chunksRef.current, { type: "audio/webm" });
        // Free the captured chunks as soon as they are merged into the blob.
        chunksRef.current = [];
        const arrayBuffer = await blob.arrayBuffer();
        const bytes = new Uint8Array(arrayBuffer);
        let binary = "";
        for (let i = 0; i < bytes.length; i++) {
          binary += String.fromCharCode(bytes[i]);
        }
        const base64 = btoa(binary);
        // Release the microphone before the (potentially slow) network call.
        recorder.stream.getTracks().forEach((t) => t.stop());
        try {
          const text = await transcribe(base64);
          resolve(text);
        } catch (err) {
          reject(err);
        }
      };
      recorder.stop();
    });
  }, [transcribe]);
  return {
    transcribe,
    startRecording,
    stopRecording,
    isTranscribing,
    isRecording,
    lastResult,
    error,
    clearError: () => setError(null)
  };
}
|
|
603
|
+
|
|
604
|
+
// src/hooks/useRealtimeAudio.ts
|
|
605
|
+
var import_react18 = require("react");
|
|
606
|
+
/**
 * React hook managing a realtime speech WebSocket session: opens the socket,
 * sends a session.update config, streams microphone PCM16 audio up, and
 * surfaces text deltas / speech events coming back.
 *
 * NOTE(review): `transcript` state is returned but never written by any
 * handler here, and `playAudioDelta` is an empty stub, so incoming
 * "response.audio.delta" payloads are dropped — confirm whether audio
 * playback is implemented elsewhere.
 */
function useRealtimeAudio(options) {
  const [isConnected, setIsConnected] = (0, import_react18.useState)(false);
  const [isSpeaking, setIsSpeaking] = (0, import_react18.useState)(false);
  const [isListening, setIsListening] = (0, import_react18.useState)(false);
  const [error, setError] = (0, import_react18.useState)(null);
  const [transcript, setTranscript] = (0, import_react18.useState)("");
  // Accumulates "response.text.delta" payloads; reset when the text is done.
  const [response, setResponse] = (0, import_react18.useState)("");
  const wsRef = (0, import_react18.useRef)(null);
  const audioContextRef = (0, import_react18.useRef)(null);
  const streamRef = (0, import_react18.useRef)(null);
  const processorRef = (0, import_react18.useRef)(null);
  // Open the WebSocket and install all event handlers. References
  // startAudioCapture / playAudioDelta / stopAudioCapture declared below;
  // safe at runtime because the handlers only fire after this component's
  // render has initialized every const.
  const connect = (0, import_react18.useCallback)(async () => {
    try {
      setError(null);
      let wsUrl = options.wsUrl ?? "wss://api.openai.com/v1/realtime";
      // Optionally let the backend mint a session (e.g. a pre-authorized URL).
      // Assumes the default URL is reachable without extra auth headers
      // when no getSessionAction is supplied — TODO confirm.
      if (options.getSessionAction) {
        const session = await options.getSessionAction({
          model: options.model ?? "gpt-4o-realtime-preview",
          voice: options.voice ?? "alloy",
          instructions: options.instructions
        });
        wsUrl = session.url ?? wsUrl;
      }
      const ws = new WebSocket(wsUrl);
      wsRef.current = ws;
      ws.onopen = () => {
        setIsConnected(true);
        // Configure the session as soon as the socket opens.
        ws.send(JSON.stringify({
          type: "session.update",
          session: {
            model: options.model ?? "gpt-4o-realtime-preview",
            voice: options.voice ?? "alloy",
            instructions: options.instructions ?? "You are a helpful assistant.",
            input_audio_format: options.inputAudioFormat ?? "pcm16",
            output_audio_format: options.outputAudioFormat ?? "pcm16",
            // Server-side voice activity detection by default.
            turn_detection: options.turnDetection ?? {
              type: "server_vad",
              threshold: 0.5,
              prefix_padding_ms: 300,
              silence_duration_ms: 500
            },
            tools: options.tools ?? []
          }
        }));
        startAudioCapture(ws);
      };
      ws.onmessage = (event) => {
        const data = JSON.parse(event.data);
        // Raw event tap for callers before the built-in handling below.
        options.onEvent?.(data);
        switch (data.type) {
          case "input_audio_buffer.speech_started":
            setIsSpeaking(true);
            break;
          case "input_audio_buffer.speech_stopped":
            setIsSpeaking(false);
            break;
          case "response.text.delta":
            setResponse((prev) => prev + data.delta);
            break;
          case "response.text.done":
            // Hand the full text to the caller, then clear the accumulator.
            options.onResponse?.(data.text);
            setResponse("");
            break;
          case "response.audio.delta":
            // Currently a no-op — see playAudioDelta below.
            playAudioDelta(data.delta);
            break;
          case "error":
            setError(data.error.message);
            break;
        }
      };
      ws.onerror = () => setError("WebSocket connection error");
      ws.onclose = () => {
        setIsConnected(false);
        setIsListening(false);
        stopAudioCapture();
      };
    } catch (err) {
      setError(err instanceof Error ? err.message : "Failed to connect");
    }
  }, [options]);
  // Capture 24 kHz mono microphone audio, convert each buffer to PCM16,
  // base64-encode it, and stream it over the given socket.
  const startAudioCapture = (0, import_react18.useCallback)(async (ws) => {
    try {
      const stream = await navigator.mediaDevices.getUserMedia({ audio: { sampleRate: 24e3, channelCount: 1 } });
      streamRef.current = stream;
      const audioCtx = new AudioContext({ sampleRate: 24e3 });
      audioContextRef.current = audioCtx;
      const source = audioCtx.createMediaStreamSource(stream);
      // ScriptProcessorNode is deprecated but widely supported; 4096-frame
      // buffers, mono in/out.
      const processor = audioCtx.createScriptProcessor(4096, 1, 1);
      processorRef.current = processor;
      processor.onaudioprocess = (e) => {
        // Drop audio while the socket is not open.
        if (ws.readyState !== WebSocket.OPEN) return;
        const inputData = e.inputBuffer.getChannelData(0);
        // Float32 [-1, 1] -> signed 16-bit PCM (asymmetric scale avoids
        // overflowing +32767 on the positive side).
        const pcm16 = new Int16Array(inputData.length);
        for (let i = 0; i < inputData.length; i++) {
          const s = Math.max(-1, Math.min(1, inputData[i]));
          pcm16[i] = s < 0 ? s * 32768 : s * 32767;
        }
        // Byte-wise base64 encoding of the PCM buffer.
        const bytes = new Uint8Array(pcm16.buffer);
        let binary = "";
        for (let i = 0; i < bytes.length; i++) {
          binary += String.fromCharCode(bytes[i]);
        }
        ws.send(JSON.stringify({
          type: "input_audio_buffer.append",
          audio: btoa(binary)
        }));
      };
      source.connect(processor);
      // ScriptProcessor must be connected to a destination to fire events.
      processor.connect(audioCtx.destination);
      setIsListening(true);
    } catch (err) {
      setError(err instanceof Error ? err.message : "Microphone access denied");
    }
  }, []);
  // Tear down the audio pipeline: processor, mic tracks, audio context.
  const stopAudioCapture = (0, import_react18.useCallback)(() => {
    processorRef.current?.disconnect();
    processorRef.current = null;
    streamRef.current?.getTracks().forEach((t) => t.stop());
    streamRef.current = null;
    audioContextRef.current?.close();
    audioContextRef.current = null;
    setIsListening(false);
  }, []);
  // Stub: incoming audio deltas are intentionally ignored for now.
  const playAudioDelta = (0, import_react18.useCallback)((_delta) => {
  }, []);
  // Close the socket and release all audio resources.
  const disconnect = (0, import_react18.useCallback)(() => {
    wsRef.current?.close();
    wsRef.current = null;
    stopAudioCapture();
    setIsConnected(false);
  }, [stopAudioCapture]);
  // Disconnect on unmount. Empty deps: runs the cleanup exactly once with
  // the first render's `disconnect`, whose behavior only depends on refs.
  (0, import_react18.useEffect)(() => {
    return () => {
      disconnect();
    };
  }, []);
  return {
    connect,
    disconnect,
    isConnected,
    isSpeaking,
    isListening,
    error,
    transcript,
    response,
    clearError: () => setError(null)
  };
}
|
|
755
|
+
|
|
756
|
+
// src/hooks/useContentManager.ts
|
|
757
|
+
var import_react19 = require("react");
|
|
758
|
+
/**
 * Content-operation hook: wraps an injected generateFn with loading/error
 * state and exposes one helper per operation (generate, rewrite, translate,
 * summarize, variations, changeTone, proofread).
 *
 * options:
 *   - generateFn: async ({ action, input, type, tone, model, ... }) => result
 *   - defaultType / defaultTone / defaultModel: fallbacks applied when a call
 *     does not specify them (defaultType falls back to "text").
 */
function useContentManager(options) {
  const { generateFn, defaultType = "text", defaultTone, defaultModel } = options;
  const [result, setResult] = (0, import_react19.useState)(null);
  const [isGenerating, setIsGenerating] = (0, import_react19.useState)(false);
  const [error, setError] = (0, import_react19.useState)(null);
  // Shared runner: tracks isGenerating, normalizes thrown values to Error,
  // and caches the latest result.
  const execute = (0, import_react19.useCallback)(async (action, input, extra) => {
    setIsGenerating(true);
    setError(null);
    try {
      // Spread `extra` FIRST so the explicit keys below always win.
      // Previously `...extra` came last, which let an `extra` object carrying
      // `action`/`input` keys — or `type`/`tone`/`model` keys explicitly set
      // to undefined — clobber the resolved values.
      const res = await generateFn({
        ...extra,
        action,
        input,
        type: extra?.type ?? defaultType,
        tone: extra?.tone ?? defaultTone,
        model: extra?.model ?? defaultModel
      });
      setResult(res);
      return res;
    } catch (err) {
      const e = err instanceof Error ? err : new Error(String(err));
      setError(e);
      throw e;
    } finally {
      setIsGenerating(false);
    }
  }, [generateFn, defaultType, defaultTone, defaultModel]);
  // One thin wrapper per content operation; each maps to an `action` string.
  const generate = (0, import_react19.useCallback)((input, opts) => execute("generate", input, opts), [execute]);
  const rewrite = (0, import_react19.useCallback)((text, instructions) => execute("rewrite", text, { instructions }), [execute]);
  const translate = (0, import_react19.useCallback)((text, language) => execute("translate", text, { language }), [execute]);
  const summarize = (0, import_react19.useCallback)((text) => execute("summarize", text), [execute]);
  const variations = (0, import_react19.useCallback)((text, count = 3) => execute("variations", text, { variations: count }), [execute]);
  const changeTone = (0, import_react19.useCallback)((text, tone) => execute("change-tone", text, { tone }), [execute]);
  const proofread = (0, import_react19.useCallback)((text) => execute("proofread", text), [execute]);
  // Drop the cached result and error (does not cancel an in-flight call).
  const reset = (0, import_react19.useCallback)(() => {
    setResult(null);
    setError(null);
  }, []);
  return {
    generate,
    rewrite,
    translate,
    summarize,
    variations,
    changeTone,
    proofread,
    result,
    isGenerating,
    error,
    reset
  };
}
|
|
810
|
+
|
|
811
|
+
// src/hooks/useMemory.ts
|
|
812
|
+
var import_react20 = require("react");
|
|
813
|
+
/**
 * CRUD-style hook over injected memory persistence callbacks
 * (storeFn / searchFn / deleteFn / clearFn). Maintains a local `memories`
 * cache that mirrors the operations it performs.
 */
function useMemory(options) {
  const {
    storeFn,
    searchFn,
    deleteFn,
    clearFn,
    defaultNamespace = "user",
    defaultScopeId = ""
  } = options;
  const [memories, setMemories] = (0, import_react20.useState)([]);
  const [isLoading, setIsLoading] = (0, import_react20.useState)(false);
  const [error, setError] = (0, import_react20.useState)(null);
  // Persist one key/value memory and append it to the local cache.
  const store = (0, import_react20.useCallback)(async (key, value, opts) => {
    setIsLoading(true);
    setError(null);
    try {
      const saved = await storeFn({
        namespace: opts?.namespace ?? defaultNamespace,
        type: opts?.type ?? "fact",
        importance: opts?.importance ?? "medium",
        key,
        value,
        metadata: opts?.metadata,
        scopeId: opts?.scopeId ?? defaultScopeId
      });
      setMemories((cached) => [...cached, saved]);
      return saved;
    } catch (err) {
      const failure = err instanceof Error ? err : new Error(String(err));
      setError(failure);
      throw failure;
    } finally {
      setIsLoading(false);
    }
  }, [storeFn, defaultNamespace, defaultScopeId]);
  // Look up a single memory by exact key; resolves to null when none match.
  const recall = (0, import_react20.useCallback)(async (key, namespace) => {
    setIsLoading(true);
    setError(null);
    try {
      const hits = await searchFn({
        namespace: namespace ?? defaultNamespace,
        key,
        scopeId: defaultScopeId,
        limit: 1
      });
      return hits.length > 0 ? hits[0] : null;
    } catch (err) {
      const failure = err instanceof Error ? err : new Error(String(err));
      setError(failure);
      throw failure;
    } finally {
      setIsLoading(false);
    }
  }, [searchFn, defaultNamespace, defaultScopeId]);
  // Free-form search; replaces the local cache with the results.
  const search = (0, import_react20.useCallback)(async (query) => {
    setIsLoading(true);
    setError(null);
    try {
      const hits = await searchFn({
        scopeId: defaultScopeId,
        ...query
      });
      setMemories(hits);
      return hits;
    } catch (err) {
      const failure = err instanceof Error ? err : new Error(String(err));
      setError(failure);
      throw failure;
    } finally {
      setIsLoading(false);
    }
  }, [searchFn, defaultScopeId]);
  // Delete one memory by id and drop it from the cache.
  const remove = (0, import_react20.useCallback)(async (id) => {
    setError(null);
    try {
      await deleteFn(id);
      setMemories((cached) => cached.filter((entry) => entry.id !== id));
    } catch (err) {
      const failure = err instanceof Error ? err : new Error(String(err));
      setError(failure);
      throw failure;
    }
  }, [deleteFn]);
  // Wipe an entire namespace (the default namespace when none is given).
  const clear = (0, import_react20.useCallback)(async (namespace) => {
    setError(null);
    try {
      await clearFn(namespace ?? defaultNamespace, defaultScopeId);
      setMemories([]);
    } catch (err) {
      const failure = err instanceof Error ? err : new Error(String(err));
      setError(failure);
      throw failure;
    }
  }, [clearFn, defaultNamespace, defaultScopeId]);
  return { store, recall, search, remove, clear, memories, isLoading, error };
}
|
|
909
|
+
|
|
910
|
+
// src/hooks/useSkills.ts
|
|
911
|
+
var import_react21 = require("react");
|
|
912
|
+
var import_ai_shared = require("@geenius/ai-shared");
|
|
913
|
+
/**
 * Hook exposing the merged skill catalog (built-ins + custom skills) plus an
 * execute() runner backed by the injected executeFn.
 */
function useSkills(options) {
  const { executeFn, customSkills = [], defaultModel, userId } = options;
  const [result, setResult] = (0, import_react21.useState)(null);
  const [isExecuting, setIsExecuting] = (0, import_react21.useState)(false);
  const [error, setError] = (0, import_react21.useState)(null);
  // Built-ins go in first so a custom skill with the same id overrides it.
  const allSkills = (0, import_react21.useMemo)(() => {
    const registry = new Map();
    for (const builtIn of Object.values(import_ai_shared.BUILT_IN_SKILLS)) {
      registry.set(builtIn.id, builtIn);
    }
    for (const custom of customSkills) {
      registry.set(custom.id, custom);
    }
    return [...registry.values()];
  }, [customSkills]);
  const byCategory = (0, import_react21.useCallback)(
    (category) => allSkills.filter((skill) => skill.category === category),
    [allSkills]
  );
  // Case-insensitive match on name, description, or any tag.
  const searchSkills = (0, import_react21.useCallback)((query) => {
    const needle = query.toLowerCase();
    const matches = (skill) => skill.name.toLowerCase().includes(needle) || skill.description.toLowerCase().includes(needle) || skill.tags?.some((tag) => tag.toLowerCase().includes(needle));
    return allSkills.filter(matches);
  }, [allSkills]);
  const getSkill = (0, import_react21.useCallback)(
    (id) => allSkills.find((skill) => skill.id === id),
    [allSkills]
  );
  // Run a skill through the backend and cache its result locally.
  const execute = (0, import_react21.useCallback)(async (skillId, params, context) => {
    setIsExecuting(true);
    setError(null);
    try {
      const outcome = await executeFn({
        skillId,
        params,
        userId,
        context,
        model: defaultModel
      });
      setResult(outcome);
      return outcome;
    } catch (err) {
      const failure = err instanceof Error ? err : new Error(String(err));
      setError(failure);
      throw failure;
    } finally {
      setIsExecuting(false);
    }
  }, [executeFn, userId, defaultModel]);
  // Drop the cached result and error.
  const reset = (0, import_react21.useCallback)(() => {
    setResult(null);
    setError(null);
  }, []);
  return {
    skills: allSkills,
    byCategory,
    search: searchSkills,
    getSkill,
    execute,
    result,
    isExecuting,
    error,
    reset
  };
}
|
|
969
|
+
|
|
970
|
+
// src/components/ChatWindow.tsx
|
|
971
|
+
var import_react22 = require("react");
|
|
972
|
+
var import_jsx_runtime = require("react/jsx-runtime");
|
|
973
|
+
/**
 * Ready-made chat UI wired to useChat. Unstyled; exposes data-ai-* hooks for
 * CSS and optional renderMessage / renderInput overrides.
 */
function ChatWindow(props) {
  // Draft text for the input box.
  const [input, setInput] = (0, import_react22.useState)("");
  const messagesEndRef = (0, import_react22.useRef)(null);
  const chat = useChat(props);
  // Keep the newest message scrolled into view as the list grows.
  (0, import_react22.useEffect)(() => {
    messagesEndRef.current?.scrollIntoView({ behavior: "smooth" });
  }, [chat.messages.length]);
  // Submit the trimmed draft; no-ops while empty or already sending.
  const handleSubmit = async (e) => {
    e?.preventDefault();
    if (!input.trim() || chat.isSending) return;
    const content = input.trim();
    setInput("");
    await chat.sendMessage(content);
  };
  return /* @__PURE__ */ (0, import_jsx_runtime.jsxs)("div", { className: props.className, "data-ai-component": "chat-window", children: [
    /* @__PURE__ */ (0, import_jsx_runtime.jsxs)("div", { "data-ai-messages": true, children: [
      chat.messages.length === 0 && /* @__PURE__ */ (0, import_jsx_runtime.jsx)("div", { "data-ai-empty": true, children: /* @__PURE__ */ (0, import_jsx_runtime.jsx)("p", { children: "Start a conversation" }) }),
      chat.messages.map(
        (msg) => props.renderMessage ? props.renderMessage(msg) : /* @__PURE__ */ (0, import_jsx_runtime.jsxs)(
          "div",
          {
            "data-ai-message": true,
            "data-ai-role": msg.role,
            children: [
              /* @__PURE__ */ (0, import_jsx_runtime.jsx)("div", { "data-ai-message-role": true, children: msg.role }),
              /* @__PURE__ */ (0, import_jsx_runtime.jsx)("div", { "data-ai-message-content": true, children: msg.content }),
              // Explicit comparison: `msg.tokens && <span>` rendered a literal
              // "0" text node when tokens === 0, because React renders the
              // number produced by the && short-circuit.
              (msg.tokens ?? 0) > 0 && /* @__PURE__ */ (0, import_jsx_runtime.jsxs)("span", { "data-ai-message-tokens": true, children: [
                msg.tokens,
                " tokens"
              ] })
            ]
          },
          msg.id
        )
      ),
      chat.isSending && /* @__PURE__ */ (0, import_jsx_runtime.jsx)("div", { "data-ai-message": true, "data-ai-role": "assistant", "data-ai-loading": true, children: /* @__PURE__ */ (0, import_jsx_runtime.jsxs)("div", { "data-ai-typing-indicator": true, children: [
        /* @__PURE__ */ (0, import_jsx_runtime.jsx)("span", {}),
        /* @__PURE__ */ (0, import_jsx_runtime.jsx)("span", {}),
        /* @__PURE__ */ (0, import_jsx_runtime.jsx)("span", {})
      ] }) }),
      /* @__PURE__ */ (0, import_jsx_runtime.jsx)("div", { ref: messagesEndRef })
    ] }),
    chat.error && /* @__PURE__ */ (0, import_jsx_runtime.jsxs)("div", { "data-ai-error": true, role: "alert", children: [
      /* @__PURE__ */ (0, import_jsx_runtime.jsx)("span", { children: chat.error }),
      /* @__PURE__ */ (0, import_jsx_runtime.jsx)("button", { onClick: chat.clearError, "data-ai-dismiss": true, children: "\xD7" })
    ] }),
    props.renderInput ? props.renderInput({
      value: input,
      onChange: setInput,
      onSubmit: handleSubmit,
      isSending: chat.isSending
    }) : /* @__PURE__ */ (0, import_jsx_runtime.jsxs)("form", { onSubmit: handleSubmit, "data-ai-input-form": true, children: [
      /* @__PURE__ */ (0, import_jsx_runtime.jsx)(
        "textarea",
        {
          value: input,
          onChange: (e) => setInput(e.target.value),
          placeholder: "Type a message\u2026",
          disabled: chat.isSending,
          "data-ai-input": true,
          onKeyDown: (e) => {
            // Enter sends; Shift+Enter inserts a newline.
            if (e.key === "Enter" && !e.shiftKey) {
              e.preventDefault();
              handleSubmit();
            }
          }
        }
      ),
      /* @__PURE__ */ (0, import_jsx_runtime.jsx)(
        "button",
        {
          type: "submit",
          disabled: chat.isSending || !input.trim(),
          "data-ai-send": true,
          children: chat.isSending ? "Sending\u2026" : "Send"
        }
      )
    ] })
  ] });
}
|
|
1053
|
+
|
|
1054
|
+
// src/components/ModelSelector.tsx
|
|
1055
|
+
var import_jsx_runtime2 = require("react/jsx-runtime");
|
|
1056
|
+
/**
 * Native <select> listing active AI models, optionally narrowed to a single
 * provider and optionally annotated with per-1k token costs.
 */
function ModelSelector(props) {
  // Only active models are offered; an unset filterProvider means "all".
  const selectable = props.models.filter(
    (m) => m.isActive && (!props.filterProvider || m.provider === props.filterProvider)
  );
  const costSuffix = (m) => props.showCost ? ` ($${m.inputCostPer1k}/$${m.outputCostPer1k}/1k)` : "";
  return /* @__PURE__ */ (0, import_jsx_runtime2.jsxs)(
    "select",
    {
      className: props.className,
      value: props.selectedModel ?? "",
      onChange: (event) => props.onSelect(event.target.value),
      disabled: props.disabled,
      "data-ai-component": "model-selector",
      children: [
        /* @__PURE__ */ (0, import_jsx_runtime2.jsx)("option", { value: "", disabled: true, children: "Select a model\u2026" }),
        selectable.map((m) => /* @__PURE__ */ (0, import_jsx_runtime2.jsxs)("option", {
          value: m.id,
          "data-ai-provider": m.provider,
          children: [m.displayName ?? m.name, costSuffix(m)]
        }, m.id))
      ]
    }
  );
}
|
|
1077
|
+
|
|
1078
|
+
// src/components/AILogTable.tsx
|
|
1079
|
+
var import_jsx_runtime3 = require("react/jsx-runtime");
|
|
1080
|
+
// Format a millisecond duration for display: whole milliseconds below one
// second ("500ms"), otherwise seconds with one decimal ("1.5s").
function formatDuration(ms) {
  return ms < 1e3 ? `${ms}ms` : `${(ms / 1e3).toFixed(1)}s`;
}
|
|
1084
|
+
// Format a USD cost for display. Zero/missing costs render as an em dash;
// sub-cent amounts keep four decimals so tiny per-call costs stay visible.
function formatCost(usd) {
  if (!usd) return "\u2014";
  return usd < 0.01 ? `$${usd.toFixed(4)}` : `$${usd.toFixed(2)}`;
}
|
|
1089
|
+
// Small inline badge: a check mark for "success", a cross for anything else,
// followed by the raw status string.
function StatusBadge({ status }) {
  const mark = status === "success" ? "\u2713" : "\u2717";
  return /* @__PURE__ */ (0, import_jsx_runtime3.jsxs)("span", { "data-ai-status": status, children: [mark, " ", status] });
}
|
|
1096
|
+
/**
 * Table of AI request logs. Renders loading/empty placeholders, otherwise one
 * row per log entry (click forwards the entry to props.onRowClick).
 */
function AILogTable(props) {
  const { jsx: h, jsxs: hs } = import_jsx_runtime3;
  if (props.isLoading) {
    return h("div", { "data-ai-component": "log-table", "data-ai-loading": true, children: "Loading logs\u2026" });
  }
  if (props.logs.length === 0) {
    return h("div", { "data-ai-component": "log-table", "data-ai-empty": true, children: "No AI logs yet" });
  }
  const headerCells = ["Time", "Model", "Provider", "Caller", "Status", "Duration", "Tokens", "Cost"].map(
    (label) => h("th", { children: label }, label)
  );
  const renderRow = (log) => hs(
    "tr",
    {
      onClick: () => props.onRowClick?.(log),
      "data-ai-log-row": true,
      "data-ai-status": log.status,
      children: [
        h("td", { "data-ai-cell": "time", children: new Date(log.timestamp).toLocaleTimeString() }),
        h("td", { "data-ai-cell": "model", children: log.model }),
        h("td", { "data-ai-cell": "provider", children: log.provider }),
        h("td", { "data-ai-cell": "caller", children: log.caller }),
        h("td", { "data-ai-cell": "status", children: h(StatusBadge, { status: log.status }) }),
        h("td", { "data-ai-cell": "duration", children: formatDuration(log.durationMs) }),
        h("td", { "data-ai-cell": "tokens", children: log.totalTokens ?? "\u2014" }),
        h("td", { "data-ai-cell": "cost", children: formatCost(log.totalCostUsd) })
      ]
    },
    log.requestId
  );
  return h("div", { className: props.className, "data-ai-component": "log-table", children: hs("table", { "data-ai-table": true, children: [
    h("thead", { children: hs("tr", { children: headerCells }) }),
    h("tbody", { children: props.logs.map(renderRow) })
  ] }) });
}
|
|
1135
|
+
|
|
1136
|
+
// src/components/ModelTestRunner.tsx
|
|
1137
|
+
var import_react23 = require("react");
|
|
1138
|
+
var import_jsx_runtime4 = require("react/jsx-runtime");
|
|
1139
|
+
// Tab definitions for ModelTestRunner: one entry per supported modality.
// Each `id` is also passed as the test-type argument to useModelTest.runTest.
var TABS = [
  { id: "text", label: "Text Generation" },
  { id: "image", label: "Image Generation" },
  { id: "audio", label: "Text-to-Speech" },
  { id: "transcription", label: "Audio-to-Text" },
  { id: "video", label: "Video Generation" }
];
|
|
1146
|
+
function ModelTestRunner(props) {
|
|
1147
|
+
const [activeTab, setActiveTab] = (0, import_react23.useState)(props.defaultTab ?? "text");
|
|
1148
|
+
const [prompt, setPrompt] = (0, import_react23.useState)("");
|
|
1149
|
+
const [selectedModel, setSelectedModel] = (0, import_react23.useState)("");
|
|
1150
|
+
const [batchMode, setBatchMode] = (0, import_react23.useState)(false);
|
|
1151
|
+
const test = useModelTest(props);
|
|
1152
|
+
const handleSubmit = async (e) => {
|
|
1153
|
+
e.preventDefault();
|
|
1154
|
+
if (!prompt.trim()) return;
|
|
1155
|
+
if (batchMode && props.availableModels) {
|
|
1156
|
+
await test.runBatchTest(props.availableModels, prompt.trim());
|
|
1157
|
+
} else if (selectedModel) {
|
|
1158
|
+
await test.runTest(selectedModel, prompt.trim(), activeTab);
|
|
1159
|
+
}
|
|
1160
|
+
};
|
|
1161
|
+
return /* @__PURE__ */ (0, import_jsx_runtime4.jsxs)("div", { className: props.className, "data-ai-component": "model-test-runner", children: [
|
|
1162
|
+
/* @__PURE__ */ (0, import_jsx_runtime4.jsx)("div", { "data-ai-tabs": true, role: "tablist", children: TABS.map((tab) => /* @__PURE__ */ (0, import_jsx_runtime4.jsx)(
|
|
1163
|
+
"button",
|
|
1164
|
+
{
|
|
1165
|
+
role: "tab",
|
|
1166
|
+
"aria-selected": activeTab === tab.id,
|
|
1167
|
+
onClick: () => setActiveTab(tab.id),
|
|
1168
|
+
"data-ai-tab": true,
|
|
1169
|
+
"data-ai-tab-active": activeTab === tab.id ? "" : void 0,
|
|
1170
|
+
children: tab.label
|
|
1171
|
+
},
|
|
1172
|
+
tab.id
|
|
1173
|
+
)) }),
|
|
1174
|
+
/* @__PURE__ */ (0, import_jsx_runtime4.jsxs)("form", { onSubmit: handleSubmit, "data-ai-test-form": true, children: [
|
|
1175
|
+
/* @__PURE__ */ (0, import_jsx_runtime4.jsxs)("div", { "data-ai-field": "model", children: [
|
|
1176
|
+
/* @__PURE__ */ (0, import_jsx_runtime4.jsx)("label", { htmlFor: "test-model", children: "Model" }),
|
|
1177
|
+
/* @__PURE__ */ (0, import_jsx_runtime4.jsxs)(
|
|
1178
|
+
"select",
|
|
1179
|
+
{
|
|
1180
|
+
id: "test-model",
|
|
1181
|
+
value: selectedModel,
|
|
1182
|
+
onChange: (e) => setSelectedModel(e.target.value),
|
|
1183
|
+
disabled: test.isRunning || batchMode,
|
|
1184
|
+
"data-ai-input": true,
|
|
1185
|
+
children: [
|
|
1186
|
+
/* @__PURE__ */ (0, import_jsx_runtime4.jsx)("option", { value: "", children: "Select model\u2026" }),
|
|
1187
|
+
(props.availableModels ?? []).map((m) => /* @__PURE__ */ (0, import_jsx_runtime4.jsx)("option", { value: m, children: m }, m))
|
|
1188
|
+
]
|
|
1189
|
+
}
|
|
1190
|
+
)
|
|
1191
|
+
] }),
|
|
1192
|
+
/* @__PURE__ */ (0, import_jsx_runtime4.jsxs)("div", { "data-ai-field": "prompt", children: [
|
|
1193
|
+
/* @__PURE__ */ (0, import_jsx_runtime4.jsx)("label", { htmlFor: "test-prompt", children: "Prompt" }),
|
|
1194
|
+
/* @__PURE__ */ (0, import_jsx_runtime4.jsx)(
|
|
1195
|
+
"textarea",
|
|
1196
|
+
{
|
|
1197
|
+
id: "test-prompt",
|
|
1198
|
+
value: prompt,
|
|
1199
|
+
onChange: (e) => setPrompt(e.target.value),
|
|
1200
|
+
placeholder: "Enter your test prompt\u2026",
|
|
1201
|
+
disabled: test.isRunning,
|
|
1202
|
+
"data-ai-input": true
|
|
1203
|
+
}
|
|
1204
|
+
)
|
|
1205
|
+
] }),
|
|
1206
|
+
/* @__PURE__ */ (0, import_jsx_runtime4.jsxs)("div", { "data-ai-actions": true, children: [
|
|
1207
|
+
/* @__PURE__ */ (0, import_jsx_runtime4.jsxs)("label", { "data-ai-toggle": true, children: [
|
|
1208
|
+
/* @__PURE__ */ (0, import_jsx_runtime4.jsx)(
|
|
1209
|
+
"input",
|
|
1210
|
+
{
|
|
1211
|
+
type: "checkbox",
|
|
1212
|
+
checked: batchMode,
|
|
1213
|
+
onChange: (e) => setBatchMode(e.target.checked),
|
|
1214
|
+
disabled: test.isRunning
|
|
1215
|
+
}
|
|
1216
|
+
),
|
|
1217
|
+
"Test all models"
|
|
1218
|
+
] }),
|
|
1219
|
+
/* @__PURE__ */ (0, import_jsx_runtime4.jsx)("button", { type: "submit", disabled: test.isRunning || !prompt.trim(), "data-ai-submit": true, children: test.isRunning ? "Running\u2026" : batchMode ? "Run All" : "Run Test" }),
|
|
1220
|
+
test.results.length > 0 && /* @__PURE__ */ (0, import_jsx_runtime4.jsx)("button", { type: "button", onClick: test.clearResults, "data-ai-clear": true, children: "Clear Results" })
|
|
1221
|
+
] })
|
|
1222
|
+
] }),
|
|
1223
|
+
test.results.length > 0 && /* @__PURE__ */ (0, import_jsx_runtime4.jsx)("div", { "data-ai-test-results": true, children: test.results.map((result, i) => props.renderResult ? props.renderResult(result) : /* @__PURE__ */ (0, import_jsx_runtime4.jsxs)("div", { "data-ai-test-result": true, "data-ai-status": result.error ? "error" : "success", children: [
|
|
1224
|
+
/* @__PURE__ */ (0, import_jsx_runtime4.jsxs)("div", { "data-ai-result-header": true, children: [
|
|
1225
|
+
/* @__PURE__ */ (0, import_jsx_runtime4.jsx)("span", { "data-ai-result-model": true, children: result.model }),
|
|
1226
|
+
/* @__PURE__ */ (0, import_jsx_runtime4.jsxs)("span", { "data-ai-result-duration": true, children: [
|
|
1227
|
+
result.durationMs,
|
|
1228
|
+
"ms"
|
|
1229
|
+
] }),
|
|
1230
|
+
result.error && /* @__PURE__ */ (0, import_jsx_runtime4.jsx)("span", { "data-ai-result-error": true, children: result.error })
|
|
1231
|
+
] }),
|
|
1232
|
+
!result.error && /* @__PURE__ */ (0, import_jsx_runtime4.jsx)("div", { "data-ai-result-content": true, children: result.type === "image" ? /* @__PURE__ */ (0, import_jsx_runtime4.jsx)(
|
|
1233
|
+
"img",
|
|
1234
|
+
{
|
|
1235
|
+
src: result.result.startsWith("http") ? result.result : `data:image/png;base64,${result.result}`,
|
|
1236
|
+
alt: "Generated",
|
|
1237
|
+
"data-ai-result-image": true
|
|
1238
|
+
}
|
|
1239
|
+
) : /* @__PURE__ */ (0, import_jsx_runtime4.jsx)("pre", { "data-ai-result-text": true, children: result.result }) })
|
|
1240
|
+
] }, i)) })
|
|
1241
|
+
] });
|
|
1242
|
+
}
|
|
1243
|
+
|
|
1244
|
+
// src/components/GenerationCard.tsx
|
|
1245
|
+
var import_jsx_runtime5 = require("react/jsx-runtime");
|
|
1246
|
+
function GenerationCard(props) {
|
|
1247
|
+
return /* @__PURE__ */ (0, import_jsx_runtime5.jsxs)("div", { className: props.className, "data-ai-component": "generation-card", "data-ai-type": props.type, children: [
|
|
1248
|
+
/* @__PURE__ */ (0, import_jsx_runtime5.jsxs)("div", { "data-ai-card-header": true, children: [
|
|
1249
|
+
/* @__PURE__ */ (0, import_jsx_runtime5.jsx)("span", { "data-ai-card-type": true, children: props.type }),
|
|
1250
|
+
props.model && /* @__PURE__ */ (0, import_jsx_runtime5.jsx)("span", { "data-ai-card-model": true, children: props.model }),
|
|
1251
|
+
props.durationMs != null && /* @__PURE__ */ (0, import_jsx_runtime5.jsxs)("span", { "data-ai-card-duration": true, children: [
|
|
1252
|
+
props.durationMs,
|
|
1253
|
+
"ms"
|
|
1254
|
+
] })
|
|
1255
|
+
] }),
|
|
1256
|
+
props.error ? /* @__PURE__ */ (0, import_jsx_runtime5.jsx)("div", { "data-ai-card-error": true, role: "alert", children: props.error }) : /* @__PURE__ */ (0, import_jsx_runtime5.jsx)("div", { "data-ai-card-content": true, children: props.type === "image" ? /* @__PURE__ */ (0, import_jsx_runtime5.jsx)(
|
|
1257
|
+
"img",
|
|
1258
|
+
{
|
|
1259
|
+
src: props.content.startsWith("http") ? props.content : `data:image/png;base64,${props.content}`,
|
|
1260
|
+
alt: "AI Generated",
|
|
1261
|
+
"data-ai-card-image": true
|
|
1262
|
+
}
|
|
1263
|
+
) : props.type === "audio" || props.type === "speech" ? /* @__PURE__ */ (0, import_jsx_runtime5.jsx)("audio", { controls: true, "data-ai-card-audio": true, children: /* @__PURE__ */ (0, import_jsx_runtime5.jsx)(
|
|
1264
|
+
"source",
|
|
1265
|
+
{
|
|
1266
|
+
src: props.content.startsWith("http") ? props.content : `data:audio/mp3;base64,${props.content}`,
|
|
1267
|
+
type: "audio/mp3"
|
|
1268
|
+
}
|
|
1269
|
+
) }) : props.type === "video" ? /* @__PURE__ */ (0, import_jsx_runtime5.jsx)("video", { controls: true, "data-ai-card-video": true, children: /* @__PURE__ */ (0, import_jsx_runtime5.jsx)("source", { src: props.content, type: "video/mp4" }) }) : /* @__PURE__ */ (0, import_jsx_runtime5.jsx)("pre", { "data-ai-card-text": true, children: props.content }) }),
|
|
1270
|
+
(props.tokens || props.cost != null) && /* @__PURE__ */ (0, import_jsx_runtime5.jsxs)("div", { "data-ai-card-footer": true, children: [
|
|
1271
|
+
props.tokens && /* @__PURE__ */ (0, import_jsx_runtime5.jsxs)("span", { "data-ai-card-tokens": true, children: [
|
|
1272
|
+
props.tokens,
|
|
1273
|
+
" tokens"
|
|
1274
|
+
] }),
|
|
1275
|
+
props.cost != null && /* @__PURE__ */ (0, import_jsx_runtime5.jsxs)("span", { "data-ai-card-cost": true, children: [
|
|
1276
|
+
"$",
|
|
1277
|
+
props.cost < 0.01 ? props.cost.toFixed(4) : props.cost.toFixed(2)
|
|
1278
|
+
] })
|
|
1279
|
+
] })
|
|
1280
|
+
] });
|
|
1281
|
+
}
|
|
1282
|
+
|
|
1283
|
+
// src/components/ImageGenerator.tsx
|
|
1284
|
+
var import_react24 = require("react");
|
|
1285
|
+
var import_jsx_runtime6 = require("react/jsx-runtime");
|
|
1286
|
+
function ImageGenerator(props) {
|
|
1287
|
+
const [prompt, setPrompt] = (0, import_react24.useState)("");
|
|
1288
|
+
const [selectedModel, setSelectedModel] = (0, import_react24.useState)(props.defaultModel ?? "");
|
|
1289
|
+
const [size, setSize] = (0, import_react24.useState)("1024x1024");
|
|
1290
|
+
const [quality, setQuality] = (0, import_react24.useState)("standard");
|
|
1291
|
+
const { generate, images, isGenerating, error, clearImages, clearError } = useImageGeneration(props);
|
|
1292
|
+
const handleSubmit = async (e) => {
|
|
1293
|
+
e.preventDefault();
|
|
1294
|
+
if (!prompt.trim() || isGenerating) return;
|
|
1295
|
+
await generate(prompt.trim(), { model: selectedModel || void 0, size, quality });
|
|
1296
|
+
};
|
|
1297
|
+
return /* @__PURE__ */ (0, import_jsx_runtime6.jsxs)("div", { className: props.className, "data-ai-component": "image-generator", children: [
|
|
1298
|
+
/* @__PURE__ */ (0, import_jsx_runtime6.jsxs)("form", { onSubmit: handleSubmit, "data-ai-image-form": true, children: [
|
|
1299
|
+
/* @__PURE__ */ (0, import_jsx_runtime6.jsx)("div", { "data-ai-field": "prompt", children: /* @__PURE__ */ (0, import_jsx_runtime6.jsx)(
|
|
1300
|
+
"textarea",
|
|
1301
|
+
{
|
|
1302
|
+
value: prompt,
|
|
1303
|
+
onChange: (e) => setPrompt(e.target.value),
|
|
1304
|
+
placeholder: "Describe the image you want to create\u2026",
|
|
1305
|
+
disabled: isGenerating,
|
|
1306
|
+
"data-ai-input": true
|
|
1307
|
+
}
|
|
1308
|
+
) }),
|
|
1309
|
+
/* @__PURE__ */ (0, import_jsx_runtime6.jsxs)("div", { "data-ai-image-options": true, children: [
|
|
1310
|
+
props.availableModels && /* @__PURE__ */ (0, import_jsx_runtime6.jsxs)(
|
|
1311
|
+
"select",
|
|
1312
|
+
{
|
|
1313
|
+
value: selectedModel,
|
|
1314
|
+
onChange: (e) => setSelectedModel(e.target.value),
|
|
1315
|
+
disabled: isGenerating,
|
|
1316
|
+
"data-ai-select": true,
|
|
1317
|
+
children: [
|
|
1318
|
+
/* @__PURE__ */ (0, import_jsx_runtime6.jsx)("option", { value: "", children: "Default model" }),
|
|
1319
|
+
props.availableModels.map((m) => /* @__PURE__ */ (0, import_jsx_runtime6.jsx)("option", { value: m, children: m }, m))
|
|
1320
|
+
]
|
|
1321
|
+
}
|
|
1322
|
+
),
|
|
1323
|
+
/* @__PURE__ */ (0, import_jsx_runtime6.jsxs)("select", { value: size, onChange: (e) => setSize(e.target.value), disabled: isGenerating, "data-ai-select": true, children: [
|
|
1324
|
+
/* @__PURE__ */ (0, import_jsx_runtime6.jsx)("option", { value: "1024x1024", children: "1024\xD71024" }),
|
|
1325
|
+
/* @__PURE__ */ (0, import_jsx_runtime6.jsx)("option", { value: "1792x1024", children: "1792\xD71024" }),
|
|
1326
|
+
/* @__PURE__ */ (0, import_jsx_runtime6.jsx)("option", { value: "1024x1792", children: "1024\xD71792" }),
|
|
1327
|
+
/* @__PURE__ */ (0, import_jsx_runtime6.jsx)("option", { value: "512x512", children: "512\xD7512" })
|
|
1328
|
+
] }),
|
|
1329
|
+
/* @__PURE__ */ (0, import_jsx_runtime6.jsxs)("select", { value: quality, onChange: (e) => setQuality(e.target.value), disabled: isGenerating, "data-ai-select": true, children: [
|
|
1330
|
+
/* @__PURE__ */ (0, import_jsx_runtime6.jsx)("option", { value: "standard", children: "Standard" }),
|
|
1331
|
+
/* @__PURE__ */ (0, import_jsx_runtime6.jsx)("option", { value: "hd", children: "HD" })
|
|
1332
|
+
] })
|
|
1333
|
+
] }),
|
|
1334
|
+
/* @__PURE__ */ (0, import_jsx_runtime6.jsxs)("div", { "data-ai-actions": true, children: [
|
|
1335
|
+
/* @__PURE__ */ (0, import_jsx_runtime6.jsx)("button", { type: "submit", disabled: isGenerating || !prompt.trim(), "data-ai-submit": true, children: isGenerating ? "Generating\u2026" : "Generate Image" }),
|
|
1336
|
+
images.length > 0 && /* @__PURE__ */ (0, import_jsx_runtime6.jsx)("button", { type: "button", onClick: clearImages, "data-ai-clear": true, children: "Clear Gallery" })
|
|
1337
|
+
] })
|
|
1338
|
+
] }),
|
|
1339
|
+
error && /* @__PURE__ */ (0, import_jsx_runtime6.jsxs)("div", { "data-ai-error": true, role: "alert", children: [
|
|
1340
|
+
/* @__PURE__ */ (0, import_jsx_runtime6.jsx)("span", { children: error }),
|
|
1341
|
+
/* @__PURE__ */ (0, import_jsx_runtime6.jsx)("button", { onClick: clearError, children: "\xD7" })
|
|
1342
|
+
] }),
|
|
1343
|
+
images.length > 0 && /* @__PURE__ */ (0, import_jsx_runtime6.jsx)("div", { "data-ai-image-gallery": true, children: images.map((img, i) => /* @__PURE__ */ (0, import_jsx_runtime6.jsxs)("div", { "data-ai-image-card": true, children: [
|
|
1344
|
+
/* @__PURE__ */ (0, import_jsx_runtime6.jsx)("img", { src: img.url, alt: img.prompt, "data-ai-generated-image": true }),
|
|
1345
|
+
/* @__PURE__ */ (0, import_jsx_runtime6.jsxs)("div", { "data-ai-image-meta": true, children: [
|
|
1346
|
+
/* @__PURE__ */ (0, import_jsx_runtime6.jsx)("span", { "data-ai-image-model": true, children: img.model }),
|
|
1347
|
+
/* @__PURE__ */ (0, import_jsx_runtime6.jsx)("span", { "data-ai-image-time": true, children: new Date(img.timestamp).toLocaleTimeString() })
|
|
1348
|
+
] })
|
|
1349
|
+
] }, i)) })
|
|
1350
|
+
] });
|
|
1351
|
+
}
|
|
1352
|
+
|
|
1353
|
+
// src/components/VoiceSelector.tsx
|
|
1354
|
+
var import_jsx_runtime7 = require("react/jsx-runtime");
|
|
1355
|
+
function VoiceSelector(props) {
|
|
1356
|
+
return /* @__PURE__ */ (0, import_jsx_runtime7.jsx)("div", { className: props.className, "data-ai-component": "voice-selector", children: props.voices.map((voice) => /* @__PURE__ */ (0, import_jsx_runtime7.jsxs)(
|
|
1357
|
+
"button",
|
|
1358
|
+
{
|
|
1359
|
+
onClick: () => props.onSelect(voice.id),
|
|
1360
|
+
disabled: props.disabled,
|
|
1361
|
+
"data-ai-voice": true,
|
|
1362
|
+
"data-ai-voice-selected": props.selectedVoice === voice.id ? "" : void 0,
|
|
1363
|
+
"data-ai-provider": voice.provider,
|
|
1364
|
+
children: [
|
|
1365
|
+
/* @__PURE__ */ (0, import_jsx_runtime7.jsx)("span", { "data-ai-voice-name": true, children: voice.name }),
|
|
1366
|
+
voice.gender && /* @__PURE__ */ (0, import_jsx_runtime7.jsx)("span", { "data-ai-voice-gender": true, children: voice.gender }),
|
|
1367
|
+
voice.language && /* @__PURE__ */ (0, import_jsx_runtime7.jsx)("span", { "data-ai-voice-language": true, children: voice.language }),
|
|
1368
|
+
props.showProvider && /* @__PURE__ */ (0, import_jsx_runtime7.jsx)("span", { "data-ai-voice-provider": true, children: voice.provider }),
|
|
1369
|
+
props.showPreview && voice.preview && /* @__PURE__ */ (0, import_jsx_runtime7.jsx)(
|
|
1370
|
+
"button",
|
|
1371
|
+
{
|
|
1372
|
+
"data-ai-voice-preview": true,
|
|
1373
|
+
onClick: (e) => {
|
|
1374
|
+
e.stopPropagation();
|
|
1375
|
+
props.onPreview?.(voice.id);
|
|
1376
|
+
},
|
|
1377
|
+
children: "\u25B6"
|
|
1378
|
+
}
|
|
1379
|
+
)
|
|
1380
|
+
]
|
|
1381
|
+
},
|
|
1382
|
+
voice.id
|
|
1383
|
+
)) });
|
|
1384
|
+
}
|
|
1385
|
+
|
|
1386
|
+
// src/pages/ChatPage.tsx
|
|
1387
|
+
var import_react25 = require("react");
|
|
1388
|
+
var import_jsx_runtime8 = require("react/jsx-runtime");
|
|
1389
|
+
function ChatPage(props) {
|
|
1390
|
+
const [activeConversationId, setActiveConversationId] = (0, import_react25.useState)(props.conversationId);
|
|
1391
|
+
return /* @__PURE__ */ (0, import_jsx_runtime8.jsxs)("div", { className: props.className, "data-ai-page": "chat", children: [
|
|
1392
|
+
/* @__PURE__ */ (0, import_jsx_runtime8.jsxs)("div", { "data-ai-page-header": true, children: [
|
|
1393
|
+
/* @__PURE__ */ (0, import_jsx_runtime8.jsx)("h1", { "data-ai-page-title": true, children: "AI Chat" }),
|
|
1394
|
+
/* @__PURE__ */ (0, import_jsx_runtime8.jsx)(
|
|
1395
|
+
"button",
|
|
1396
|
+
{
|
|
1397
|
+
onClick: () => setActiveConversationId(void 0),
|
|
1398
|
+
"data-ai-new-chat": true,
|
|
1399
|
+
children: "New Chat"
|
|
1400
|
+
}
|
|
1401
|
+
)
|
|
1402
|
+
] }),
|
|
1403
|
+
/* @__PURE__ */ (0, import_jsx_runtime8.jsx)(
|
|
1404
|
+
ChatWindow,
|
|
1405
|
+
{
|
|
1406
|
+
...props,
|
|
1407
|
+
conversationId: activeConversationId,
|
|
1408
|
+
onNewConversation: (id) => {
|
|
1409
|
+
setActiveConversationId(id);
|
|
1410
|
+
props.onNewConversation?.(id);
|
|
1411
|
+
}
|
|
1412
|
+
}
|
|
1413
|
+
)
|
|
1414
|
+
] });
|
|
1415
|
+
}
|
|
1416
|
+
|
|
1417
|
+
// src/pages/ModelTestPage.tsx
|
|
1418
|
+
var import_ai_shared2 = require("@geenius/ai-shared");
|
|
1419
|
+
var import_jsx_runtime9 = require("react/jsx-runtime");
|
|
1420
|
+
function ModelTestPage(props) {
|
|
1421
|
+
const models = props.availableModels ?? import_ai_shared2.DEFAULT_MODELS.map((m) => m.id);
|
|
1422
|
+
return /* @__PURE__ */ (0, import_jsx_runtime9.jsxs)("div", { className: props.className, "data-ai-page": "model-test", children: [
|
|
1423
|
+
/* @__PURE__ */ (0, import_jsx_runtime9.jsxs)("div", { "data-ai-page-header": true, children: [
|
|
1424
|
+
/* @__PURE__ */ (0, import_jsx_runtime9.jsx)("h1", { "data-ai-page-title": true, children: props.title ?? "Model Test Lab" }),
|
|
1425
|
+
/* @__PURE__ */ (0, import_jsx_runtime9.jsx)("p", { "data-ai-page-subtitle": true, children: "Test AI models individually or compare them side-by-side" })
|
|
1426
|
+
] }),
|
|
1427
|
+
/* @__PURE__ */ (0, import_jsx_runtime9.jsx)(
|
|
1428
|
+
ModelTestRunner,
|
|
1429
|
+
{
|
|
1430
|
+
...props,
|
|
1431
|
+
availableModels: models
|
|
1432
|
+
}
|
|
1433
|
+
)
|
|
1434
|
+
] });
|
|
1435
|
+
}
|
|
1436
|
+
|
|
1437
|
+
// src/pages/AILogsPage.tsx
|
|
1438
|
+
var import_react26 = require("react");
|
|
1439
|
+
var import_jsx_runtime10 = require("react/jsx-runtime");
|
|
1440
|
+
function AILogsPage(props) {
|
|
1441
|
+
const [modelFilter, setModelFilter] = (0, import_react26.useState)("");
|
|
1442
|
+
const [statusFilter, setStatusFilter] = (0, import_react26.useState)("");
|
|
1443
|
+
const { logs, isLoading } = useAILogs({
|
|
1444
|
+
listLogsQuery: props.listLogsQuery,
|
|
1445
|
+
filters: {
|
|
1446
|
+
model: modelFilter || void 0,
|
|
1447
|
+
status: statusFilter || void 0
|
|
1448
|
+
},
|
|
1449
|
+
limit: 100
|
|
1450
|
+
});
|
|
1451
|
+
const totalCalls = logs.length;
|
|
1452
|
+
const successCalls = logs.filter((l) => l.status === "success").length;
|
|
1453
|
+
const errorCalls = totalCalls - successCalls;
|
|
1454
|
+
const totalCost = logs.reduce((sum, l) => sum + (l.totalCostUsd ?? 0), 0);
|
|
1455
|
+
const totalTokens = logs.reduce((sum, l) => sum + (l.totalTokens ?? 0), 0);
|
|
1456
|
+
return /* @__PURE__ */ (0, import_jsx_runtime10.jsxs)("div", { className: props.className, "data-ai-page": "logs", children: [
|
|
1457
|
+
/* @__PURE__ */ (0, import_jsx_runtime10.jsx)("div", { "data-ai-page-header": true, children: /* @__PURE__ */ (0, import_jsx_runtime10.jsx)("h1", { "data-ai-page-title": true, children: props.title ?? "AI Logs" }) }),
|
|
1458
|
+
/* @__PURE__ */ (0, import_jsx_runtime10.jsxs)("div", { "data-ai-stats-grid": true, children: [
|
|
1459
|
+
/* @__PURE__ */ (0, import_jsx_runtime10.jsxs)("div", { "data-ai-stat": true, children: [
|
|
1460
|
+
/* @__PURE__ */ (0, import_jsx_runtime10.jsx)("span", { "data-ai-stat-label": true, children: "Total Calls" }),
|
|
1461
|
+
/* @__PURE__ */ (0, import_jsx_runtime10.jsx)("span", { "data-ai-stat-value": true, children: totalCalls })
|
|
1462
|
+
] }),
|
|
1463
|
+
/* @__PURE__ */ (0, import_jsx_runtime10.jsxs)("div", { "data-ai-stat": true, "data-ai-status": "success", children: [
|
|
1464
|
+
/* @__PURE__ */ (0, import_jsx_runtime10.jsx)("span", { "data-ai-stat-label": true, children: "Success" }),
|
|
1465
|
+
/* @__PURE__ */ (0, import_jsx_runtime10.jsx)("span", { "data-ai-stat-value": true, children: successCalls })
|
|
1466
|
+
] }),
|
|
1467
|
+
/* @__PURE__ */ (0, import_jsx_runtime10.jsxs)("div", { "data-ai-stat": true, "data-ai-status": "error", children: [
|
|
1468
|
+
/* @__PURE__ */ (0, import_jsx_runtime10.jsx)("span", { "data-ai-stat-label": true, children: "Errors" }),
|
|
1469
|
+
/* @__PURE__ */ (0, import_jsx_runtime10.jsx)("span", { "data-ai-stat-value": true, children: errorCalls })
|
|
1470
|
+
] }),
|
|
1471
|
+
/* @__PURE__ */ (0, import_jsx_runtime10.jsxs)("div", { "data-ai-stat": true, children: [
|
|
1472
|
+
/* @__PURE__ */ (0, import_jsx_runtime10.jsx)("span", { "data-ai-stat-label": true, children: "Total Cost" }),
|
|
1473
|
+
/* @__PURE__ */ (0, import_jsx_runtime10.jsxs)("span", { "data-ai-stat-value": true, children: [
|
|
1474
|
+
"$",
|
|
1475
|
+
totalCost.toFixed(2)
|
|
1476
|
+
] })
|
|
1477
|
+
] }),
|
|
1478
|
+
/* @__PURE__ */ (0, import_jsx_runtime10.jsxs)("div", { "data-ai-stat": true, children: [
|
|
1479
|
+
/* @__PURE__ */ (0, import_jsx_runtime10.jsx)("span", { "data-ai-stat-label": true, children: "Total Tokens" }),
|
|
1480
|
+
/* @__PURE__ */ (0, import_jsx_runtime10.jsx)("span", { "data-ai-stat-value": true, children: totalTokens.toLocaleString() })
|
|
1481
|
+
] })
|
|
1482
|
+
] }),
|
|
1483
|
+
/* @__PURE__ */ (0, import_jsx_runtime10.jsxs)("div", { "data-ai-filters": true, children: [
|
|
1484
|
+
/* @__PURE__ */ (0, import_jsx_runtime10.jsx)(
|
|
1485
|
+
"input",
|
|
1486
|
+
{
|
|
1487
|
+
type: "text",
|
|
1488
|
+
placeholder: "Filter by model\u2026",
|
|
1489
|
+
value: modelFilter,
|
|
1490
|
+
onChange: (e) => setModelFilter(e.target.value),
|
|
1491
|
+
"data-ai-filter-input": true
|
|
1492
|
+
}
|
|
1493
|
+
),
|
|
1494
|
+
/* @__PURE__ */ (0, import_jsx_runtime10.jsxs)(
|
|
1495
|
+
"select",
|
|
1496
|
+
{
|
|
1497
|
+
value: statusFilter,
|
|
1498
|
+
onChange: (e) => setStatusFilter(e.target.value),
|
|
1499
|
+
"data-ai-filter-select": true,
|
|
1500
|
+
children: [
|
|
1501
|
+
/* @__PURE__ */ (0, import_jsx_runtime10.jsx)("option", { value: "", children: "All statuses" }),
|
|
1502
|
+
/* @__PURE__ */ (0, import_jsx_runtime10.jsx)("option", { value: "success", children: "Success" }),
|
|
1503
|
+
/* @__PURE__ */ (0, import_jsx_runtime10.jsx)("option", { value: "error", children: "Error" })
|
|
1504
|
+
]
|
|
1505
|
+
}
|
|
1506
|
+
)
|
|
1507
|
+
] }),
|
|
1508
|
+
/* @__PURE__ */ (0, import_jsx_runtime10.jsx)(
|
|
1509
|
+
AILogTable,
|
|
1510
|
+
{
|
|
1511
|
+
logs,
|
|
1512
|
+
isLoading,
|
|
1513
|
+
onRowClick: props.onRowClick
|
|
1514
|
+
}
|
|
1515
|
+
)
|
|
1516
|
+
] });
|
|
1517
|
+
}
|
|
1518
|
+
// Annotate the CommonJS export names for ESM import in node:
|
|
1519
|
+
0 && (module.exports = {
|
|
1520
|
+
AILogTable,
|
|
1521
|
+
AILogsPage,
|
|
1522
|
+
ChatPage,
|
|
1523
|
+
ChatWindow,
|
|
1524
|
+
GenerationCard,
|
|
1525
|
+
ImageGenerator,
|
|
1526
|
+
ModelSelector,
|
|
1527
|
+
ModelTestPage,
|
|
1528
|
+
ModelTestRunner,
|
|
1529
|
+
VoiceSelector,
|
|
1530
|
+
useAI,
|
|
1531
|
+
useAILogs,
|
|
1532
|
+
useAIModels,
|
|
1533
|
+
useChat,
|
|
1534
|
+
useContentManager,
|
|
1535
|
+
useImageGeneration,
|
|
1536
|
+
useMemory,
|
|
1537
|
+
useModelTest,
|
|
1538
|
+
useRealtimeAudio,
|
|
1539
|
+
useSkills,
|
|
1540
|
+
useTextToSpeech,
|
|
1541
|
+
useTranscription,
|
|
1542
|
+
useVideoGeneration
|
|
1543
|
+
});
|
|
1544
|
+
//# sourceMappingURL=index.cjs.map
|