@geenius/ai 0.1.0 → 0.3.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (177)
  1. package/package.json +15 -2
  2. package/packages/convex/dist/index.d.ts +1 -0
  3. package/packages/convex/dist/index.js +42 -0
  4. package/packages/convex/dist/index.js.map +1 -0
  5. package/packages/react/README.md +1 -1
  6. package/packages/react-css/README.md +1 -1
  7. package/packages/react-css/dist/index.cjs +1544 -0
  8. package/packages/react-css/dist/index.cjs.map +1 -0
  9. package/packages/react-css/dist/index.d.cts +454 -0
  10. package/packages/react-css/dist/index.d.ts +454 -0
  11. package/packages/react-css/dist/index.js +1495 -0
  12. package/packages/react-css/dist/index.js.map +1 -0
  13. package/packages/shared/README.md +1 -1
  14. package/packages/solidjs/README.md +1 -1
  15. package/packages/solidjs-css/README.md +1 -1
  16. package/packages/solidjs-css/dist/index.cjs +674 -0
  17. package/packages/solidjs-css/dist/index.cjs.map +1 -0
  18. package/packages/solidjs-css/dist/index.d.cts +254 -0
  19. package/packages/solidjs-css/dist/index.d.ts +254 -0
  20. package/packages/solidjs-css/dist/index.js +634 -0
  21. package/packages/solidjs-css/dist/index.js.map +1 -0
  22. package/.changeset/config.json +0 -11
  23. package/.env.example +0 -2
  24. package/.github/CODEOWNERS +0 -1
  25. package/.github/ISSUE_TEMPLATE/bug_report.md +0 -16
  26. package/.github/ISSUE_TEMPLATE/feature_request.md +0 -11
  27. package/.github/PULL_REQUEST_TEMPLATE.md +0 -10
  28. package/.github/dependabot.yml +0 -11
  29. package/.github/workflows/ci.yml +0 -23
  30. package/.github/workflows/release.yml +0 -29
  31. package/.node-version +0 -1
  32. package/.nvmrc +0 -1
  33. package/.prettierrc +0 -7
  34. package/.project/ACCOUNT.yaml +0 -4
  35. package/.project/IDEAS.yaml +0 -7
  36. package/.project/PROJECT.yaml +0 -11
  37. package/.project/ROADMAP.yaml +0 -15
  38. package/CODE_OF_CONDUCT.md +0 -26
  39. package/CONTRIBUTING.md +0 -61
  40. package/SECURITY.md +0 -18
  41. package/SUPPORT.md +0 -14
  42. package/packages/convex/package.json +0 -42
  43. package/packages/convex/src/index.ts +0 -8
  44. package/packages/convex/src/mutations/messages.ts +0 -29
  45. package/packages/convex/src/queries/messages.ts +0 -24
  46. package/packages/convex/src/schema.ts +0 -20
  47. package/packages/convex/tsconfig.json +0 -11
  48. package/packages/convex/tsup.config.ts +0 -17
  49. package/packages/react/package.json +0 -60
  50. package/packages/react/src/components/AILogTable.tsx +0 -90
  51. package/packages/react/src/components/ChatWindow.tsx +0 -118
  52. package/packages/react/src/components/GenerationCard.tsx +0 -73
  53. package/packages/react/src/components/ImageGenerator.tsx +0 -103
  54. package/packages/react/src/components/ModelSelector.tsx +0 -44
  55. package/packages/react/src/components/ModelTestRunner.tsx +0 -148
  56. package/packages/react/src/components/VoiceSelector.tsx +0 -51
  57. package/packages/react/src/components/index.ts +0 -9
  58. package/packages/react/src/hooks/index.ts +0 -12
  59. package/packages/react/src/hooks/useAI.ts +0 -158
  60. package/packages/react/src/hooks/useAILogs.ts +0 -40
  61. package/packages/react/src/hooks/useAIModels.ts +0 -53
  62. package/packages/react/src/hooks/useChat.ts +0 -141
  63. package/packages/react/src/hooks/useContentManager.ts +0 -108
  64. package/packages/react/src/hooks/useImageGeneration.ts +0 -82
  65. package/packages/react/src/hooks/useMemory.ts +0 -161
  66. package/packages/react/src/hooks/useModelTest.ts +0 -126
  67. package/packages/react/src/hooks/useRealtimeAudio.ts +0 -203
  68. package/packages/react/src/hooks/useSkills.ts +0 -114
  69. package/packages/react/src/hooks/useTextToSpeech.ts +0 -99
  70. package/packages/react/src/hooks/useTranscription.ts +0 -119
  71. package/packages/react/src/hooks/useVideoGeneration.ts +0 -79
  72. package/packages/react/src/index.ts +0 -42
  73. package/packages/react/src/pages/AILogsPage.tsx +0 -98
  74. package/packages/react/src/pages/ChatPage.tsx +0 -42
  75. package/packages/react/src/pages/ModelTestPage.tsx +0 -33
  76. package/packages/react/src/pages/index.ts +0 -5
  77. package/packages/react/tsconfig.json +0 -26
  78. package/packages/react/tsup.config.ts +0 -22
  79. package/packages/react-css/package.json +0 -45
  80. package/packages/react-css/src/ai.css +0 -857
  81. package/packages/react-css/src/components/AILogTable.tsx +0 -90
  82. package/packages/react-css/src/components/ChatWindow.tsx +0 -118
  83. package/packages/react-css/src/components/GenerationCard.tsx +0 -73
  84. package/packages/react-css/src/components/ImageGenerator.tsx +0 -103
  85. package/packages/react-css/src/components/ModelSelector.tsx +0 -44
  86. package/packages/react-css/src/components/ModelTestRunner.tsx +0 -148
  87. package/packages/react-css/src/components/VoiceSelector.tsx +0 -51
  88. package/packages/react-css/src/components/index.ts +0 -9
  89. package/packages/react-css/src/hooks/index.ts +0 -12
  90. package/packages/react-css/src/hooks/useAI.ts +0 -153
  91. package/packages/react-css/src/hooks/useAILogs.ts +0 -40
  92. package/packages/react-css/src/hooks/useAIModels.ts +0 -51
  93. package/packages/react-css/src/hooks/useChat.ts +0 -145
  94. package/packages/react-css/src/hooks/useContentManager.ts +0 -108
  95. package/packages/react-css/src/hooks/useImageGeneration.ts +0 -82
  96. package/packages/react-css/src/hooks/useMemory.ts +0 -161
  97. package/packages/react-css/src/hooks/useModelTest.ts +0 -122
  98. package/packages/react-css/src/hooks/useRealtimeAudio.ts +0 -203
  99. package/packages/react-css/src/hooks/useSkills.ts +0 -114
  100. package/packages/react-css/src/hooks/useTextToSpeech.ts +0 -99
  101. package/packages/react-css/src/hooks/useTranscription.ts +0 -119
  102. package/packages/react-css/src/hooks/useVideoGeneration.ts +0 -79
  103. package/packages/react-css/src/index.ts +0 -35
  104. package/packages/react-css/src/pages/AILogsPage.tsx +0 -98
  105. package/packages/react-css/src/pages/ChatPage.tsx +0 -42
  106. package/packages/react-css/src/pages/ModelTestPage.tsx +0 -33
  107. package/packages/react-css/src/pages/index.ts +0 -5
  108. package/packages/react-css/src/styles.css +0 -127
  109. package/packages/react-css/tsconfig.json +0 -26
  110. package/packages/react-css/tsup.config.ts +0 -2
  111. package/packages/shared/package.json +0 -71
  112. package/packages/shared/src/__tests__/ai.test.ts +0 -67
  113. package/packages/shared/src/ai-client.ts +0 -243
  114. package/packages/shared/src/config.ts +0 -235
  115. package/packages/shared/src/content.ts +0 -249
  116. package/packages/shared/src/convex/helpers.ts +0 -163
  117. package/packages/shared/src/convex/index.ts +0 -16
  118. package/packages/shared/src/convex/schemas.ts +0 -146
  119. package/packages/shared/src/convex/validators.ts +0 -136
  120. package/packages/shared/src/index.ts +0 -107
  121. package/packages/shared/src/memory.ts +0 -197
  122. package/packages/shared/src/providers/base.ts +0 -103
  123. package/packages/shared/src/providers/elevenlabs.ts +0 -155
  124. package/packages/shared/src/providers/index.ts +0 -28
  125. package/packages/shared/src/providers/openai-compatible.ts +0 -286
  126. package/packages/shared/src/providers/registry.ts +0 -113
  127. package/packages/shared/src/providers/replicate-fal.ts +0 -230
  128. package/packages/shared/src/skills.ts +0 -273
  129. package/packages/shared/src/types.ts +0 -501
  130. package/packages/shared/tsconfig.json +0 -25
  131. package/packages/shared/tsup.config.ts +0 -22
  132. package/packages/shared/vitest.config.ts +0 -4
  133. package/packages/solidjs/package.json +0 -59
  134. package/packages/solidjs/src/components/ChatWindow.tsx +0 -78
  135. package/packages/solidjs/src/components/GenerationCard.tsx +0 -62
  136. package/packages/solidjs/src/components/ModelTestRunner.tsx +0 -119
  137. package/packages/solidjs/src/components/index.ts +0 -5
  138. package/packages/solidjs/src/index.ts +0 -32
  139. package/packages/solidjs/src/pages/ChatPage.tsx +0 -22
  140. package/packages/solidjs/src/pages/ModelTestPage.tsx +0 -22
  141. package/packages/solidjs/src/pages/index.ts +0 -4
  142. package/packages/solidjs/src/primitives/createAI.ts +0 -79
  143. package/packages/solidjs/src/primitives/createChat.ts +0 -100
  144. package/packages/solidjs/src/primitives/createContentManager.ts +0 -61
  145. package/packages/solidjs/src/primitives/createImageGeneration.ts +0 -46
  146. package/packages/solidjs/src/primitives/createMemory.ts +0 -127
  147. package/packages/solidjs/src/primitives/createModelTest.ts +0 -89
  148. package/packages/solidjs/src/primitives/createSkills.ts +0 -83
  149. package/packages/solidjs/src/primitives/createTextToSpeech.ts +0 -56
  150. package/packages/solidjs/src/primitives/createVideoGeneration.ts +0 -46
  151. package/packages/solidjs/src/primitives/index.ts +0 -8
  152. package/packages/solidjs/tsconfig.json +0 -27
  153. package/packages/solidjs/tsup.config.ts +0 -21
  154. package/packages/solidjs-css/package.json +0 -44
  155. package/packages/solidjs-css/src/ai.css +0 -857
  156. package/packages/solidjs-css/src/components/ChatWindow.tsx +0 -78
  157. package/packages/solidjs-css/src/components/GenerationCard.tsx +0 -62
  158. package/packages/solidjs-css/src/components/ModelTestRunner.tsx +0 -119
  159. package/packages/solidjs-css/src/components/index.ts +0 -5
  160. package/packages/solidjs-css/src/index.ts +0 -26
  161. package/packages/solidjs-css/src/pages/ChatPage.tsx +0 -22
  162. package/packages/solidjs-css/src/pages/ModelTestPage.tsx +0 -22
  163. package/packages/solidjs-css/src/pages/index.ts +0 -4
  164. package/packages/solidjs-css/src/primitives/createAI.ts +0 -79
  165. package/packages/solidjs-css/src/primitives/createChat.ts +0 -100
  166. package/packages/solidjs-css/src/primitives/createContentManager.ts +0 -61
  167. package/packages/solidjs-css/src/primitives/createImageGeneration.ts +0 -46
  168. package/packages/solidjs-css/src/primitives/createMemory.ts +0 -127
  169. package/packages/solidjs-css/src/primitives/createModelTest.ts +0 -89
  170. package/packages/solidjs-css/src/primitives/createSkills.ts +0 -83
  171. package/packages/solidjs-css/src/primitives/createTextToSpeech.ts +0 -56
  172. package/packages/solidjs-css/src/primitives/createVideoGeneration.ts +0 -46
  173. package/packages/solidjs-css/src/primitives/index.ts +0 -1
  174. package/packages/solidjs-css/src/styles.css +0 -127
  175. package/packages/solidjs-css/tsconfig.json +0 -27
  176. package/packages/solidjs-css/tsup.config.ts +0 -2
  177. package/pnpm-workspace.yaml +0 -2
@@ -0,0 +1,1495 @@
1
+ // src/hooks/useAI.ts
2
+ import { useState, useCallback } from "react";
3
+ import { useAction } from "convex/react";
4
/**
 * React hook exposing Convex-backed AI generation actions (text, image,
 * audio, transcription, video) with shared loading/error/result state.
 *
 * @param {object} [options] - Convex action references: generateTextAction,
 *   generateImageAction, generateAudioAction, transcribeAudioAction,
 *   generateVideoAction. Each is optional; calling a generator whose action
 *   was not supplied throws.
 * @returns {object} { generateText, generateImage, generateAudio,
 *   transcribeAudio, generateVideo, isLoading, error, lastResult, lastType,
 *   clearError }
 */
function useAI(options = {}) {
  const [isLoading, setIsLoading] = useState(false);
  const [error, setError] = useState(null);
  const [lastResult, setLastResult] = useState(null);
  const [lastType, setLastType] = useState(null);
  // NOTE(review): calling useAction conditionally violates React's Rules of
  // Hooks — this only works if each options.*Action reference is stable
  // (present or absent) for the lifetime of the component. Confirm callers
  // never toggle these between renders.
  const textAction = options.generateTextAction ? useAction(options.generateTextAction) : null;
  const imageAction = options.generateImageAction ? useAction(options.generateImageAction) : null;
  const audioAction = options.generateAudioAction ? useAction(options.generateAudioAction) : null;
  const transcribeAction = options.transcribeAudioAction ? useAction(options.transcribeAudioAction) : null;
  const videoAction = options.generateVideoAction ? useAction(options.generateVideoAction) : null;
  // Shared runner: wraps one generation call with the loading/error/result
  // bookkeeping that every generator needs. Closes over stable state setters
  // only, so capturing it inside memoized callbacks is safe.
  async function runGeneration(action, type, fallbackMessage, args) {
    setIsLoading(true);
    setError(null);
    try {
      const result = await action(args);
      setLastResult(result);
      setLastType(type);
      return result;
    } catch (err) {
      // Surface a readable message but rethrow so callers can react too.
      setError(err instanceof Error ? err.message : fallbackMessage);
      throw err;
    } finally {
      setIsLoading(false);
    }
  }
  const generateText = useCallback(async (args) => {
    if (!textAction) throw new Error("generateTextAction not provided");
    return runGeneration(textAction, "text", "Text generation failed", args);
  }, [textAction]);
  const generateImage = useCallback(async (prompt, model) => {
    if (!imageAction) throw new Error("generateImageAction not provided");
    return runGeneration(imageAction, "image", "Image generation failed", { prompt, model });
  }, [imageAction]);
  const generateAudio = useCallback(async (prompt, voice) => {
    if (!audioAction) throw new Error("generateAudioAction not provided");
    return runGeneration(audioAction, "audio", "Audio generation failed", { prompt, voice });
  }, [audioAction]);
  const transcribeAudio = useCallback(async (audio) => {
    if (!transcribeAction) throw new Error("transcribeAudioAction not provided");
    return runGeneration(transcribeAction, "transcription", "Transcription failed", { audio });
  }, [transcribeAction]);
  const generateVideo = useCallback(async (prompt) => {
    if (!videoAction) throw new Error("generateVideoAction not provided");
    return runGeneration(videoAction, "video", "Video generation failed", { prompt });
  }, [videoAction]);
  const clearError = useCallback(() => setError(null), []);
  return {
    generateText,
    generateImage,
    generateAudio,
    transcribeAudio,
    generateVideo,
    isLoading,
    error,
    lastResult,
    lastType,
    clearError
  };
}
113
+
114
+ // src/hooks/useChat.ts
115
+ import { useState as useState2, useCallback as useCallback2 } from "react";
116
+ import { useQuery, useMutation, useAction as useAction2 } from "convex/react";
117
/**
 * Chat hook: keeps conversation state either in Convex (when query/mutation
 * references are supplied) or in a local fallback list, and optionally asks
 * a text action for the assistant reply.
 *
 * @param {object} [options] - conversationId, listMessagesQuery,
 *   sendMessageMutation, createConversationMutation, generateTextAction,
 *   model, systemPrompt, onNewConversation.
 * @returns {object} { messages, isLoading, isSending, error, sendMessage,
 *   conversationId, clearError }
 */
function useChat(options = {}) {
  const [localMessages, setLocalMessages] = useState2([]);
  const [isSending, setIsSending] = useState2(false);
  const [error, setError] = useState2(null);
  const [conversationId, setConversationId] = useState2(options.conversationId ?? null);
  // NOTE(review): hooks invoked conditionally — works only if the option
  // references are stable across renders; confirm with callers.
  const convexMessages = options.listMessagesQuery && conversationId
    ? useQuery(options.listMessagesQuery, { conversationId })
    : void 0;
  const sendMutation = options.sendMessageMutation ? useMutation(options.sendMessageMutation) : null;
  const createConversation = options.createConversationMutation ? useMutation(options.createConversationMutation) : null;
  const textAction = options.generateTextAction ? useAction2(options.generateTextAction) : null;
  // Server-backed messages win over the local fallback list.
  const messages = convexMessages ?? localMessages;
  const sendMessage = useCallback2(async (content) => {
    setIsSending(true);
    setError(null);
    try {
      // Lazily create a conversation on first send, when a factory exists.
      let activeConversationId = conversationId;
      if (!activeConversationId && createConversation) {
        activeConversationId = await createConversation({
          title: content.substring(0, 100),
          model: options.model ?? "gpt-4o",
          systemPrompt: options.systemPrompt
        });
        setConversationId(activeConversationId);
        if (activeConversationId) options.onNewConversation?.(activeConversationId);
      }
      // Persist the user turn via Convex when possible; otherwise keep it
      // in local state.
      if (sendMutation && activeConversationId) {
        await sendMutation({ conversationId: activeConversationId, content });
      } else {
        setLocalMessages((prev) => [
          ...prev,
          {
            id: `msg-${Date.now()}`,
            conversationId: activeConversationId ?? "local",
            userId: "local",
            role: "user",
            content,
            createdAt: Date.now()
          }
        ]);
      }
      if (textAction) {
        // Build the prompt from an optional system turn plus the history
        // captured by this callback's closure.
        const systemTurns = options.systemPrompt
          ? [{ role: "system", content: options.systemPrompt }]
          : [];
        const history = messages.map((m) => ({ role: m.role, content: m.content }));
        const aiResponse = await textAction({
          model: options.model ?? "gpt-4o",
          messages: [...systemTurns, ...history, { role: "user", content }],
          caller: "chat"
        });
        // Only mirror the assistant reply locally when Convex is not
        // persisting messages for us.
        if (!sendMutation) {
          setLocalMessages((prev) => [
            ...prev,
            {
              id: `msg-${Date.now()}-ai`,
              conversationId: activeConversationId ?? "local",
              userId: "ai",
              role: "assistant",
              content: aiResponse,
              model: options.model ?? "gpt-4o",
              createdAt: Date.now()
            }
          ]);
        }
      }
    } catch (err) {
      setError(err instanceof Error ? err.message : "Failed to send message");
    } finally {
      setIsSending(false);
    }
  }, [conversationId, sendMutation, createConversation, textAction, messages, options]);
  const clearError = useCallback2(() => setError(null), []);
  return {
    messages,
    isLoading: convexMessages === void 0 && !!options.listMessagesQuery,
    isSending,
    error,
    sendMessage,
    conversationId,
    clearError
  };
}
195
+
196
+ // src/hooks/useAILogs.ts
197
+ import { useQuery as useQuery2 } from "convex/react";
198
/**
 * Subscribes to AI call logs via a Convex query, forwarding optional
 * filters (model, provider, status, caller) and a result limit.
 *
 * @param {object} options - listLogsQuery (required), filters, limit.
 * @returns {object} { logs, isLoading } — logs defaults to [] while loading.
 */
function useAILogs(options) {
  const filters = options.filters;
  const logs = useQuery2(options.listLogsQuery, {
    model: filters?.model,
    provider: filters?.provider,
    status: filters?.status,
    caller: filters?.caller,
    // Cap the subscription at 50 rows unless the caller asks otherwise.
    limit: options.limit ?? 50
  });
  return {
    logs: logs ?? [],
    // useQuery yields undefined until the first snapshot arrives.
    isLoading: logs === void 0
  };
}
211
+
212
+ // src/hooks/useModelTest.ts
213
+ import { useState as useState3, useCallback as useCallback3 } from "react";
214
+ import { useAction as useAction3 } from "convex/react";
215
/**
 * Runs ad-hoc prompts against one or many models and records timing plus
 * output (or the error) for each run.
 *
 * @param {object} [options] - Convex action references per modality.
 * @returns {object} { runTest, runBatchTest, results, isRunning, clearResults }
 */
function useModelTest(options = {}) {
  const [results, setResults] = useState3([]);
  const [isRunning, setIsRunning] = useState3(false);
  // NOTE(review): hooks called conditionally — option refs must stay stable
  // across renders; confirm with callers.
  const textAction = options.generateTextAction ? useAction3(options.generateTextAction) : null;
  const imageAction = options.generateImageAction ? useAction3(options.generateImageAction) : null;
  const audioAction = options.generateAudioAction ? useAction3(options.generateAudioAction) : null;
  // NOTE(review): transcribeAction is resolved but no test type uses it —
  // presumably a "transcription" type was planned; confirm intent.
  const transcribeAction = options.transcribeAudioAction ? useAction3(options.transcribeAudioAction) : null;
  const videoAction = options.generateVideoAction ? useAction3(options.generateVideoAction) : null;
  const runTest = useCallback3(async (model, prompt, type = "text") => {
    setIsRunning(true);
    const startedAt = Date.now();
    try {
      let output = "";
      if (type === "text") {
        if (!textAction) throw new Error("generateTextAction not provided");
        output = await textAction({
          model,
          messages: [{ role: "user", content: prompt }],
          caller: "model-test"
        });
      } else if (type === "image") {
        if (!imageAction) throw new Error("generateImageAction not provided");
        output = await imageAction({ prompt, model });
      } else if (type === "audio") {
        if (!audioAction) throw new Error("generateAudioAction not provided");
        output = await audioAction({ prompt });
      } else if (type === "video") {
        if (!videoAction) throw new Error("generateVideoAction not provided");
        output = await videoAction({ prompt });
      } else {
        throw new Error(`Unsupported test type: ${type}`);
      }
      const record = {
        model,
        type,
        result: output,
        durationMs: Date.now() - startedAt,
        timestamp: Date.now()
      };
      setResults((prev) => [...prev, record]);
      return record;
    } catch (err) {
      // Failures are recorded as results too, never thrown to the caller.
      const record = {
        model,
        type,
        result: "",
        durationMs: Date.now() - startedAt,
        timestamp: Date.now(),
        error: err instanceof Error ? err.message : "Test failed"
      };
      setResults((prev) => [...prev, record]);
      return record;
    } finally {
      setIsRunning(false);
    }
  }, [textAction, imageAction, audioAction, videoAction]);
  const runBatchTest = useCallback3(async (models, prompt) => {
    setIsRunning(true);
    const collected = [];
    // Sequential on purpose: one model at a time.
    for (const model of models) {
      collected.push(await runTest(model, prompt));
    }
    setIsRunning(false);
    return collected;
  }, [runTest]);
  const clearResults = useCallback3(() => setResults([]), []);
  return { runTest, runBatchTest, results, isRunning, clearResults };
}
289
+
290
+ // src/hooks/useAIModels.ts
291
+ import { useQuery as useQuery3, useMutation as useMutation2 } from "convex/react";
292
+ import { useCallback as useCallback4 } from "react";
293
/**
 * Lists the configured AI models from Convex and exposes an optional
 * upsert helper for editing the model catalogue.
 *
 * @param {object} options - listModelsQuery (required), upsertModelMutation.
 * @returns {object} { models, isLoading, upsertModel }
 */
function useAIModels(options) {
  const models = useQuery3(options.listModelsQuery, {});
  // NOTE(review): conditional hook call — upsertModelMutation must be
  // consistently present or absent across renders.
  const upsertMutation = options.upsertModelMutation ? useMutation2(options.upsertModelMutation) : null;
  const upsertModel = useCallback4(async (model) => {
    if (!upsertMutation) throw new Error("upsertModelMutation not provided");
    const { id, provider, displayName, capabilities, contextWindow, isActive } = model;
    await upsertMutation({
      model: id,
      provider,
      displayName,
      // Costs default to 0 when the catalogue entry leaves them unset.
      inputCostPer1k: model.inputCostPer1k ?? 0,
      outputCostPer1k: model.outputCostPer1k ?? 0,
      capabilities,
      contextWindow,
      isActive
    });
  }, [upsertMutation]);
  return {
    models: models ?? [],
    isLoading: models === void 0,
    upsertModel
  };
}
315
+
316
+ // src/hooks/useImageGeneration.ts
317
+ import { useState as useState4, useCallback as useCallback5 } from "react";
318
+ import { useAction as useAction4 } from "convex/react";
319
/**
 * Image generation hook: calls the configured Convex action, normalizes the
 * response into a displayable URL, and keeps a newest-first gallery.
 *
 * @param {object} options - generateImageAction (required), defaultModel.
 * @returns {object} { generate, images, isGenerating, error, clearImages,
 *   clearError }
 */
function useImageGeneration(options) {
  const [images, setImages] = useState4([]);
  const [isGenerating, setIsGenerating] = useState4(false);
  const [error, setError] = useState4(null);
  const action = useAction4(options.generateImageAction);
  const generate = useCallback5(async (prompt, opts) => {
    setIsGenerating(true);
    setError(null);
    try {
      const chosenModel = opts?.model ?? options.defaultModel ?? "dall-e-3";
      const raw = await action({
        prompt,
        model: chosenModel,
        negativePrompt: opts?.negativePrompt,
        size: opts?.size ?? "1024x1024",
        quality: opts?.quality,
        style: opts?.style,
        n: opts?.n ?? 1,
        seed: opts?.seed
      });
      // The action may return a URL or raw base64 PNG data — normalize.
      const entry = {
        url: raw.startsWith("http") ? raw : `data:image/png;base64,${raw}`,
        prompt,
        model: chosenModel,
        timestamp: Date.now()
      };
      setImages((prev) => [entry, ...prev]);
      return entry.url;
    } catch (err) {
      setError(err instanceof Error ? err.message : "Image generation failed");
      throw err;
    } finally {
      setIsGenerating(false);
    }
  }, [action, options.defaultModel]);
  return {
    generate,
    images,
    isGenerating,
    error,
    clearImages: () => setImages([]),
    clearError: () => setError(null)
  };
}
364
+
365
+ // src/hooks/useTextToSpeech.ts
366
+ import { useState as useState5, useCallback as useCallback6, useRef as useRef2 } from "react";
367
+ import { useAction as useAction5 } from "convex/react";
368
/**
 * Text-to-speech hook: generates audio via a Convex action and, unless
 * autoPlay is explicitly false, plays it through an HTMLAudioElement.
 *
 * @param {object} options - generateAudioAction (required), defaultVoice,
 *   defaultModel, autoPlay.
 * @returns {object} { speak, stop, isSpeaking, isGenerating, error,
 *   audioUrl, clearError }
 */
function useTextToSpeech(options) {
  const [isGenerating, setIsGenerating] = useState5(false);
  const [isSpeaking, setIsSpeaking] = useState5(false);
  const [error, setError] = useState5(null);
  const [audioUrl, setAudioUrl] = useState5(null);
  const audioRef = useRef2(null);
  const action = useAction5(options.generateAudioAction);
  const speak = useCallback6(async (text, opts) => {
    setIsGenerating(true);
    setError(null);
    try {
      const payload = await action({
        prompt: text,
        voice: opts?.voice ?? options.defaultVoice ?? "alloy",
        model: opts?.model ?? options.defaultModel,
        speed: opts?.speed,
        voiceSettings: opts?.voiceSettings
      });
      // Action may return a URL or base64 MP3 data — normalize either way.
      const url = payload.startsWith("http") ? payload : `data:audio/mp3;base64,${payload}`;
      setAudioUrl(url);
      if (options.autoPlay !== false) {
        // Stop whatever clip is already playing before starting a new one.
        audioRef.current?.pause();
        const clip = new Audio(url);
        audioRef.current = clip;
        setIsSpeaking(true);
        clip.onended = () => setIsSpeaking(false);
        clip.onerror = () => {
          setIsSpeaking(false);
          setError("Audio playback failed");
        };
        await clip.play();
      }
      return url;
    } catch (err) {
      setError(err instanceof Error ? err.message : "TTS generation failed");
      throw err;
    } finally {
      setIsGenerating(false);
    }
  }, [action, options]);
  const stop = useCallback6(() => {
    audioRef.current?.pause();
    audioRef.current = null;
    setIsSpeaking(false);
  }, []);
  return {
    speak,
    stop,
    isSpeaking,
    isGenerating,
    error,
    audioUrl,
    clearError: () => setError(null)
  };
}
428
+
429
+ // src/hooks/useVideoGeneration.ts
430
+ import { useState as useState6, useCallback as useCallback7 } from "react";
431
+ import { useAction as useAction6 } from "convex/react";
432
/**
 * Video generation hook: invokes the configured Convex action and keeps a
 * newest-first list of generated clips.
 *
 * @param {object} options - generateVideoAction (required), defaultModel.
 * @returns {object} { generate, videos, isGenerating, error, clearVideos,
 *   clearError }
 */
function useVideoGeneration(options) {
  const [videos, setVideos] = useState6([]);
  const [isGenerating, setIsGenerating] = useState6(false);
  const [error, setError] = useState6(null);
  const action = useAction6(options.generateVideoAction);
  const generate = useCallback7(async (prompt, opts) => {
    setIsGenerating(true);
    setError(null);
    try {
      const chosenModel = opts?.model ?? options.defaultModel ?? "minimax/video-01";
      const url = await action({
        prompt,
        model: chosenModel,
        duration: opts?.duration,
        aspectRatio: opts?.aspectRatio,
        resolution: opts?.resolution,
        startImage: opts?.startImage,
        endImage: opts?.endImage
      });
      setVideos((prev) => [
        { url, prompt, model: chosenModel, timestamp: Date.now() },
        ...prev
      ]);
      return url;
    } catch (err) {
      setError(err instanceof Error ? err.message : "Video generation failed");
      throw err;
    } finally {
      setIsGenerating(false);
    }
  }, [action, options.defaultModel]);
  return {
    generate,
    videos,
    isGenerating,
    error,
    clearVideos: () => setVideos([]),
    clearError: () => setError(null)
  };
}
471
+
472
+ // src/hooks/useTranscription.ts
473
+ import { useState as useState7, useCallback as useCallback8, useRef as useRef3 } from "react";
474
+ import { useAction as useAction7 } from "convex/react";
475
/**
 * Transcription hook: sends base64 audio to a Convex action, and (when
 * enableMicrophone is set) records from the browser microphone via
 * MediaRecorder and transcribes the captured clip on stop.
 *
 * @param {object} options - transcribeAction (required), defaultModel,
 *   enableMicrophone.
 * @returns {object} { transcribe, startRecording, stopRecording,
 *   isTranscribing, isRecording, lastResult, error, clearError }
 */
function useTranscription(options) {
  const [isTranscribing, setIsTranscribing] = useState7(false);
  const [isRecording, setIsRecording] = useState7(false);
  const [lastResult, setLastResult] = useState7(null);
  const [error, setError] = useState7(null);
  const mediaRecorderRef = useRef3(null);
  const chunksRef = useRef3([]);
  const action = useAction7(options.transcribeAction);
  const transcribe = useCallback8(async (audioBase64, opts) => {
    setIsTranscribing(true);
    setError(null);
    const startedAt = Date.now();
    try {
      const text = await action({
        audio: audioBase64,
        model: opts?.model ?? options.defaultModel,
        language: opts?.language,
        prompt: opts?.prompt
      });
      setLastResult({ text, timestamp: Date.now(), durationMs: Date.now() - startedAt });
      return text;
    } catch (err) {
      setError(err instanceof Error ? err.message : "Transcription failed");
      throw err;
    } finally {
      setIsTranscribing(false);
    }
  }, [action, options.defaultModel]);
  const startRecording = useCallback8(async () => {
    if (!options.enableMicrophone) throw new Error("Microphone not enabled");
    const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
    const recorder = new MediaRecorder(stream, { mimeType: "audio/webm" });
    chunksRef.current = [];
    recorder.ondataavailable = (e) => {
      if (e.data.size > 0) chunksRef.current.push(e.data);
    };
    recorder.start();
    mediaRecorderRef.current = recorder;
    setIsRecording(true);
  }, [options.enableMicrophone]);
  const stopRecording = useCallback8(async () => {
    return new Promise((resolve, reject) => {
      const recorder = mediaRecorderRef.current;
      if (!recorder) {
        reject(new Error("Not recording"));
        return;
      }
      recorder.onstop = async () => {
        setIsRecording(false);
        // Assemble the recorded chunks and base64-encode them byte by byte
        // (chunked manual encode avoids huge spread-call argument lists).
        const blob = new Blob(chunksRef.current, { type: "audio/webm" });
        const buffer = await blob.arrayBuffer();
        const bytes = new Uint8Array(buffer);
        let binary = "";
        for (const byte of bytes) {
          binary += String.fromCharCode(byte);
        }
        const base64 = btoa(binary);
        // Release the microphone before transcribing.
        recorder.stream.getTracks().forEach((track) => track.stop());
        try {
          resolve(await transcribe(base64));
        } catch (err) {
          reject(err);
        }
      };
      recorder.stop();
    });
  }, [transcribe]);
  return {
    transcribe,
    startRecording,
    stopRecording,
    isTranscribing,
    isRecording,
    lastResult,
    error,
    clearError: () => setError(null)
  };
}
555
+
556
+ // src/hooks/useRealtimeAudio.ts
557
+ import { useState as useState8, useCallback as useCallback9, useRef as useRef4, useEffect as useEffect2 } from "react";
558
/**
 * React hook that streams microphone audio to a realtime speech WebSocket
 * endpoint (OpenAI Realtime-style protocol: `session.update`,
 * `input_audio_buffer.append`, `response.*` events) and exposes
 * connect/disconnect controls plus connection, speech, and error state.
 *
 * NOTE(review): `transcript` is returned but no handler in this block ever
 * calls `setTranscript` — presumably a transcription event still needs to be
 * wired up; confirm against the server's event set.
 * NOTE(review): `playAudioDelta` is an empty stub, so `response.audio.delta`
 * payloads are currently dropped — confirm whether playback is intended.
 */
function useRealtimeAudio(options) {
  const [isConnected, setIsConnected] = useState8(false);
  const [isSpeaking, setIsSpeaking] = useState8(false);
  const [isListening, setIsListening] = useState8(false);
  const [error, setError] = useState8(null);
  const [transcript, setTranscript] = useState8("");
  const [response, setResponse] = useState8("");
  // Refs so event handlers and cleanup can reach the live socket/audio graph
  // without re-creating callbacks.
  const wsRef = useRef4(null);
  const audioContextRef = useRef4(null);
  const streamRef = useRef4(null);
  const processorRef = useRef4(null);
  // Opens the WebSocket, configures the session, then starts mic capture.
  const connect = useCallback9(async () => {
    try {
      setError(null);
      let wsUrl = options.wsUrl ?? "wss://api.openai.com/v1/realtime";
      // Optional server action can mint an ephemeral session and return its URL.
      if (options.getSessionAction) {
        const session = await options.getSessionAction({
          model: options.model ?? "gpt-4o-realtime-preview",
          voice: options.voice ?? "alloy",
          instructions: options.instructions
        });
        wsUrl = session.url ?? wsUrl;
      }
      const ws = new WebSocket(wsUrl);
      wsRef.current = ws;
      ws.onopen = () => {
        setIsConnected(true);
        // Configure the session before any audio is streamed.
        ws.send(JSON.stringify({
          type: "session.update",
          session: {
            model: options.model ?? "gpt-4o-realtime-preview",
            voice: options.voice ?? "alloy",
            instructions: options.instructions ?? "You are a helpful assistant.",
            input_audio_format: options.inputAudioFormat ?? "pcm16",
            output_audio_format: options.outputAudioFormat ?? "pcm16",
            // Server-side voice-activity detection by default.
            turn_detection: options.turnDetection ?? {
              type: "server_vad",
              threshold: 0.5,
              prefix_padding_ms: 300,
              silence_duration_ms: 500
            },
            tools: options.tools ?? []
          }
        }));
        startAudioCapture(ws);
      };
      ws.onmessage = (event) => {
        // NOTE(review): JSON.parse is unguarded — a malformed frame throws
        // inside the handler; confirm the server only sends JSON.
        const data = JSON.parse(event.data);
        options.onEvent?.(data);
        switch (data.type) {
          case "input_audio_buffer.speech_started":
            setIsSpeaking(true);
            break;
          case "input_audio_buffer.speech_stopped":
            setIsSpeaking(false);
            break;
          case "response.text.delta":
            // Accumulate streamed text; cleared again on `done`.
            setResponse((prev) => prev + data.delta);
            break;
          case "response.text.done":
            options.onResponse?.(data.text);
            setResponse("");
            break;
          case "response.audio.delta":
            playAudioDelta(data.delta);
            break;
          case "error":
            setError(data.error.message);
            break;
        }
      };
      ws.onerror = () => setError("WebSocket connection error");
      ws.onclose = () => {
        setIsConnected(false);
        setIsListening(false);
        stopAudioCapture();
      };
    } catch (err) {
      setError(err instanceof Error ? err.message : "Failed to connect");
    }
  }, [options]);
  // Captures mic audio at 24 kHz mono, converts each buffer to 16-bit PCM,
  // base64-encodes it, and appends it to the server's input audio buffer.
  const startAudioCapture = useCallback9(async (ws) => {
    try {
      const stream = await navigator.mediaDevices.getUserMedia({ audio: { sampleRate: 24e3, channelCount: 1 } });
      streamRef.current = stream;
      const audioCtx = new AudioContext({ sampleRate: 24e3 });
      audioContextRef.current = audioCtx;
      const source = audioCtx.createMediaStreamSource(stream);
      // NOTE(review): createScriptProcessor is deprecated in favor of
      // AudioWorklet; kept as-is here.
      const processor = audioCtx.createScriptProcessor(4096, 1, 1);
      processorRef.current = processor;
      processor.onaudioprocess = (e) => {
        if (ws.readyState !== WebSocket.OPEN) return;
        const inputData = e.inputBuffer.getChannelData(0);
        // Float32 [-1, 1] -> signed 16-bit PCM.
        const pcm16 = new Int16Array(inputData.length);
        for (let i = 0; i < inputData.length; i++) {
          const s = Math.max(-1, Math.min(1, inputData[i]));
          pcm16[i] = s < 0 ? s * 32768 : s * 32767;
        }
        // Byte-wise base64 encode (btoa needs a binary string).
        const bytes = new Uint8Array(pcm16.buffer);
        let binary = "";
        for (let i = 0; i < bytes.length; i++) {
          binary += String.fromCharCode(bytes[i]);
        }
        ws.send(JSON.stringify({
          type: "input_audio_buffer.append",
          audio: btoa(binary)
        }));
      };
      source.connect(processor);
      processor.connect(audioCtx.destination);
      setIsListening(true);
    } catch (err) {
      setError(err instanceof Error ? err.message : "Microphone access denied");
    }
  }, []);
  // Tears down the audio graph and releases the microphone.
  const stopAudioCapture = useCallback9(() => {
    processorRef.current?.disconnect();
    processorRef.current = null;
    streamRef.current?.getTracks().forEach((t) => t.stop());
    streamRef.current = null;
    audioContextRef.current?.close();
    audioContextRef.current = null;
    setIsListening(false);
  }, []);
  // Intentionally a no-op today; see the review note on the hook docstring.
  const playAudioDelta = useCallback9((_delta) => {
  }, []);
  const disconnect = useCallback9(() => {
    wsRef.current?.close();
    wsRef.current = null;
    stopAudioCapture();
    setIsConnected(false);
  }, [stopAudioCapture]);
  // Disconnect on unmount. Deps are intentionally empty; `disconnect` only
  // touches refs, so the first-render closure stays valid.
  useEffect2(() => {
    return () => {
      disconnect();
    };
  }, []);
  return {
    connect,
    disconnect,
    isConnected,
    isSpeaking,
    isListening,
    error,
    transcript,
    response,
    clearError: () => setError(null)
  };
}
707
+
708
+ // src/hooks/useContentManager.ts
709
+ import { useState as useState9, useCallback as useCallback10 } from "react";
710
/**
 * Content-generation hook: wraps a caller-supplied `generateFn` server action
 * with result/loading/error state and per-action convenience wrappers
 * (generate, rewrite, translate, summarize, variations, changeTone, proofread).
 * Every wrapper resolves with the server result and rethrows failures after
 * recording them in `error`.
 */
function useContentManager(options) {
  const { generateFn, defaultType = "text", defaultTone, defaultModel } = options;
  const [result, setResult] = useState9(null);
  const [isGenerating, setIsGenerating] = useState9(false);
  const [error, setError] = useState9(null);
  // Single dispatcher shared by every action helper below.
  const execute = useCallback10(async (action, input, extra) => {
    setIsGenerating(true);
    setError(null);
    try {
      // Explicit fields first; spreading `extra` afterwards lets callers
      // pass through any additional options unchanged.
      const payload = {
        action,
        input,
        type: extra?.type ?? defaultType,
        tone: extra?.tone ?? defaultTone,
        model: extra?.model ?? defaultModel,
        ...extra
      };
      const generated = await generateFn(payload);
      setResult(generated);
      return generated;
    } catch (cause) {
      const failure = cause instanceof Error ? cause : new Error(String(cause));
      setError(failure);
      throw failure;
    } finally {
      setIsGenerating(false);
    }
  }, [generateFn, defaultType, defaultTone, defaultModel]);
  const generate = useCallback10((input, opts) => execute("generate", input, opts), [execute]);
  const rewrite = useCallback10((text, instructions) => execute("rewrite", text, { instructions }), [execute]);
  const translate = useCallback10((text, language) => execute("translate", text, { language }), [execute]);
  const summarize = useCallback10((text) => execute("summarize", text), [execute]);
  const variations = useCallback10((text, count = 3) => execute("variations", text, { variations: count }), [execute]);
  const changeTone = useCallback10((text, tone) => execute("change-tone", text, { tone }), [execute]);
  const proofread = useCallback10((text) => execute("proofread", text), [execute]);
  const reset = useCallback10(() => {
    setResult(null);
    setError(null);
  }, []);
  return {
    generate,
    rewrite,
    translate,
    summarize,
    variations,
    changeTone,
    proofread,
    result,
    isGenerating,
    error,
    reset
  };
}
762
+
763
+ // src/hooks/useMemory.ts
764
+ import { useState as useState10, useCallback as useCallback11 } from "react";
765
/**
 * Memory CRUD hook backed by caller-supplied server actions
 * (`storeFn` / `searchFn` / `deleteFn` / `clearFn`). Mirrors fetched entries
 * in local `memories` state, tracks loading and error, and rethrows every
 * failure after recording it.
 */
function useMemory(options) {
  const {
    storeFn,
    searchFn,
    deleteFn,
    clearFn,
    defaultNamespace = "user",
    defaultScopeId = ""
  } = options;
  const [memories, setMemories] = useState10([]);
  const [isLoading, setIsLoading] = useState10(false);
  const [error, setError] = useState10(null);
  // Normalize anything thrown into a proper Error instance.
  const toError = (cause) => cause instanceof Error ? cause : new Error(String(cause));
  // Persist one entry and append it to local state.
  const store = useCallback11(async (key, value, opts) => {
    setIsLoading(true);
    setError(null);
    try {
      const stored = await storeFn({
        namespace: opts?.namespace ?? defaultNamespace,
        type: opts?.type ?? "fact",
        importance: opts?.importance ?? "medium",
        key,
        value,
        metadata: opts?.metadata,
        scopeId: opts?.scopeId ?? defaultScopeId
      });
      setMemories((prev) => [...prev, stored]);
      return stored;
    } catch (cause) {
      const failure = toError(cause);
      setError(failure);
      throw failure;
    } finally {
      setIsLoading(false);
    }
  }, [storeFn, defaultNamespace, defaultScopeId]);
  // Fetch a single entry by key; resolves null when nothing matches.
  const recall = useCallback11(async (key, namespace) => {
    setIsLoading(true);
    setError(null);
    try {
      const matches = await searchFn({
        namespace: namespace ?? defaultNamespace,
        key,
        scopeId: defaultScopeId,
        limit: 1
      });
      return matches.length > 0 ? matches[0] : null;
    } catch (cause) {
      const failure = toError(cause);
      setError(failure);
      throw failure;
    } finally {
      setIsLoading(false);
    }
  }, [searchFn, defaultNamespace, defaultScopeId]);
  // Free-form search; replaces local `memories` with the results.
  const search = useCallback11(async (query) => {
    setIsLoading(true);
    setError(null);
    try {
      const matches = await searchFn({
        scopeId: defaultScopeId,
        ...query
      });
      setMemories(matches);
      return matches;
    } catch (cause) {
      const failure = toError(cause);
      setError(failure);
      throw failure;
    } finally {
      setIsLoading(false);
    }
  }, [searchFn, defaultScopeId]);
  // Delete one entry by id and drop it from local state.
  const remove = useCallback11(async (id) => {
    setError(null);
    try {
      await deleteFn(id);
      setMemories((prev) => prev.filter((m) => m.id !== id));
    } catch (cause) {
      const failure = toError(cause);
      setError(failure);
      throw failure;
    }
  }, [deleteFn]);
  // Wipe a whole namespace (default namespace when none given).
  const clear = useCallback11(async (namespace) => {
    setError(null);
    try {
      await clearFn(namespace ?? defaultNamespace, defaultScopeId);
      setMemories([]);
    } catch (cause) {
      const failure = toError(cause);
      setError(failure);
      throw failure;
    }
  }, [clearFn, defaultNamespace, defaultScopeId]);
  return { store, recall, search, remove, clear, memories, isLoading, error };
}
861
+
862
+ // src/hooks/useSkills.ts
863
+ import { useState as useState11, useCallback as useCallback12, useMemo } from "react";
864
+ import { BUILT_IN_SKILLS } from "@geenius/ai-shared";
865
/**
 * Skill registry + execution hook. Merges the built-in catalog with
 * caller-supplied custom skills (custom entries win on id collisions) and
 * exposes lookup/search helpers plus an executor with result/error state.
 */
function useSkills(options) {
  const { executeFn, customSkills = [], defaultModel, userId } = options;
  const [result, setResult] = useState11(null);
  const [isExecuting, setIsExecuting] = useState11(false);
  const [error, setError] = useState11(null);
  // Deduplicate by id via a Map; later inserts (custom skills) override.
  const allSkills = useMemo(() => {
    const registry = /* @__PURE__ */ new Map();
    for (const skill of Object.values(BUILT_IN_SKILLS)) registry.set(skill.id, skill);
    for (const skill of customSkills) registry.set(skill.id, skill);
    return Array.from(registry.values());
  }, [customSkills]);
  const byCategory = useCallback12((category) => allSkills.filter((s) => s.category === category), [allSkills]);
  // Case-insensitive match against name, description, or any tag.
  const searchSkills = useCallback12((query) => {
    const needle = query.toLowerCase();
    const matchesNeedle = (s) => s.name.toLowerCase().includes(needle) || s.description.toLowerCase().includes(needle) || s.tags?.some((t) => t.toLowerCase().includes(needle));
    return allSkills.filter(matchesNeedle);
  }, [allSkills]);
  const getSkill = useCallback12((id) => allSkills.find((s) => s.id === id), [allSkills]);
  // Run one skill through the server action; rethrows after recording errors.
  const execute = useCallback12(async (skillId, params, context) => {
    setIsExecuting(true);
    setError(null);
    try {
      const outcome = await executeFn({
        skillId,
        params,
        userId,
        context,
        model: defaultModel
      });
      setResult(outcome);
      return outcome;
    } catch (cause) {
      const failure = cause instanceof Error ? cause : new Error(String(cause));
      setError(failure);
      throw failure;
    } finally {
      setIsExecuting(false);
    }
  }, [executeFn, userId, defaultModel]);
  const reset = useCallback12(() => {
    setResult(null);
    setError(null);
  }, []);
  return {
    skills: allSkills,
    byCategory,
    search: searchSkills,
    getSkill,
    execute,
    result,
    isExecuting,
    error,
    reset
  };
}
921
+
922
+ // src/components/ChatWindow.tsx
923
+ import { useState as useState12, useRef as useRef6, useEffect as useEffect3 } from "react";
924
+ import { jsx, jsxs } from "react/jsx-runtime";
925
/**
 * Headless chat UI: message list, error banner, and input form, all styled
 * via `data-ai-*` attributes. Chat state comes from the `useChat` hook;
 * `renderMessage` / `renderInput` props may replace the default markup.
 */
function ChatWindow(props) {
  const [input, setInput] = useState12("");
  const messagesEndRef = useRef6(null);
  const chat = useChat(props);
  // Keep the newest message scrolled into view whenever the list grows.
  useEffect3(() => {
    messagesEndRef.current?.scrollIntoView({ behavior: "smooth" });
  }, [chat.messages.length]);
  // Submits via form submit or Enter (without Shift); no-op while sending.
  const handleSubmit = async (e) => {
    e?.preventDefault();
    if (!input.trim() || chat.isSending) return;
    const content = input.trim();
    // Clear the box before the send resolves.
    setInput("");
    await chat.sendMessage(content);
  };
  return /* @__PURE__ */ jsxs("div", { className: props.className, "data-ai-component": "chat-window", children: [
    /* @__PURE__ */ jsxs("div", { "data-ai-messages": true, children: [
      chat.messages.length === 0 && /* @__PURE__ */ jsx("div", { "data-ai-empty": true, children: /* @__PURE__ */ jsx("p", { children: "Start a conversation" }) }),
      chat.messages.map(
        (msg) => props.renderMessage ? props.renderMessage(msg) : /* @__PURE__ */ jsxs(
          "div",
          {
            "data-ai-message": true,
            "data-ai-role": msg.role,
            children: [
              /* @__PURE__ */ jsx("div", { "data-ai-message-role": true, children: msg.role }),
              /* @__PURE__ */ jsx("div", { "data-ai-message-content": true, children: msg.content }),
              msg.tokens && /* @__PURE__ */ jsxs("span", { "data-ai-message-tokens": true, children: [
                msg.tokens,
                " tokens"
              ] })
            ]
          },
          msg.id
        )
      ),
      chat.isSending && /* @__PURE__ */ jsx("div", { "data-ai-message": true, "data-ai-role": "assistant", "data-ai-loading": true, children: /* @__PURE__ */ jsxs("div", { "data-ai-typing-indicator": true, children: [
        /* @__PURE__ */ jsx("span", {}),
        /* @__PURE__ */ jsx("span", {}),
        /* @__PURE__ */ jsx("span", {})
      ] }) }),
      /* @__PURE__ */ jsx("div", { ref: messagesEndRef })
    ] }),
    chat.error && /* @__PURE__ */ jsxs("div", { "data-ai-error": true, role: "alert", children: [
      /* @__PURE__ */ jsx("span", { children: chat.error }),
      /* @__PURE__ */ jsx("button", { onClick: chat.clearError, "data-ai-dismiss": true, children: "\xD7" })
    ] }),
    props.renderInput ? props.renderInput({
      value: input,
      onChange: setInput,
      onSubmit: handleSubmit,
      isSending: chat.isSending
    }) : /* @__PURE__ */ jsxs("form", { onSubmit: handleSubmit, "data-ai-input-form": true, children: [
      /* @__PURE__ */ jsx(
        "textarea",
        {
          value: input,
          onChange: (e) => setInput(e.target.value),
          placeholder: "Type a message\u2026",
          disabled: chat.isSending,
          "data-ai-input": true,
          onKeyDown: (e) => {
            if (e.key === "Enter" && !e.shiftKey) {
              e.preventDefault();
              handleSubmit();
            }
          }
        }
      ),
      /* @__PURE__ */ jsx(
        "button",
        {
          type: "submit",
          disabled: chat.isSending || !input.trim(),
          "data-ai-send": true,
          children: chat.isSending ? "Sending\u2026" : "Send"
        }
      )
    ] })
  ] });
}
1005
+
1006
+ // src/components/ModelSelector.tsx
1007
+ import { jsx as jsx2, jsxs as jsxs2 } from "react/jsx-runtime";
1008
/**
 * Dropdown for picking an active AI model. Optionally narrows the list to a
 * single provider and can append per-1k token pricing to each label.
 */
function ModelSelector(props) {
  const candidates = props.filterProvider ? props.models.filter((m) => m.provider === props.filterProvider) : props.models;
  // Only active models are offered.
  const selectable = candidates.filter((m) => m.isActive);
  const renderOption = (model) => /* @__PURE__ */ jsxs2("option", { value: model.id, "data-ai-provider": model.provider, children: [
    model.displayName ?? model.name,
    props.showCost ? ` ($${model.inputCostPer1k}/$${model.outputCostPer1k}/1k)` : ""
  ] }, model.id);
  return /* @__PURE__ */ jsxs2(
    "select",
    {
      className: props.className,
      value: props.selectedModel ?? "",
      onChange: (e) => props.onSelect(e.target.value),
      disabled: props.disabled,
      "data-ai-component": "model-selector",
      children: [
        /* @__PURE__ */ jsx2("option", { value: "", disabled: true, children: "Select a model\u2026" }),
        selectable.map(renderOption)
      ]
    }
  );
}
1029
+
1030
+ // src/components/AILogTable.tsx
1031
+ import { jsx as jsx3, jsxs as jsxs3 } from "react/jsx-runtime";
1032
/**
 * Format a millisecond duration for display: sub-second values as "Nms",
 * everything else as seconds with one decimal ("N.Ns").
 */
function formatDuration(ms) {
  const oneSecond = 1000;
  return ms < oneSecond ? `${ms}ms` : `${(ms / oneSecond).toFixed(1)}s`;
}
1036
/**
 * Format a USD cost for display. Missing/zero costs render as an em dash;
 * sub-cent values get four decimals, everything else two.
 */
function formatCost(usd) {
  if (!usd) return "\u2014";
  const decimals = usd < 0.01 ? 4 : 2;
  return `$${usd.toFixed(decimals)}`;
}
1041
/**
 * Inline status badge: a check mark for "success", a cross for anything
 * else, followed by the raw status text.
 */
function StatusBadge({ status }) {
  const mark = status === "success" ? "\u2713" : "\u2717";
  return /* @__PURE__ */ jsxs3("span", { "data-ai-status": status, children: [mark, " ", status] });
}
1048
/**
 * Read-only table of AI request logs with loading and empty states.
 * Rows invoke `onRowClick` when provided; all styling hooks are `data-ai-*`
 * attributes. Rows are keyed by `log.requestId`.
 */
function AILogTable(props) {
  if (props.isLoading) {
    return /* @__PURE__ */ jsx3("div", { "data-ai-component": "log-table", "data-ai-loading": true, children: "Loading logs\u2026" });
  }
  if (props.logs.length === 0) {
    return /* @__PURE__ */ jsx3("div", { "data-ai-component": "log-table", "data-ai-empty": true, children: "No AI logs yet" });
  }
  return /* @__PURE__ */ jsx3("div", { className: props.className, "data-ai-component": "log-table", children: /* @__PURE__ */ jsxs3("table", { "data-ai-table": true, children: [
    /* @__PURE__ */ jsx3("thead", { children: /* @__PURE__ */ jsxs3("tr", { children: [
      /* @__PURE__ */ jsx3("th", { children: "Time" }),
      /* @__PURE__ */ jsx3("th", { children: "Model" }),
      /* @__PURE__ */ jsx3("th", { children: "Provider" }),
      /* @__PURE__ */ jsx3("th", { children: "Caller" }),
      /* @__PURE__ */ jsx3("th", { children: "Status" }),
      /* @__PURE__ */ jsx3("th", { children: "Duration" }),
      /* @__PURE__ */ jsx3("th", { children: "Tokens" }),
      /* @__PURE__ */ jsx3("th", { children: "Cost" })
    ] }) }),
    /* @__PURE__ */ jsx3("tbody", { children: props.logs.map((log) => /* @__PURE__ */ jsxs3(
      "tr",
      {
        onClick: () => props.onRowClick?.(log),
        "data-ai-log-row": true,
        "data-ai-status": log.status,
        children: [
          /* @__PURE__ */ jsx3("td", { "data-ai-cell": "time", children: new Date(log.timestamp).toLocaleTimeString() }),
          /* @__PURE__ */ jsx3("td", { "data-ai-cell": "model", children: log.model }),
          /* @__PURE__ */ jsx3("td", { "data-ai-cell": "provider", children: log.provider }),
          /* @__PURE__ */ jsx3("td", { "data-ai-cell": "caller", children: log.caller }),
          /* @__PURE__ */ jsx3("td", { "data-ai-cell": "status", children: /* @__PURE__ */ jsx3(StatusBadge, { status: log.status }) }),
          /* @__PURE__ */ jsx3("td", { "data-ai-cell": "duration", children: formatDuration(log.durationMs) }),
          /* @__PURE__ */ jsx3("td", { "data-ai-cell": "tokens", children: log.totalTokens ?? "\u2014" }),
          /* @__PURE__ */ jsx3("td", { "data-ai-cell": "cost", children: formatCost(log.totalCostUsd) })
        ]
      },
      log.requestId
    )) })
  ] }) });
}
1087
+
1088
+ // src/components/ModelTestRunner.tsx
1089
+ import { useState as useState13 } from "react";
1090
+ import { jsx as jsx4, jsxs as jsxs4 } from "react/jsx-runtime";
1091
// Test-runner tab definitions: one entry per supported generation modality;
// `id` doubles as the test type passed to `runTest`.
var TABS = [
  { id: "text", label: "Text Generation" },
  { id: "image", label: "Image Generation" },
  { id: "audio", label: "Text-to-Speech" },
  { id: "transcription", label: "Audio-to-Text" },
  { id: "video", label: "Video Generation" }
];
1098
/**
 * Interactive model test bench: tabbed by modality (see TABS), with model
 * picker, prompt textarea, optional "test all models" batch mode, and a
 * results list. Execution state comes from the `useModelTest` hook; a
 * `renderResult` prop may replace the default result markup.
 */
function ModelTestRunner(props) {
  const [activeTab, setActiveTab] = useState13(props.defaultTab ?? "text");
  const [prompt, setPrompt] = useState13("");
  const [selectedModel, setSelectedModel] = useState13("");
  const [batchMode, setBatchMode] = useState13(false);
  const test = useModelTest(props);
  // Batch mode runs every available model; otherwise a single selected model.
  const handleSubmit = async (e) => {
    e.preventDefault();
    if (!prompt.trim()) return;
    if (batchMode && props.availableModels) {
      await test.runBatchTest(props.availableModels, prompt.trim());
    } else if (selectedModel) {
      await test.runTest(selectedModel, prompt.trim(), activeTab);
    }
  };
  return /* @__PURE__ */ jsxs4("div", { className: props.className, "data-ai-component": "model-test-runner", children: [
    /* @__PURE__ */ jsx4("div", { "data-ai-tabs": true, role: "tablist", children: TABS.map((tab) => /* @__PURE__ */ jsx4(
      "button",
      {
        role: "tab",
        "aria-selected": activeTab === tab.id,
        onClick: () => setActiveTab(tab.id),
        "data-ai-tab": true,
        "data-ai-tab-active": activeTab === tab.id ? "" : void 0,
        children: tab.label
      },
      tab.id
    )) }),
    /* @__PURE__ */ jsxs4("form", { onSubmit: handleSubmit, "data-ai-test-form": true, children: [
      /* @__PURE__ */ jsxs4("div", { "data-ai-field": "model", children: [
        /* @__PURE__ */ jsx4("label", { htmlFor: "test-model", children: "Model" }),
        /* @__PURE__ */ jsxs4(
          "select",
          {
            id: "test-model",
            value: selectedModel,
            onChange: (e) => setSelectedModel(e.target.value),
            disabled: test.isRunning || batchMode,
            "data-ai-input": true,
            children: [
              /* @__PURE__ */ jsx4("option", { value: "", children: "Select model\u2026" }),
              (props.availableModels ?? []).map((m) => /* @__PURE__ */ jsx4("option", { value: m, children: m }, m))
            ]
          }
        )
      ] }),
      /* @__PURE__ */ jsxs4("div", { "data-ai-field": "prompt", children: [
        /* @__PURE__ */ jsx4("label", { htmlFor: "test-prompt", children: "Prompt" }),
        /* @__PURE__ */ jsx4(
          "textarea",
          {
            id: "test-prompt",
            value: prompt,
            onChange: (e) => setPrompt(e.target.value),
            placeholder: "Enter your test prompt\u2026",
            disabled: test.isRunning,
            "data-ai-input": true
          }
        )
      ] }),
      /* @__PURE__ */ jsxs4("div", { "data-ai-actions": true, children: [
        /* @__PURE__ */ jsxs4("label", { "data-ai-toggle": true, children: [
          /* @__PURE__ */ jsx4(
            "input",
            {
              type: "checkbox",
              checked: batchMode,
              onChange: (e) => setBatchMode(e.target.checked),
              disabled: test.isRunning
            }
          ),
          "Test all models"
        ] }),
        /* @__PURE__ */ jsx4("button", { type: "submit", disabled: test.isRunning || !prompt.trim(), "data-ai-submit": true, children: test.isRunning ? "Running\u2026" : batchMode ? "Run All" : "Run Test" }),
        test.results.length > 0 && /* @__PURE__ */ jsx4("button", { type: "button", onClick: test.clearResults, "data-ai-clear": true, children: "Clear Results" })
      ] })
    ] }),
    test.results.length > 0 && /* @__PURE__ */ jsx4("div", { "data-ai-test-results": true, children: test.results.map((result, i) => props.renderResult ? props.renderResult(result) : /* @__PURE__ */ jsxs4("div", { "data-ai-test-result": true, "data-ai-status": result.error ? "error" : "success", children: [
      /* @__PURE__ */ jsxs4("div", { "data-ai-result-header": true, children: [
        /* @__PURE__ */ jsx4("span", { "data-ai-result-model": true, children: result.model }),
        /* @__PURE__ */ jsxs4("span", { "data-ai-result-duration": true, children: [
          result.durationMs,
          "ms"
        ] }),
        result.error && /* @__PURE__ */ jsx4("span", { "data-ai-result-error": true, children: result.error })
      ] }),
      !result.error && /* @__PURE__ */ jsx4("div", { "data-ai-result-content": true, children: result.type === "image" ? /* @__PURE__ */ jsx4(
        "img",
        {
          src: result.result.startsWith("http") ? result.result : `data:image/png;base64,${result.result}`,
          alt: "Generated",
          "data-ai-result-image": true
        }
      ) : /* @__PURE__ */ jsx4("pre", { "data-ai-result-text": true, children: result.result }) })
    ] }, i)) })
  ] });
}
1195
+
1196
+ // src/components/GenerationCard.tsx
1197
+ import { jsx as jsx5, jsxs as jsxs5 } from "react/jsx-runtime";
1198
/**
 * Presentational card for one AI generation. Picks a renderer by
 * `props.type`: image / audio ("audio" or "speech") / video / text (default).
 * Non-HTTP image and audio content is assumed to be base64 and wrapped in a
 * data: URI. Errors replace the content area; tokens/cost render in a footer.
 */
function GenerationCard(props) {
  return /* @__PURE__ */ jsxs5("div", { className: props.className, "data-ai-component": "generation-card", "data-ai-type": props.type, children: [
    /* @__PURE__ */ jsxs5("div", { "data-ai-card-header": true, children: [
      /* @__PURE__ */ jsx5("span", { "data-ai-card-type": true, children: props.type }),
      props.model && /* @__PURE__ */ jsx5("span", { "data-ai-card-model": true, children: props.model }),
      props.durationMs != null && /* @__PURE__ */ jsxs5("span", { "data-ai-card-duration": true, children: [
        props.durationMs,
        "ms"
      ] })
    ] }),
    props.error ? /* @__PURE__ */ jsx5("div", { "data-ai-card-error": true, role: "alert", children: props.error }) : /* @__PURE__ */ jsx5("div", { "data-ai-card-content": true, children: props.type === "image" ? /* @__PURE__ */ jsx5(
      "img",
      {
        src: props.content.startsWith("http") ? props.content : `data:image/png;base64,${props.content}`,
        alt: "AI Generated",
        "data-ai-card-image": true
      }
    ) : props.type === "audio" || props.type === "speech" ? /* @__PURE__ */ jsx5("audio", { controls: true, "data-ai-card-audio": true, children: /* @__PURE__ */ jsx5(
      "source",
      {
        src: props.content.startsWith("http") ? props.content : `data:audio/mp3;base64,${props.content}`,
        type: "audio/mp3"
      }
    ) }) : props.type === "video" ? /* @__PURE__ */ jsx5("video", { controls: true, "data-ai-card-video": true, children: /* @__PURE__ */ jsx5("source", { src: props.content, type: "video/mp4" }) }) : /* @__PURE__ */ jsx5("pre", { "data-ai-card-text": true, children: props.content }) }),
    (props.tokens || props.cost != null) && /* @__PURE__ */ jsxs5("div", { "data-ai-card-footer": true, children: [
      props.tokens && /* @__PURE__ */ jsxs5("span", { "data-ai-card-tokens": true, children: [
        props.tokens,
        " tokens"
      ] }),
      props.cost != null && /* @__PURE__ */ jsxs5("span", { "data-ai-card-cost": true, children: [
        "$",
        props.cost < 0.01 ? props.cost.toFixed(4) : props.cost.toFixed(2)
      ] })
    ] })
  ] });
}
1234
+
1235
+ // src/components/ImageGenerator.tsx
1236
+ import { useState as useState14 } from "react";
1237
+ import { jsx as jsx6, jsxs as jsxs6 } from "react/jsx-runtime";
1238
/**
 * Image-generation form + gallery. Collects prompt, optional model, size and
 * quality; delegates generation and gallery state to `useImageGeneration`.
 * All styling hooks are `data-ai-*` attributes.
 */
function ImageGenerator(props) {
  const [prompt, setPrompt] = useState14("");
  const [selectedModel, setSelectedModel] = useState14(props.defaultModel ?? "");
  const [size, setSize] = useState14("1024x1024");
  const [quality, setQuality] = useState14("standard");
  const { generate, images, isGenerating, error, clearImages, clearError } = useImageGeneration(props);
  const handleSubmit = async (e) => {
    e.preventDefault();
    if (!prompt.trim() || isGenerating) return;
    // Empty model string means "use the hook's default model".
    await generate(prompt.trim(), { model: selectedModel || void 0, size, quality });
  };
  return /* @__PURE__ */ jsxs6("div", { className: props.className, "data-ai-component": "image-generator", children: [
    /* @__PURE__ */ jsxs6("form", { onSubmit: handleSubmit, "data-ai-image-form": true, children: [
      /* @__PURE__ */ jsx6("div", { "data-ai-field": "prompt", children: /* @__PURE__ */ jsx6(
        "textarea",
        {
          value: prompt,
          onChange: (e) => setPrompt(e.target.value),
          placeholder: "Describe the image you want to create\u2026",
          disabled: isGenerating,
          "data-ai-input": true
        }
      ) }),
      /* @__PURE__ */ jsxs6("div", { "data-ai-image-options": true, children: [
        props.availableModels && /* @__PURE__ */ jsxs6(
          "select",
          {
            value: selectedModel,
            onChange: (e) => setSelectedModel(e.target.value),
            disabled: isGenerating,
            "data-ai-select": true,
            children: [
              /* @__PURE__ */ jsx6("option", { value: "", children: "Default model" }),
              props.availableModels.map((m) => /* @__PURE__ */ jsx6("option", { value: m, children: m }, m))
            ]
          }
        ),
        /* @__PURE__ */ jsxs6("select", { value: size, onChange: (e) => setSize(e.target.value), disabled: isGenerating, "data-ai-select": true, children: [
          /* @__PURE__ */ jsx6("option", { value: "1024x1024", children: "1024\xD71024" }),
          /* @__PURE__ */ jsx6("option", { value: "1792x1024", children: "1792\xD71024" }),
          /* @__PURE__ */ jsx6("option", { value: "1024x1792", children: "1024\xD71792" }),
          /* @__PURE__ */ jsx6("option", { value: "512x512", children: "512\xD7512" })
        ] }),
        /* @__PURE__ */ jsxs6("select", { value: quality, onChange: (e) => setQuality(e.target.value), disabled: isGenerating, "data-ai-select": true, children: [
          /* @__PURE__ */ jsx6("option", { value: "standard", children: "Standard" }),
          /* @__PURE__ */ jsx6("option", { value: "hd", children: "HD" })
        ] })
      ] }),
      /* @__PURE__ */ jsxs6("div", { "data-ai-actions": true, children: [
        /* @__PURE__ */ jsx6("button", { type: "submit", disabled: isGenerating || !prompt.trim(), "data-ai-submit": true, children: isGenerating ? "Generating\u2026" : "Generate Image" }),
        images.length > 0 && /* @__PURE__ */ jsx6("button", { type: "button", onClick: clearImages, "data-ai-clear": true, children: "Clear Gallery" })
      ] })
    ] }),
    error && /* @__PURE__ */ jsxs6("div", { "data-ai-error": true, role: "alert", children: [
      /* @__PURE__ */ jsx6("span", { children: error }),
      /* @__PURE__ */ jsx6("button", { onClick: clearError, children: "\xD7" })
    ] }),
    images.length > 0 && /* @__PURE__ */ jsx6("div", { "data-ai-image-gallery": true, children: images.map((img, i) => /* @__PURE__ */ jsxs6("div", { "data-ai-image-card": true, children: [
      /* @__PURE__ */ jsx6("img", { src: img.url, alt: img.prompt, "data-ai-generated-image": true }),
      /* @__PURE__ */ jsxs6("div", { "data-ai-image-meta": true, children: [
        /* @__PURE__ */ jsx6("span", { "data-ai-image-model": true, children: img.model }),
        /* @__PURE__ */ jsx6("span", { "data-ai-image-time": true, children: new Date(img.timestamp).toLocaleTimeString() })
      ] })
    ] }, i)) })
  ] });
}
1304
+
1305
+ // src/components/VoiceSelector.tsx
1306
+ import { jsx as jsx7, jsxs as jsxs7 } from "react/jsx-runtime";
1307
/**
 * Voice picker: one button per voice with name/gender/language/provider
 * labels and an optional preview control.
 *
 * NOTE(review): the preview control is a <button> nested inside the voice
 * <button>; nested interactive elements are invalid HTML and React warns at
 * runtime — consider a sibling element or a span with role="button".
 */
function VoiceSelector(props) {
  return /* @__PURE__ */ jsx7("div", { className: props.className, "data-ai-component": "voice-selector", children: props.voices.map((voice) => /* @__PURE__ */ jsxs7(
    "button",
    {
      onClick: () => props.onSelect(voice.id),
      disabled: props.disabled,
      "data-ai-voice": true,
      "data-ai-voice-selected": props.selectedVoice === voice.id ? "" : void 0,
      "data-ai-provider": voice.provider,
      children: [
        /* @__PURE__ */ jsx7("span", { "data-ai-voice-name": true, children: voice.name }),
        voice.gender && /* @__PURE__ */ jsx7("span", { "data-ai-voice-gender": true, children: voice.gender }),
        voice.language && /* @__PURE__ */ jsx7("span", { "data-ai-voice-language": true, children: voice.language }),
        props.showProvider && /* @__PURE__ */ jsx7("span", { "data-ai-voice-provider": true, children: voice.provider }),
        props.showPreview && voice.preview && /* @__PURE__ */ jsx7(
          "button",
          {
            "data-ai-voice-preview": true,
            onClick: (e) => {
              // Don't let the preview click also select the voice.
              e.stopPropagation();
              props.onPreview?.(voice.id);
            },
            children: "\u25B6"
          }
        )
      ]
    },
    voice.id
  )) });
}
1337
+
1338
+ // src/pages/ChatPage.tsx
1339
+ import { useState as useState15 } from "react";
1340
+ import { jsx as jsx8, jsxs as jsxs8 } from "react/jsx-runtime";
1341
/**
 * Full chat page: header with a "New Chat" reset button plus a ChatWindow.
 * Tracks the active conversation id locally so "New Chat" (id -> undefined)
 * starts a fresh conversation; forwards new ids to `onNewConversation`.
 */
function ChatPage(props) {
  const [activeConversationId, setActiveConversationId] = useState15(props.conversationId);
  return /* @__PURE__ */ jsxs8("div", { className: props.className, "data-ai-page": "chat", children: [
    /* @__PURE__ */ jsxs8("div", { "data-ai-page-header": true, children: [
      /* @__PURE__ */ jsx8("h1", { "data-ai-page-title": true, children: "AI Chat" }),
      /* @__PURE__ */ jsx8(
        "button",
        {
          onClick: () => setActiveConversationId(void 0),
          "data-ai-new-chat": true,
          children: "New Chat"
        }
      )
    ] }),
    /* @__PURE__ */ jsx8(
      ChatWindow,
      {
        ...props,
        conversationId: activeConversationId,
        onNewConversation: (id) => {
          setActiveConversationId(id);
          props.onNewConversation?.(id);
        }
      }
    )
  ] });
}
1368
+
1369
+ // src/pages/ModelTestPage.tsx
1370
+ import { DEFAULT_MODELS } from "@geenius/ai-shared";
1371
+ import { jsx as jsx9, jsxs as jsxs9 } from "react/jsx-runtime";
1372
// Full-page wrapper around ModelTestRunner with a titled header and
// an optional restriction of which models may be exercised.
function ModelTestPage(props) {
  // Fall back to the shared default catalogue when the host does not
  // constrain the selectable models.
  const modelIds = props.availableModels ?? DEFAULT_MODELS.map((m) => m.id);
  const header = /* @__PURE__ */ jsxs9("div", {
    "data-ai-page-header": true,
    children: [
      /* @__PURE__ */ jsx9("h1", { "data-ai-page-title": true, children: props.title ?? "Model Test Lab" }),
      /* @__PURE__ */ jsx9("p", { "data-ai-page-subtitle": true, children: "Test AI models individually or compare them side-by-side" })
    ]
  });
  return /* @__PURE__ */ jsxs9("div", {
    className: props.className,
    "data-ai-page": "model-test",
    children: [
      header,
      /* @__PURE__ */ jsx9(ModelTestRunner, { ...props, availableModels: modelIds })
    ]
  });
}
1388
+
1389
+ // src/pages/AILogsPage.tsx
1390
+ import { useState as useState16 } from "react";
1391
+ import { jsx as jsx10, jsxs as jsxs10 } from "react/jsx-runtime";
1392
// Dashboard page: headline stats over the loaded AI-call logs, filter
// controls (model free-text + status dropdown), and the log table.
function AILogsPage(props) {
  // Empty-string filters mean "no filter"; they are mapped to undefined
  // so the query layer omits the clause entirely.
  const [modelQuery, setModelQuery] = useState16("");
  const [statusQuery, setStatusQuery] = useState16("");
  const { logs, isLoading } = useAILogs({
    listLogsQuery: props.listLogsQuery,
    filters: {
      model: modelQuery || void 0,
      status: statusQuery || void 0
    },
    limit: 100
  });
  // Headline stats aggregated in a single pass over the loaded logs
  // (same accumulation order as iterating the array front to back).
  let okCount = 0;
  let costSum = 0;
  let tokenSum = 0;
  for (const entry of logs) {
    if (entry.status === "success") okCount += 1;
    costSum += entry.totalCostUsd ?? 0;
    tokenSum += entry.totalTokens ?? 0;
  }
  const callCount = logs.length;
  const failCount = callCount - okCount;
  const statsGrid = /* @__PURE__ */ jsxs10("div", {
    "data-ai-stats-grid": true,
    children: [
      /* @__PURE__ */ jsxs10("div", {
        "data-ai-stat": true,
        children: [
          /* @__PURE__ */ jsx10("span", { "data-ai-stat-label": true, children: "Total Calls" }),
          /* @__PURE__ */ jsx10("span", { "data-ai-stat-value": true, children: callCount })
        ]
      }),
      /* @__PURE__ */ jsxs10("div", {
        "data-ai-stat": true,
        "data-ai-status": "success",
        children: [
          /* @__PURE__ */ jsx10("span", { "data-ai-stat-label": true, children: "Success" }),
          /* @__PURE__ */ jsx10("span", { "data-ai-stat-value": true, children: okCount })
        ]
      }),
      /* @__PURE__ */ jsxs10("div", {
        "data-ai-stat": true,
        "data-ai-status": "error",
        children: [
          /* @__PURE__ */ jsx10("span", { "data-ai-stat-label": true, children: "Errors" }),
          /* @__PURE__ */ jsx10("span", { "data-ai-stat-value": true, children: failCount })
        ]
      }),
      /* @__PURE__ */ jsxs10("div", {
        "data-ai-stat": true,
        children: [
          /* @__PURE__ */ jsx10("span", { "data-ai-stat-label": true, children: "Total Cost" }),
          /* @__PURE__ */ jsxs10("span", {
            "data-ai-stat-value": true,
            children: ["$", costSum.toFixed(2)]
          })
        ]
      }),
      /* @__PURE__ */ jsxs10("div", {
        "data-ai-stat": true,
        children: [
          /* @__PURE__ */ jsx10("span", { "data-ai-stat-label": true, children: "Total Tokens" }),
          /* @__PURE__ */ jsx10("span", { "data-ai-stat-value": true, children: tokenSum.toLocaleString() })
        ]
      })
    ]
  });
  const filterBar = /* @__PURE__ */ jsxs10("div", {
    "data-ai-filters": true,
    children: [
      /* @__PURE__ */ jsx10("input", {
        type: "text",
        placeholder: "Filter by model\u2026",
        value: modelQuery,
        onChange: (e) => setModelQuery(e.target.value),
        "data-ai-filter-input": true
      }),
      /* @__PURE__ */ jsxs10("select", {
        value: statusQuery,
        onChange: (e) => setStatusQuery(e.target.value),
        "data-ai-filter-select": true,
        children: [
          /* @__PURE__ */ jsx10("option", { value: "", children: "All statuses" }),
          /* @__PURE__ */ jsx10("option", { value: "success", children: "Success" }),
          /* @__PURE__ */ jsx10("option", { value: "error", children: "Error" })
        ]
      })
    ]
  });
  return /* @__PURE__ */ jsxs10("div", {
    className: props.className,
    "data-ai-page": "logs",
    children: [
      /* @__PURE__ */ jsx10("div", {
        "data-ai-page-header": true,
        children: /* @__PURE__ */ jsx10("h1", { "data-ai-page-title": true, children: props.title ?? "AI Logs" })
      }),
      statsGrid,
      filterBar,
      /* @__PURE__ */ jsx10(AILogTable, { logs, isLoading, onRowClick: props.onRowClick })
    ]
  });
}
1470
// Public API of this bundle: page-level components (ChatPage,
// ModelTestPage, AILogsPage), building-block components, and hooks.
export {
  AILogTable,
  AILogsPage,
  ChatPage,
  ChatWindow,
  GenerationCard,
  ImageGenerator,
  ModelSelector,
  ModelTestPage,
  ModelTestRunner,
  VoiceSelector,
  useAI,
  useAILogs,
  useAIModels,
  useChat,
  useContentManager,
  useImageGeneration,
  useMemory,
  useModelTest,
  useRealtimeAudio,
  useSkills,
  useTextToSpeech,
  useTranscription,
  useVideoGeneration
};
1495
+ //# sourceMappingURL=index.js.map