@bytexbyte/nxtlinq-ai-agent-sdk 1.6.15 → 1.6.17
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/api/nxtlinq-api.d.ts +1 -0
- package/dist/api/nxtlinq-api.d.ts.map +1 -1
- package/dist/api/nxtlinq-api.js +32 -2
- package/dist/components/context/ChatBotContext.d.ts.map +1 -1
- package/dist/components/context/ChatBotContext.js +273 -46
- package/dist/components/types/ChatBotTypes.d.ts +5 -8
- package/dist/components/types/ChatBotTypes.d.ts.map +1 -1
- package/dist/components/types/ChatBotTypes.js +1 -41
- package/dist/components/ui/BerifyMeModal.d.ts +5 -0
- package/dist/components/ui/BerifyMeModal.d.ts.map +1 -1
- package/dist/components/ui/BerifyMeModal.js +7 -5
- package/dist/components/ui/MessageInput.d.ts.map +1 -1
- package/dist/components/ui/MessageInput.js +64 -42
- package/dist/components/ui/MessageList.d.ts.map +1 -1
- package/dist/components/ui/MessageList.js +6 -3
- package/dist/components/ui/ModelSelector.d.ts.map +1 -1
- package/dist/components/ui/ModelSelector.js +16 -2
- package/dist/core/lib/useSpeechToTextFromMic/helper.d.ts +3 -3
- package/dist/core/lib/useSpeechToTextFromMic/helper.d.ts.map +1 -1
- package/dist/core/lib/useSpeechToTextFromMic/helper.js +14 -12
- package/dist/core/lib/useSpeechToTextFromMic/index.d.ts +1 -1
- package/dist/core/lib/useSpeechToTextFromMic/index.d.ts.map +1 -1
- package/dist/core/lib/useSpeechToTextFromMic/index.js +28 -21
- package/dist/index.d.ts +0 -1
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +0 -2
- package/dist/types/ait-api.d.ts +14 -0
- package/dist/types/ait-api.d.ts.map +1 -1
- package/dist/umd-entry.d.ts.map +1 -1
- package/dist/umd-entry.js +1 -23
- package/package.json +1 -1
- package/umd/nxtlinq-ai-agent.umd.js +142 -134
package/dist/api/nxtlinq-api.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"nxtlinq-api.d.ts","sourceRoot":"","sources":["../../src/api/nxtlinq-api.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,MAAM,EAAE,MAAM,kBAAkB,CAAC;
+{"version":3,"file":"nxtlinq-api.d.ts","sourceRoot":"","sources":["../../src/api/nxtlinq-api.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,MAAM,EAAE,MAAM,kBAAkB,CAAC;AAM1C,eAAO,MAAM,WAAW,GAAI,aAAa,YAAY,GAAG,SAAS,SAQhE,CAAC;AA2YF,eAAO,MAAM,gBAAgB,GAAI,QAAQ,MAAM,EAAE,WAAW,MAAM,KAAG,MAUpE,CAAC"}
package/dist/api/nxtlinq-api.js
CHANGED
@@ -1,5 +1,16 @@
-
-
+// Will be set by setApiHosts based on environment
+let AI_AGENT_API_HOST = '';
+let AIT_SERVICE_API_HOST = '';
+export const setApiHosts = (environment) => {
+    if (environment === 'production') {
+        AI_AGENT_API_HOST = 'https://ai-agent.nxtlinq.ai';
+        AIT_SERVICE_API_HOST = 'https://ait-service.nxtlinq.ai';
+    }
+    else {
+        AI_AGENT_API_HOST = 'https://ai-agent-staging.nxtlinq.ai';
+        AIT_SERVICE_API_HOST = 'https://staging-ait-service.nxtlinq.ai';
+    }
+};
 // Helper function to get auth header
 const getAuthHeader = () => {
     const token = localStorage.getItem('nxtlinqAITServiceAccessToken');
@@ -234,6 +245,25 @@ const createAgentApi = () => ({
             console.error('Failed to clone user topic:', error);
            return { error: error instanceof Error ? error.message : 'Failed to clone user topic' };
        }
+    },
+    getServiceModels: async (params) => {
+        try {
+            const response = await fetch(`${AI_AGENT_API_HOST}/api/service-models`, {
+                method: 'GET',
+                headers: {
+                    'x-api-key': params.apiKey,
+                    'x-api-secret': params.apiSecret,
+                }
+            });
+            if (!response.ok) {
+                throw new Error('Failed to fetch service models');
+            }
+            return await response.json();
+        }
+        catch (error) {
+            console.error('Failed to fetch service models:', error);
+            return { error: error instanceof Error ? error.message : 'Failed to fetch service models' };
+        }
     }
 });
 // Permissions API module
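The two hunks above carry the backbone of this release: the API hosts are no longer hardcoded but selected by `setApiHosts(environment)`, and the agent API gains a `getServiceModels` call. A minimal sketch of how these are wired together, mirroring the calls visible in the ChatBotContext.js hunks further down; the deep import path and the response shape are assumptions, since only the dist internals appear in this diff:

```ts
// Sketch only - mirrors the calls shown in this diff, not a documented public API.
// The import path is an assumption; the package root may not re-export these.
import { setApiHosts, createNxtlinqApi } from '@bytexbyte/nxtlinq-ai-agent-sdk/dist/api/nxtlinq-api';

export async function loadDashboardModels(apiKey: string, apiSecret: string) {
  // Select the staging or production hosts before any request is issued.
  setApiHosts('staging');

  const api = createNxtlinqApi(apiKey, apiSecret);
  // New in 1.6.17: GET {AI_AGENT_API_HOST}/api/service-models,
  // authenticated with the x-api-key / x-api-secret headers.
  const result = await api.agent.getServiceModels({ apiKey, apiSecret });

  // ChatBotContext.js checks for an `availableModels` array on the response;
  // anything else (including an `{ error }` object) is treated as "no models".
  if (result && 'availableModels' in result && Array.isArray(result.availableModels)) {
    return result.availableModels;
  }
  return [];
}
```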
package/dist/components/context/ChatBotContext.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"ChatBotContext.d.ts","sourceRoot":"","sources":["../../../src/components/context/ChatBotContext.tsx"],"names":[],"mappings":"AAEA,OAAO,KAAK,KAAK,MAAM,OAAO,CAAC;AAS/B,OAAO,
+{"version":3,"file":"ChatBotContext.d.ts","sourceRoot":"","sources":["../../../src/components/context/ChatBotContext.tsx"],"names":[],"mappings":"AAEA,OAAO,KAAK,KAAK,MAAM,OAAO,CAAC;AAS/B,OAAO,EAEL,kBAAkB,EAClB,YAAY,EAEb,MAAM,uBAAuB,CAAC;AAM/B,eAAO,MAAM,UAAU,0BAMtB,CAAC;AAEF,eAAO,MAAM,eAAe,EAAE,KAAK,CAAC,EAAE,CAAC,YAAY,CAovElD,CAAC"}
package/dist/components/context/ChatBotContext.js
CHANGED
@@ -3,13 +3,13 @@ import { ethers } from 'ethers';
 import stringify from 'fast-json-stable-stringify';
 import * as React from 'react';
 import { v4 as uuidv4 } from 'uuid';
-import { createNxtlinqApi } from '../../api/nxtlinq-api';
+import { createNxtlinqApi, setApiHosts } from '../../api/nxtlinq-api';
 import useLocalStorage from '../../core/lib/useLocalStorage';
 import useSessionStorage from '../../core/lib/useSessionStorage';
 import { useSpeechToTextFromMic } from '../../core/lib/useSpeechToTextFromMic';
 import metakeepClient from '../../core/metakeepClient';
 import { sleep } from '../../core/utils';
-
+const MIC_ENABLED_SESSION_KEY = 'chatbot-mic-enabled';
 const ChatBotContext = React.createContext(undefined);
 export const useChatBot = () => {
     const context = React.useContext(ChatBotContext);
@@ -18,9 +18,9 @@ export const useChatBot = () => {
     }
     return context;
 };
-export const ChatBotProvider = ({ onMessage, onError, onToolUse, presetMessages = [], placeholder = 'Type a message...', className = '', maxRetries = 3, retryDelay = 2000, serviceId, apiKey, apiSecret, onVerifyWallet, permissionGroup, children,
+export const ChatBotProvider = ({ onMessage, onError, onToolUse, presetMessages = [], placeholder = 'Type a message...', className = '', maxRetries = 3, retryDelay = 2000, serviceId, apiKey, apiSecret, environment = 'production', onVerifyWallet, permissionGroup, children,
 // AI Model related attributes
-
+onModelChange,
 // Storage mode configuration
 useSessionStorage: useSessionStorageMode = false,
 // Wallet verification configuration
@@ -33,9 +33,11 @@ customUsername,
 idvBannerDismissSeconds = 86400,
 // 24 hours in seconds
 isStopRecordingOnSend = false, }) => {
+    // Set API hosts immediately based on environment (before any API calls)
+    setApiHosts(environment);
     const nxtlinqApi = React.useMemo(() => createNxtlinqApi(apiKey, apiSecret), [apiKey, apiSecret]);
     // Custom hook
-    const {
+    const { isMicEnabled, transcript, partialTranscript, start: startRecording, stop: stopRecording, clear: clearRecording } = useSpeechToTextFromMic({
         apiKey,
         apiSecret
     });
@@ -68,6 +70,8 @@ isStopRecordingOnSend = false, }) => {
         ? useSessionStorage('chatbot-suggestions', presetMessages)
         : React.useState(presetMessages);
     const [isAITEnabling, setIsAITEnabling] = React.useState(false);
+    const [isAwaitingMicGesture, setIsAwaitingMicGesture] = React.useState(false);
+    const [autoSendEnabled, setAutoSendEnabled] = React.useState(false);
     // Use refs to get latest state values in hasPermission function
     const hitAddressRef = React.useRef(hitAddress);
     const aitRef = React.useRef(ait);
@@ -75,9 +79,15 @@ isStopRecordingOnSend = false, }) => {
     const nxtlinqAITServiceAccessTokenRef = React.useRef(nxtlinqAITServiceAccessToken);
     const signerRef = React.useRef(signer);
     // Refs for input value and recording state
-    const
+    const isMicEnabledRef = React.useRef(false);
+    const isRestoringRecordingRef = React.useRef(false);
+    const hasSyncedMicStateRef = React.useRef(false);
+    const isReacquiringMicRef = React.useRef(false);
+    const pendingMicAutoStartRef = React.useRef(false);
+    const autoStartGestureCleanupRef = React.useRef(null);
     const textInputRef = React.useRef(null);
     const lastPartialRangeRef = React.useRef(null);
+    const lastAutoSentTranscriptRef = React.useRef('');
     function insertPartial(input, partial, caret) {
         let start = caret;
         let end = caret;
@@ -159,17 +169,211 @@ isStopRecordingOnSend = false, }) => {
         if (!textInputRef.current)
             return;
         const { next, caret } = finalizePartial(inputValue, transcript);
-
+        const normalizedText = normalizeTranscript(next);
+        setInputValue(normalizedText);
         setTimeout(() => {
             if (textInputRef.current) {
                 textInputRef.current.selectionStart = caret;
                 textInputRef.current.selectionEnd = caret;
             }
         }, 0);
-
+        // Auto send on speech complete (if user enabled the toggle)
+        if (autoSendEnabled && isMicEnabled && normalizedText.trim()) {
+            // Avoid duplicate sends by checking if this transcript was already sent
+            if (lastAutoSentTranscriptRef.current !== normalizedText.trim()) {
+                lastAutoSentTranscriptRef.current = normalizedText.trim();
+                // Small delay to ensure input value is updated
+                setTimeout(() => {
+                    if (normalizedText.trim() && !isLoading) {
+                        sendMessage(normalizedText.trim()).catch(error => {
+                            console.error('Failed to auto-send message from speech:', error);
+                            // Reset the ref so user can try again
+                            lastAutoSentTranscriptRef.current = '';
+                        });
+                    }
+                }, 100);
+            }
+        }
+        // eslint-disable-next-line react-hooks/exhaustive-deps
+    }, [transcript, autoSendEnabled, isMicEnabled, isLoading]);
+    React.useEffect(() => {
+        isMicEnabledRef.current = isMicEnabled;
+        // Reset auto-sent transcript ref when starting new recording
+        if (isMicEnabled) {
+            lastAutoSentTranscriptRef.current = '';
+        }
+        else {
+            // Reset auto send when mic is disabled
+            setAutoSendEnabled(false);
+        }
+    }, [isMicEnabled]);
+    React.useEffect(() => {
+        if (typeof window === 'undefined') {
+            return;
+        }
+        if (!useSessionStorageMode) {
+            sessionStorage.removeItem(MIC_ENABLED_SESSION_KEY);
+            hasSyncedMicStateRef.current = false;
+            return;
+        }
+        if (!hasSyncedMicStateRef.current) {
+            return;
+        }
+        try {
+            sessionStorage.setItem(MIC_ENABLED_SESSION_KEY, JSON.stringify(isMicEnabled));
+        }
+        catch (error) {
+            console.warn('Failed to persist recording state to sessionStorage:', error);
+        }
+    }, [isMicEnabled, useSessionStorageMode]);
     React.useEffect(() => {
-
-
+        if (typeof window === 'undefined') {
+            return;
+        }
+        const cleanupAutoStartListeners = () => {
+            if (autoStartGestureCleanupRef.current) {
+                autoStartGestureCleanupRef.current();
+                autoStartGestureCleanupRef.current = null;
+            }
+        };
+        if (!useSessionStorageMode) {
+            cleanupAutoStartListeners();
+            pendingMicAutoStartRef.current = false;
+            setIsAwaitingMicGesture(false);
+            return;
+        }
+        if (hasSyncedMicStateRef.current || isReacquiringMicRef.current) {
+            return;
+        }
+        const rawStoredValue = sessionStorage.getItem(MIC_ENABLED_SESSION_KEY);
+        const shouldRestore = rawStoredValue === 'true';
+        if (!shouldRestore || isMicEnabledRef.current || isRestoringRecordingRef.current) {
+            hasSyncedMicStateRef.current = true;
+            setIsAwaitingMicGesture(false);
+            return;
+        }
+        pendingMicAutoStartRef.current = true;
+        function scheduleRetryOnUserGesture() {
+            if (autoStartGestureCleanupRef.current)
+                return;
+            setIsAwaitingMicGesture(true);
+            const events = ['pointerdown', 'keydown', 'touchstart'];
+            const handler = async () => {
+                cleanupAutoStartListeners();
+                await attemptAutoStart('user-gesture');
+            };
+            events.forEach(event => window.addEventListener(event, handler, { once: true }));
+            autoStartGestureCleanupRef.current = () => {
+                events.forEach(event => window.removeEventListener(event, handler));
+                autoStartGestureCleanupRef.current = null;
+            };
+        }
+        async function attemptAutoStart(reason) {
+            isReacquiringMicRef.current = true;
+            // For initial restore, show gesture prompt immediately while checking
+            // This gives user early feedback and allows them to click anytime
+            if (reason === 'initial') {
+                scheduleRetryOnUserGesture();
+            }
+            if (typeof navigator !== 'undefined' && navigator.mediaDevices?.getUserMedia) {
+                try {
+                    const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
+                    stream.getTracks().forEach(track => track.stop());
+                }
+                catch (error) {
+                    console.error('Failed to reacquire microphone stream:', error);
+                    const errorName = error?.name || '';
+                    const errorMessage = error instanceof Error ? error.message : String(error);
+                    if (reason === 'initial') {
+                        const isPermissionPermanentlyDenied = errorName === 'NotAllowedError' &&
+                            /permission.*denied|blocked|not.*granted/i.test(errorMessage);
+                        if (isPermissionPermanentlyDenied) {
+                            // Permission permanently denied, hide the prompt
+                            pendingMicAutoStartRef.current = false;
+                            setIsAwaitingMicGesture(false);
+                            try {
+                                sessionStorage.setItem(MIC_ENABLED_SESSION_KEY, JSON.stringify(false));
+                            }
+                            catch (storageError) {
+                                console.warn('Failed to reset recording state in sessionStorage:', storageError);
+                            }
+                            cleanupAutoStartListeners();
+                        }
+                        // Otherwise, keep the prompt showing (already shown above)
+                        isReacquiringMicRef.current = false;
+                        return;
+                    }
+                    // For user-gesture retry, treat as error
+                    pendingMicAutoStartRef.current = false;
+                    setIsAwaitingMicGesture(false);
+                    try {
+                        sessionStorage.setItem(MIC_ENABLED_SESSION_KEY, JSON.stringify(false));
+                    }
+                    catch (storageError) {
+                        console.warn('Failed to reset recording state in sessionStorage:', storageError);
+                    }
+                    isReacquiringMicRef.current = false;
+                    hasSyncedMicStateRef.current = true;
+                    cleanupAutoStartListeners();
+                    return;
+                }
+            }
+            else if (reason === 'initial') {
+                // getUserMedia not available, prompt already shown above
+                isReacquiringMicRef.current = false;
+                return;
+            }
+            isRestoringRecordingRef.current = true;
+            // Intercept console.warn to detect AudioContext warnings
+            let audioContextWarningDetected = false;
+            const originalWarn = console.warn;
+            console.warn = (...args) => {
+                const message = args.join(' ');
+                if (message.includes('AudioContext was not allowed to start')) {
+                    audioContextWarningDetected = true;
+                }
+                originalWarn.apply(console, args);
+            };
+            try {
+                await startRecording();
+                console.warn = originalWarn;
+                // Check mic status after a short delay
+                await new Promise(resolve => setTimeout(resolve, 300));
+                // If mic started successfully, hide the prompt
+                if (isMicEnabled) {
+                    pendingMicAutoStartRef.current = false;
+                    setIsAwaitingMicGesture(false);
+                    cleanupAutoStartListeners();
+                }
+                else if (reason === 'initial') {
+                    // Mic didn't start, keep the prompt showing (already shown earlier)
+                    isRestoringRecordingRef.current = false;
+                    return;
+                }
+            }
+            catch (error) {
+                console.error('startRecording threw an error:', error);
+                try {
+                    sessionStorage.setItem(MIC_ENABLED_SESSION_KEY, JSON.stringify(false));
+                }
+                catch (storageError) {
+                    console.warn('Failed to reset recording state in sessionStorage:', storageError);
+                }
+            }
+            finally {
+                console.warn = originalWarn;
+                isRestoringRecordingRef.current = false;
+                isReacquiringMicRef.current = false;
+                hasSyncedMicStateRef.current = true;
+            }
+        }
+        attemptAutoStart('initial');
+        return () => {
+            cleanupAutoStartListeners();
+            pendingMicAutoStartRef.current = false;
+            setIsAwaitingMicGesture(false);
+        };
+    }, [useSessionStorageMode, startRecording]);
     const [notification, setNotification] = React.useState({
         show: false,
         type: 'info',
@@ -178,26 +382,48 @@ isStopRecordingOnSend = false, }) => {
         duration: 5000
     });
     // AI Model related state
-    const [
-    const [
-    //
+    const [modelsFromApi, setModelsFromApi] = React.useState([]);
+    const [selectedModelIndex, setSelectedModelIndex] = useLocalStorage('selectedAIModelIndex', 0);
+    // Use models from API only (no default fallback)
+    // This ensures SDK always uses Dashboard-configured models
+    const effectiveAvailableModels = React.useMemo(() => {
+        return modelsFromApi;
+    }, [modelsFromApi]);
+    // Auto determine if selector should be shown based on number of models
+    // Show selector only when there are 2 or more models
+    const showModelSelector = React.useMemo(() => {
+        return effectiveAvailableModels.length >= 2;
+    }, [effectiveAvailableModels]);
+    // Fetch available models from API on mount
+    React.useEffect(() => {
+        const fetchModels = async () => {
+            try {
+                const result = await nxtlinqApi.agent.getServiceModels({ apiKey, apiSecret });
+                if ('availableModels' in result && Array.isArray(result.availableModels)) {
+                    setModelsFromApi(result.availableModels);
+                }
+            }
+            catch (error) {
+                console.error('Failed to fetch available models from API:', error);
+                // Silently fail and use default models
+            }
+        };
+        fetchModels();
+    }, [apiKey, apiSecret, nxtlinqApi]);
+    // Validate selectedModelIndex against effectiveAvailableModels
     React.useEffect(() => {
         // Check if selectedModelIndex is out of bounds
-        if (selectedModelIndex >=
-            setSelectedModelIndex(
+        if (selectedModelIndex >= effectiveAvailableModels.length) {
+            setSelectedModelIndex(0);
             return;
         }
-        // Check if the selected model still exists in
-        const selectedModel =
+        // Check if the selected model still exists in effectiveAvailableModels
+        const selectedModel = effectiveAvailableModels[selectedModelIndex];
         if (!selectedModel) {
-            setSelectedModelIndex(defaultModelIndex);
-            return;
-        }
-        // If defaultModelIndex is also out of bounds or invalid, use 0
-        if (defaultModelIndex >= availableModels.length || !availableModels[defaultModelIndex]) {
             setSelectedModelIndex(0);
+            return;
         }
-    }, [
+    }, [effectiveAvailableModels, selectedModelIndex, setSelectedModelIndex]);
     // Notification functions
     const showNotification = (type, message, duration = 5000) => {
         setNotification({
@@ -888,25 +1114,21 @@ isStopRecordingOnSend = false, }) => {
     // AI Model related functions
     const handleModelChange = React.useCallback((modelIndex) => {
         setSelectedModelIndex(modelIndex);
-        const selectedModel =
+        const selectedModel = effectiveAvailableModels[modelIndex];
         onModelChange?.(selectedModel);
-    }, [
+    }, [effectiveAvailableModels, onModelChange, setSelectedModelIndex]);
     const getCurrentModel = React.useCallback(() => {
         // Safety check: ensure selectedModelIndex is within bounds
         let safeIndex = selectedModelIndex;
-        if (selectedModelIndex >=
-            safeIndex = defaultModelIndex;
-        }
-        // If defaultModelIndex is also out of bounds, use 0
-        if (safeIndex >= availableModels.length) {
+        if (selectedModelIndex >= effectiveAvailableModels.length) {
             safeIndex = 0;
         }
         // If no models available, return a default model
-        if (
+        if (effectiveAvailableModels.length === 0) {
             return { label: 'Unknown', value: 'unknown' };
         }
-        return
-    }, [
+        return effectiveAvailableModels[safeIndex];
+    }, [effectiveAvailableModels, selectedModelIndex]);
     const updateSuggestions = React.useCallback(async (pseudoId, externalId) => {
         const result = await nxtlinqApi.agent.generateSuggestions({
             apiKey,
@@ -929,6 +1151,8 @@ isStopRecordingOnSend = false, }) => {
         if (!content.trim() || isLoading)
             return;
         const currentModel = getCurrentModel();
+        // Initialize with current model, will be updated with actual model from backend response
+        let actualModelUsed = currentModel.value;
         // Only add user message on first attempt, not on retries
         // Also check if this is not a preset message (preset messages are added separately)
         if (retryCount === 0 && !isPresetMessage) {
@@ -957,6 +1181,8 @@ isStopRecordingOnSend = false, }) => {
             customUserInfo,
             message: content,
         });
+        // Get the actual model used from response (may differ due to fallback)
+        actualModelUsed = response.model || currentModel.value;
         if ('error' in response || response.result === 'error') {
             // Check if it's an AIT-related error
             const errorResponse = response;
@@ -1161,7 +1387,7 @@ isStopRecordingOnSend = false, }) => {
                 role: 'assistant',
                 timestamp: new Date().toISOString(),
                 metadata: {
-                    model:
+                    model: actualModelUsed,
                     permissions: permissions,
                     issuedBy: hitAddress || '',
                     toolUse: toolUse
@@ -1182,7 +1408,7 @@ isStopRecordingOnSend = false, }) => {
                 role: 'assistant',
                 timestamp: new Date().toISOString(),
                 metadata: {
-                    model:
+                    model: actualModelUsed,
                     permissions: permissions,
                     issuedBy: hitAddress || ''
                 }
@@ -1198,7 +1424,7 @@ isStopRecordingOnSend = false, }) => {
                 role: 'assistant',
                 timestamp: new Date().toISOString(),
                 metadata: {
-                    model:
+                    model: actualModelUsed,
                     permissions: permissions,
                     issuedBy: hitAddress || ''
                 }
@@ -1223,7 +1449,7 @@ isStopRecordingOnSend = false, }) => {
                 role: 'assistant',
                 timestamp: new Date().toISOString(),
                 metadata: {
-                    model:
+                    model: actualModelUsed,
                     permissions: permissions,
                     issuedBy: hitAddress || '',
                     isRetry: true
@@ -1259,15 +1485,17 @@ isStopRecordingOnSend = false, }) => {
     const handleSubmit = async (e) => {
         e.preventDefault();
         if (isStopRecordingOnSend) {
-            while (
+            while (isMicEnabledRef.current) {
                 stopRecording();
                 await sleep(1000);
             }
         }
         else {
-            if (
+            if (isMicEnabledRef.current) {
                 await sleep(1000);
                 clearRecording();
+                // Reset auto-sent transcript ref when clearing recording
+                lastAutoSentTranscriptRef.current = '';
             }
         }
         if (!textInputRef.current)
@@ -1793,13 +2021,15 @@ isStopRecordingOnSend = false, }) => {
         isWalletLoading,
         isAutoConnecting,
         notification,
-
+        isMicEnabled,
+        isAwaitingMicGesture,
         transcript,
         textInputRef,
+        autoSendEnabled,
         // AI Model related state
-        availableModels,
+        availableModels: effectiveAvailableModels,
         selectedModelIndex,
-        showModelSelector
+        showModelSelector,
         suggestions,
         isAITEnabling,
         // Actions
@@ -1813,8 +2043,8 @@ isStopRecordingOnSend = false, }) => {
         setNotification,
         // AI Model related actions
         setSelectedModelIndex,
-        setShowModelSelector: setShowModelSelectorState,
         setSuggestions,
+        setAutoSendEnabled,
         // Functions
         connectWallet,
         signInWallet,
@@ -1858,9 +2088,6 @@ isStopRecordingOnSend = false, }) => {
         apiSecret,
         onVerifyWallet,
         permissionGroup,
-        availableModels,
-        defaultModelIndex,
-        showModelSelector,
         onModelChange,
         useSessionStorage: useSessionStorageMode,
         requireWalletIDVVerification,
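For consumers, the net effect of the ChatBotContext.js changes is a handful of new fields on the chat bot context: `isMicEnabled`, `isAwaitingMicGesture`, `autoSendEnabled`, and `setAutoSendEnabled` (the matching type changes appear in ChatBotTypes.d.ts below). A hedged sketch of reading them through `useChatBot`; the root re-export of the hook is an assumption:

```ts
// Illustrative only: the field names come from the ChatBotContextType additions
// in ChatBotTypes.d.ts; the import path is an assumption.
import { useChatBot } from '@bytexbyte/nxtlinq-ai-agent-sdk';

export function useAutoSendToggle() {
  const { isMicEnabled, isAwaitingMicGesture, autoSendEnabled, setAutoSendEnabled } = useChatBot();

  return {
    // The provider restores a previous recording only after a user gesture,
    // so surface that state to the UI instead of silently doing nothing.
    needsUserGesture: isAwaitingMicGesture,
    // Auto-send only makes sense while the microphone is actually recording;
    // the provider itself resets the flag when the mic is disabled.
    canToggle: isMicEnabled,
    enabled: autoSendEnabled,
    toggle: () => setAutoSendEnabled(!autoSendEnabled),
  };
}
```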
package/dist/components/types/ChatBotTypes.d.ts
CHANGED
@@ -29,9 +29,6 @@ export interface AIModel {
     label: string;
     value: string;
 }
-export declare const DEFAULT_AI_MODELS: AIModel[];
-export declare const ALL_AVAILABLE_MODELS: AIModel[];
-export declare const AI_MODEL_MAP: Record<string, string>;
 export interface ChatBotProps {
     onMessage?: (message: Message) => void;
     onError?: (error: Error) => void;
@@ -44,14 +41,12 @@ export interface ChatBotProps {
     serviceId: string;
     apiKey: string;
     apiSecret: string;
+    environment?: 'production' | 'staging';
     onVerifyWallet?: () => Promise<{
         token: string;
     } | undefined>;
     permissionGroup?: string;
     children?: React.ReactNode;
-    availableModels?: AIModel[];
-    defaultModelIndex?: number;
-    showModelSelector?: boolean;
     onModelChange?: (model: AIModel) => void;
     useSessionStorage?: boolean;
     requireWalletIDVVerification?: boolean;
@@ -86,9 +81,11 @@ export interface ChatBotContextType {
         autoHide?: boolean;
         duration?: number;
     };
-
+    isMicEnabled: boolean;
+    isAwaitingMicGesture: boolean;
     transcript: string;
     textInputRef: React.RefObject<HTMLInputElement>;
+    autoSendEnabled: boolean;
     availableModels: AIModel[];
     selectedModelIndex: number;
     showModelSelector: boolean;
@@ -102,8 +99,8 @@ export interface ChatBotContextType {
     setIsWalletLoading: (loading: boolean) => void;
     setNotification: (notification: any) => void;
     setSelectedModelIndex: (index: number) => void;
-    setShowModelSelector: (show: boolean) => void;
     setSuggestions: (suggestions: PresetMessage[]) => void;
+    setAutoSendEnabled: (enabled: boolean) => void;
     connectWallet: (autoShowSignInMessage?: boolean) => Promise<string | false | undefined>;
     signInWallet: (autoShowSuccessMessage?: boolean) => Promise<void>;
     sendMessage: (content: string, retryCount?: number) => Promise<void>;
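ChatBotProps loses the `availableModels` / `defaultModelIndex` / `showModelSelector` props and gains an optional `environment` flag, so a provider setup migrates roughly as follows (a sketch only; the root re-export of `ChatBotProvider` and the placeholder credentials are assumptions):

```ts
// Migration sketch for the ChatBotProps changes above. Written with
// React.createElement to stay plain TypeScript; in a .tsx file this would be JSX.
import * as React from 'react';
import { ChatBotProvider } from '@bytexbyte/nxtlinq-ai-agent-sdk';

export function ChatRoot({ children }: { children?: React.ReactNode }) {
  return React.createElement(
    ChatBotProvider,
    {
      serviceId: 'your-service-id', // placeholder values
      apiKey: 'your-api-key',
      apiSecret: 'your-api-secret',
      // New in 1.6.17: selects the staging or production API hosts.
      // Omitting it defaults to 'production'.
      environment: 'staging',
      // Still supported: notified when the user switches models.
      onModelChange: model => console.log('Model selected:', model.value),
      // availableModels, defaultModelIndex and showModelSelector are gone:
      // the model list now comes from the service-models endpoint and the
      // selector shows automatically when two or more models are returned.
    },
    children
  );
}
```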
package/dist/components/types/ChatBotTypes.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"ChatBotTypes.d.ts","sourceRoot":"","sources":["../../../src/components/types/ChatBotTypes.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,iBAAiB,EAAE,MAAM,qBAAqB,CAAC;AAE9E,MAAM,WAAW,aAAa;IAC5B,IAAI,EAAE,MAAM,CAAC;IACb,QAAQ,CAAC,EAAE,OAAO,CAAC;CACpB;AAED,MAAM,WAAW,OAAO;IACtB,IAAI,EAAE,MAAM,CAAC;IACb,KAAK,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;CAC5B;AAED,MAAM,WAAW,QAAQ;IACvB,OAAO,EAAE,OAAO,CAAC;CAClB;AAED,MAAM,WAAW,YAAY;IAC3B,MAAM,EAAE,MAAM,CAAC;IACf,KAAK,EAAE,KAAK,CAAC;QAAE,IAAI,EAAE,MAAM,CAAA;KAAE,CAAC,CAAC;IAC/B,QAAQ,CAAC,EAAE,QAAQ,CAAC;CACrB;AAED,MAAM,WAAW,SAAS;IACxB,KAAK,EAAE,MAAM,CAAC;CACf;AAED,MAAM,WAAW,WAAW;IAC1B,KAAK,EAAE,MAAM,CAAC;IACd,WAAW,EAAE,MAAM,EAAE,CAAC;IACtB,QAAQ,EAAE,MAAM,CAAC;CAClB;AAGD,MAAM,WAAW,OAAO;IACtB,KAAK,EAAE,MAAM,CAAC;IACd,KAAK,EAAE,MAAM,CAAC;CACf;AAED,
+
{"version":3,"file":"ChatBotTypes.d.ts","sourceRoot":"","sources":["../../../src/components/types/ChatBotTypes.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,iBAAiB,EAAE,MAAM,qBAAqB,CAAC;AAE9E,MAAM,WAAW,aAAa;IAC5B,IAAI,EAAE,MAAM,CAAC;IACb,QAAQ,CAAC,EAAE,OAAO,CAAC;CACpB;AAED,MAAM,WAAW,OAAO;IACtB,IAAI,EAAE,MAAM,CAAC;IACb,KAAK,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;CAC5B;AAED,MAAM,WAAW,QAAQ;IACvB,OAAO,EAAE,OAAO,CAAC;CAClB;AAED,MAAM,WAAW,YAAY;IAC3B,MAAM,EAAE,MAAM,CAAC;IACf,KAAK,EAAE,KAAK,CAAC;QAAE,IAAI,EAAE,MAAM,CAAA;KAAE,CAAC,CAAC;IAC/B,QAAQ,CAAC,EAAE,QAAQ,CAAC;CACrB;AAED,MAAM,WAAW,SAAS;IACxB,KAAK,EAAE,MAAM,CAAC;CACf;AAED,MAAM,WAAW,WAAW;IAC1B,KAAK,EAAE,MAAM,CAAC;IACd,WAAW,EAAE,MAAM,EAAE,CAAC;IACtB,QAAQ,EAAE,MAAM,CAAC;CAClB;AAGD,MAAM,WAAW,OAAO;IACtB,KAAK,EAAE,MAAM,CAAC;IACd,KAAK,EAAE,MAAM,CAAC;CACf;AAED,MAAM,WAAW,YAAY;IAC3B,SAAS,CAAC,EAAE,CAAC,OAAO,EAAE,OAAO,KAAK,IAAI,CAAC;IACvC,OAAO,CAAC,EAAE,CAAC,KAAK,EAAE,KAAK,KAAK,IAAI,CAAC;IACjC,SAAS,CAAC,EAAE,CAAC,OAAO,EAAE,OAAO,KAAK,OAAO,CAAC,OAAO,GAAG,IAAI,CAAC,CAAC;IAC1D,cAAc,CAAC,EAAE,aAAa,EAAE,CAAC;IACjC,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,SAAS,EAAE,MAAM,CAAC;IAClB,MAAM,EAAE,MAAM,CAAC;IACf,SAAS,EAAE,MAAM,CAAC;IAClB,WAAW,CAAC,EAAE,YAAY,GAAG,SAAS,CAAC;IACvC,cAAc,CAAC,EAAE,MAAM,OAAO,CAAC;QAC7B,KAAK,EAAE,MAAM,CAAC;KACf,GAAG,SAAS,CAAC,CAAC;IACf,eAAe,CAAC,EAAE,MAAM,CAAC;IACzB,QAAQ,CAAC,EAAE,KAAK,CAAC,SAAS,CAAC;IAE3B,aAAa,CAAC,EAAE,CAAC,KAAK,EAAE,OAAO,KAAK,IAAI,CAAC;IAEzC,iBAAiB,CAAC,EAAE,OAAO,CAAC;IAE5B,4BAA4B,CAAC,EAAE,OAAO,CAAC;IACvC,mBAAmB,CAAC,EAAE,OAAO,CAAC;IAE9B,cAAc,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;IAErC,cAAc,CAAC,EAAE,MAAM,CAAC;IAExB,uBAAuB,CAAC,EAAE,MAAM,CAAC;IAEjC,YAAY,CAAC,EAAE,UAAU,GAAG,UAAU,CAAC;IACvC,qBAAqB,CAAC,EAAE,OAAO,CAAC;CACjC;AAED,MAAM,WAAW,kBAAkB;IAEjC,QAAQ,EAAE,OAAO,EAAE,CAAC;IACpB,UAAU,EAAE,MAAM,CAAC;IACnB,SAAS,EAAE,OAAO,CAAC;IACnB,MAAM,EAAE,OAAO,CAAC;IAChB,UAAU,EAAE,MAAM,GAAG,IAAI,CAAC;IAC1B,GAAG,EAAE,GAAG,GAAG,IAAI,CAAC;IAChB,WAAW,EAAE,MAAM,EAAE,CAAC;IACtB,oBAAoB,EAAE,iBAAiB,EAAE,CAAC;IAC1C,kBAAkB,EAAE,OAAO,CAAC;IAC5B,oBAAoB,EAAE,OAAO,CAAC;IAC9B,YAAY,EAAE,OAAO,CAAC;IACtB,aAAa,EAAE,OAAO,CAAC;IACvB,UAAU,EAAE,OAAO,CAAC;IACpB,UAAU,EAAE,GAAG,CAAC;IAChB,eAAe,EAAE,OAAO,CAAC;IACzB,gBAAgB,EAAE,OAAO,CAAC;IAC1B,YAAY,EAAE;QACZ,IAAI,EAAE,OAAO,CAAC;QACd,IAAI,EAAE,SAAS,GAAG,OAAO,GAAG,SAAS,GAAG,MAAM,CAAC;QAC/C,OAAO,EAAE,MAAM,CAAC;QAChB,QAAQ,CAAC,EAAE,OAAO,CAAC;QACnB,QAAQ,CAAC,EAAE,MAAM,CAAC;KACnB,CAAC;IACF,YAAY,EAAE,OAAO,CAAC;IACtB,oBAAoB,EAAE,OAAO,CAAC;IAC9B,UAAU,EAAE,MAAM,CAAC;IACnB,YAAY,EAAE,KAAK,CAAC,SAAS,CAAC,gBAAgB,CAAC,CAAC;IAChD,eAAe,EAAE,OAAO,CAAC;IAEzB,eAAe,EAAE,OAAO,EAAE,CAAC;IAC3B,kBAAkB,EAAE,MAAM,CAAC;IAC3B,iBAAiB,EAAE,OAAO,CAAC;IAC3B,WAAW,EAAE,aAAa,EAAE,CAAC;IAG7B,aAAa,EAAE,CAAC,KAAK,EAAE,MAAM,KAAK,IAAI,CAAC;IACvC,SAAS,EAAE,CAAC,IAAI,EAAE,OAAO,KAAK,IAAI,CAAC;IACnC,qBAAqB,EAAE,CAAC,IAAI,EAAE,OAAO,KAAK,IAAI,CAAC;IAC/C,uBAAuB,EAAE,CAAC,IAAI,EAAE,OAAO,KAAK,IAAI,CAAC;IACjD,cAAc,EAAE,CAAC,WAAW,EAAE,MAAM,EAAE,KAAK,IAAI,CAAC;IAChD,aAAa,EAAE,CAAC,QAAQ,EAAE,OAAO,KAAK,IAAI,CAAC;IAC3C,kBAAkB,EAAE,CAAC,OAAO,EAAE,OAAO,KAAK,IAAI,CAAC;IAC/C,eAAe,EAAE,CAAC,YAAY,EAAE,GAAG,KAAK,IAAI,CAAC;IAE7C,qBAAqB,EAAE,CAAC,KAAK,EAAE,MAAM,KAAK,IAAI,CAAC;IAC/C,cAAc,EAAE,CAAC,WAAW,EAAE,aAAa,EAAE,KAAK,IAAI,CAAC;IACvD,kBAAkB,EAAE,CAAC,OAAO,EAAE,OAAO,KAAK,IAAI,CAAC;IAG/C,aAAa,EAAE,CAAC,qBAAqB,CAAC,EAAE,OAAO,KAAK,OAAO,CAAC,MAAM,GAAG,KAAK,GAAG,SAAS,CAAC,CAAC;IACxF,YAAY,EAAE,CAAC,sBAAsB,CAAC,EAAE,OAAO,KAAK,OAAO,CAAC,IAAI,CAAC,CAAC;IAClE,WAAW,EAAE,C
AAC,OAAO,EAAE,MAAM,EAAE,UAAU,CAAC,EAAE,MAAM,KAAK,OAAO,CAAC,IAAI,CAAC,CAAC;IACrE,YAAY,EAAE,CAAC,CAAC,EAAE,KAAK,CAAC,SAAS,KAAK,OAAO,CAAC,IAAI,CAAC,CAAC;IACpD,mBAAmB,EAAE,CAAC,OAAO,EAAE,aAAa,KAAK,IAAI,CAAC;IACtD,eAAe,EAAE,CAAC,cAAc,CAAC,EAAE,MAAM,EAAE,KAAK,OAAO,CAAC,IAAI,CAAC,CAAC;IAC9D,SAAS,EAAE,CAAC,QAAQ,EAAE,MAAM,KAAK,OAAO,CAAC,OAAO,CAAC,CAAC;IAClD,uBAAuB,EAAE,CAAC,MAAM,EAAE,UAAU,GAAG,QAAQ,KAAK,OAAO,CAAC,IAAI,CAAC,CAAC;IAC1E,WAAW,EAAE,CAAC,OAAO,EAAE,MAAM,KAAK,IAAI,CAAC;IACvC,SAAS,EAAE,CAAC,OAAO,EAAE,MAAM,KAAK,IAAI,CAAC;IACrC,WAAW,EAAE,CAAC,OAAO,EAAE,MAAM,KAAK,IAAI,CAAC;IACvC,QAAQ,EAAE,CAAC,OAAO,EAAE,MAAM,KAAK,IAAI,CAAC;IACpC,UAAU,EAAE,CAAC,sBAAsB,CAAC,EAAE,OAAO,KAAK,OAAO,CAAC,IAAI,CAAC,CAAC;IAChE,cAAc,EAAE,MAAM,OAAO,CAAC,IAAI,CAAC,CAAC;IACpC,aAAa,EAAE,MAAM,IAAI,CAAC;IAC1B,cAAc,EAAE,MAAM,IAAI,CAAC;IAE3B,iBAAiB,EAAE,CAAC,UAAU,EAAE,MAAM,KAAK,IAAI,CAAC;IAChD,eAAe,EAAE,MAAM,OAAO,CAAC;IAG/B,MAAM,EAAE,CAAC,cAAc,CAAC,EAAE,MAAM,EAAE,KAAK,OAAO,CAAC,IAAI,CAAC,CAAC;IACrD,eAAe,EAAE,MAAM,OAAO,CAAC,MAAM,GAAG,KAAK,GAAG,SAAS,CAAC,CAAC;IAC3D,QAAQ,EAAE,MAAM,OAAO,CAAC,IAAI,CAAC,CAAC;IAC9B,sBAAsB,EAAE,OAAO,CAAC;IAChC,cAAc,EAAE,CAAC,MAAM,EAAE,UAAU,GAAG,QAAQ,KAAK,OAAO,CAAC,IAAI,CAAC,CAAC;IACjE,SAAS,EAAE,MAAM,CAAC;IAClB,eAAe,CAAC,EAAE,MAAM,CAAC;IAGzB,KAAK,EAAE,YAAY,CAAC;IACpB,UAAU,EAAE,MAAM,CAAC;CACpB"}
package/dist/components/types/ChatBotTypes.js
CHANGED
@@ -1,41 +1 @@
-export
-    { label: 'Claude', value: 'claude' },
-    { label: 'ChatGPT', value: 'open-ai' },
-    { label: 'Llama', value: 'llama' },
-    { label: 'Gemini', value: 'gemini' },
-];
-// All available models for reference
-export const ALL_AVAILABLE_MODELS = [
-    { label: 'Claude', value: 'claude' },
-    { label: 'ChatGPT', value: 'open-ai' },
-    { label: 'Llama', value: 'llama' },
-    { label: 'Gemini', value: 'gemini' },
-    { label: 'Nova', value: 'nova' },
-    { label: "Claude-v2", value: "v2/claude" },
-    { label: "ChatGPT-v2", value: "v2/open-ai" },
-    { label: "Llama-v2", value: "v2/llama" },
-    { label: "Gemini-v2", value: "v2/gemini" },
-    { label: "Nova-v2", value: "v2/nova" },
-    { label: "Claude-v2.1", value: "v2.1/claude" },
-    { label: "ChatGPT-v2.1", value: "v2.1/open-ai" },
-    { label: "Llama-v2.1", value: "v2.1/llama" },
-    { label: "Gemini-v2.1", value: "v2.1/gemini" },
-    { label: "Nova-v2.1", value: "v2.1/nova" },
-];
-export const AI_MODEL_MAP = {
-    claude: 'Claude',
-    'open-ai': 'ChatGPT',
-    llama: 'Llama',
-    gemini: 'Gemini',
-    nova: 'Nova',
-    'v2/claude': 'Claude-v2',
-    'v2/open-ai': 'ChatGPT-v2',
-    'v2/llama': 'Llama-v2',
-    'v2/gemini': 'Gemini-v2',
-    'v2/nova': 'Nova-v2',
-    'v2.1/claude': 'Claude-v2.1',
-    'v2.1/open-ai': 'ChatGPT-v2.1',
-    'v2.1/llama': 'Llama-v2.1',
-    'v2.1/gemini': 'Gemini-v2.1',
-    'v2.1/nova': 'Nova-v2.1',
-};
+export {};
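With DEFAULT_AI_MODELS, ALL_AVAILABLE_MODELS and AI_MODEL_MAP removed, this module is now empty (`export {};`), so any value-to-label lookup has to be derived from the models the service returns. A small sketch, assuming the `AIModel` type (`{ label, value }`) is still reachable from the package's typings:

```ts
// Replacement for the removed AI_MODEL_MAP constant: build the lookup from
// whatever the service-models endpoint returned. The type import path is an
// assumption; AIModel may need to be redeclared locally instead.
import type { AIModel } from '@bytexbyte/nxtlinq-ai-agent-sdk';

export function buildModelLabelMap(models: AIModel[]): Record<string, string> {
  // e.g. [{ label: 'Claude', value: 'claude' }] -> { claude: 'Claude' }
  return Object.fromEntries(models.map(m => [m.value, m.label]));
}
```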