@bytexbyte/nxtlinq-ai-agent-sdk 1.6.19 → 1.6.21
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/components/context/ChatBotContext.d.ts.map +1 -1
- package/dist/components/context/ChatBotContext.js +87 -60
- package/dist/components/types/ChatBotTypes.d.ts +5 -0
- package/dist/components/types/ChatBotTypes.d.ts.map +1 -1
- package/dist/components/ui/BerifyMeModal.d.ts +0 -5
- package/dist/components/ui/BerifyMeModal.d.ts.map +1 -1
- package/dist/components/ui/BerifyMeModal.js +5 -7
- package/dist/components/ui/MessageInput.d.ts.map +1 -1
- package/dist/components/ui/MessageInput.js +11 -2
- package/dist/core/lib/textToSpeech.d.ts +22 -0
- package/dist/core/lib/textToSpeech.d.ts.map +1 -0
- package/dist/core/lib/textToSpeech.js +109 -0
- package/dist/core/utils/walletUtils.d.ts.map +1 -1
- package/dist/core/utils/walletUtils.js +0 -4
- package/package.json +1 -1
- package/umd/nxtlinq-ai-agent.umd.js +173 -155
package/dist/components/context/ChatBotContext.d.ts.map
@@ -1 +1 @@
-{"version":3,"file":"ChatBotContext.d.ts","sourceRoot":"","sources":["../../../src/components/context/ChatBotContext.tsx"],"names":[],"mappings":"AAEA,OAAO,KAAK,KAAK,MAAM,OAAO,CAAC;
+{"version":3,"file":"ChatBotContext.d.ts","sourceRoot":"","sources":["../../../src/components/context/ChatBotContext.tsx"],"names":[],"mappings":"AAEA,OAAO,KAAK,KAAK,MAAM,OAAO,CAAC;AAU/B,OAAO,EAEL,kBAAkB,EAClB,YAAY,EAEb,MAAM,uBAAuB,CAAC;AAM/B,eAAO,MAAM,UAAU,0BAMtB,CAAC;AAEF,eAAO,MAAM,eAAe,EAAE,KAAK,CAAC,EAAE,CAAC,YAAY,CA6xElD,CAAC"}
package/dist/components/context/ChatBotContext.js
@@ -7,6 +7,7 @@ import { createNxtlinqApi, setApiHosts } from '../../api/nxtlinq-api';
 import useLocalStorage from '../../core/lib/useLocalStorage';
 import useSessionStorage from '../../core/lib/useSessionStorage';
 import { useSpeechToTextFromMic } from '../../core/lib/useSpeechToTextFromMic';
+import textToBuffer, { getDefaultSpeechToken } from '../../core/lib/textToSpeech';
 import metakeepClient from '../../core/metakeepClient';
 import { sleep } from '../../core/utils';
 const MIC_ENABLED_SESSION_KEY = 'chatbot-mic-enabled';
@@ -72,6 +73,13 @@ isStopRecordingOnSend = false, }) => {
 const [isAITEnabling, setIsAITEnabling] = React.useState(false);
 const [isAwaitingMicGesture, setIsAwaitingMicGesture] = React.useState(false);
 const [autoSendEnabled, setAutoSendEnabled] = useLocalStorage('chatbot-auto-send-enabled', true);
+// Speech related state
+const [textToSpeechEnabled, setTextToSpeechEnabled] = useLocalStorage('chatbot-text-to-speech-enabled', false);
+const [speechingIndex, setSpeechingIndex] = React.useState(undefined);
+// Speech related refs
+const audioCtxRef = React.useRef(null);
+const audioSourceRef = React.useRef(null);
+const speechingRef = React.useRef(false);
 // Use refs to get latest state values in hasPermission function
 const hitAddressRef = React.useRef(hitAddress);
 const aitRef = React.useRef(ait);
@@ -543,10 +551,6 @@ isStopRecordingOnSend = false, }) => {
 return true;
 try {
 const payload = JSON.parse(atob(nxtlinqAITServiceAccessToken.split('.')[1]));
-const exp = payload.exp * 1000;
-const now = Date.now();
-if (exp < now)
-return true;
 const address = payload.address;
 if (address !== hitAddress)
 return true;
@@ -582,17 +586,15 @@ isStopRecordingOnSend = false, }) => {
 if (nxtlinqAITServiceAccessToken) {
 try {
 const payload = JSON.parse(atob(nxtlinqAITServiceAccessToken.split('.')[1]));
-const exp = payload.exp * 1000;
-const now = Date.now();
 const address = payload.address;
-// Check if
-if (
-console.log('Token
+// Check if address matches
+if (address !== userAddress) {
+console.log('Token address mismatch, clearing token');
 clearExpiredToken();
 // Don't call refreshAIT with invalid token
 }
 else {
-// Token
+// Token address matches, try to load AIT
 await refreshAIT();
 await sleep(1000);
 }
@@ -838,56 +840,6 @@ isStopRecordingOnSend = false, }) => {
 }
 try {
 const payload = JSON.parse(atob(currentToken.split('.')[1]));
-const exp = payload.exp * 1000;
-const now = Date.now();
-if (exp < now) {
-setNxtlinqAITServiceAccessToken('');
-if (autoRetry) {
-setIsLoading(false); // Stop thinking before showing message
-setMessages(prev => [...prev, {
-id: Date.now().toString(),
-content: 'Your wallet session has expired. Please sign in again.',
-role: 'assistant',
-timestamp: new Date().toISOString(),
-button: 'signIn'
-}]);
-try {
-setIsAutoConnecting(true); // Mark as auto-signing
-await signInWallet(false); // Don't show success message yet
-onAutoSignIn?.(); // Call callback if provided
-setIsAutoConnecting(false); // Clear auto-signing state
-// Show brief success message for auto-sign-in after session expiry
-showSuccess('Auto sign-in successful after session expiry');
-// Ensure AIT is refreshed after sign-in
-await refreshAIT();
-// Wait for AIT to be fully loaded with polling
-let attempts = 0;
-const maxAttempts = 5; // Wait up to 10 seconds (5 * 2000ms)
-while (!aitRef.current && attempts < maxAttempts) {
-await new Promise(resolve => setTimeout(resolve, 2000));
-attempts++;
-}
-// Only continue if AIT is actually loaded
-if (aitRef.current) {
-// Wait a bit more to ensure permissions are also loaded
-await new Promise(resolve => setTimeout(resolve, 2000));
-// If sign-in successful, continue with permission check
-const result = await hasPermission(requiredPermission, false);
-return result;
-}
-else {
-return false;
-}
-}
-catch (error) {
-console.error('Failed to auto-sign-in after session expiry:', error);
-setIsAutoConnecting(false); // Clear auto-signing state on error
-return false;
-}
-}
-// If autoRetry is false, don't show message again, just return false
-return false;
-}
 const address = payload.address;
 if (address !== currentHitAddress) {
 setNxtlinqAITServiceAccessToken('');
@@ -1441,6 +1393,16 @@ isStopRecordingOnSend = false, }) => {
 window.location.href = redirectUrl;
 }, 100);
 }
+// Auto-play text-to-speech for AI response if enabled
+if (botResponse && botResponse.role === 'assistant' && textToSpeechEnabled) {
+// Use setTimeout to ensure the message is added to state first
+setTimeout(() => {
+// Play speech without needing exact index (index is mainly for UI indication)
+playTextToSpeech(botResponse.content).catch(error => {
+console.error('Failed to play text-to-speech:', error);
+});
+}, 100);
+}
 }
 catch (error) {
 console.error('Failed to send message:', error);
@@ -1541,6 +1503,64 @@ isStopRecordingOnSend = false, }) => {
 setMessages(prev => [...prev, errorMessage]);
 }
 };
+// Stop text-to-speech
+const stopTextToSpeech = React.useCallback(() => {
+if (speechingRef.current && audioSourceRef.current) {
+speechingRef.current = false;
+audioSourceRef.current.stop();
+audioSourceRef.current.disconnect();
+audioSourceRef.current = null;
+}
+setSpeechingIndex(undefined);
+}, []);
+// Play text-to-speech
+const playTextToSpeech = React.useCallback(async (text, messageIndex) => {
+if (!textToSpeechEnabled || !text.trim()) {
+return;
+}
+// Stop any ongoing speech
+stopTextToSpeech();
+try {
+if (audioCtxRef.current === null) {
+audioCtxRef.current = new AudioContext();
+}
+audioSourceRef.current = audioCtxRef.current.createBufferSource();
+if (audioSourceRef.current) {
+const bufferData = await textToBuffer(text, getDefaultSpeechToken);
+if (!bufferData) {
+console.error('Failed to get audio buffer');
+return;
+}
+const buffer = await new Promise((resolve, reject) => {
+if (audioCtxRef.current !== null && bufferData) {
+audioCtxRef.current.decodeAudioData(bufferData, (decodedBuffer) => resolve(decodedBuffer), (error) => reject(error));
+}
+else {
+reject(new Error('Audio context or buffer data is missing'));
+}
+});
+if (audioSourceRef.current && !audioSourceRef.current.buffer) {
+audioSourceRef.current.buffer = buffer;
+audioSourceRef.current.connect(audioCtxRef.current.destination);
+speechingRef.current = true;
+if (messageIndex !== undefined) {
+setSpeechingIndex(messageIndex);
+}
+audioSourceRef.current.start();
+audioSourceRef.current.onended = () => {
+speechingRef.current = false;
+setSpeechingIndex(undefined);
+audioSourceRef.current = null;
+};
+}
+}
+}
+catch (error) {
+console.error('Failed to play text-to-speech:', error);
+speechingRef.current = false;
+setSpeechingIndex(undefined);
+}
+}, [textToSpeechEnabled, stopTextToSpeech]);
 // Handle preset message
 const handlePresetMessage = (message) => {
 if (message.autoSend) {
@@ -2029,6 +2049,9 @@ isStopRecordingOnSend = false, }) => {
 transcript,
 textInputRef,
 autoSendEnabled,
+// Speech related state
+textToSpeechEnabled,
+speechingIndex,
 // AI Model related state
 availableModels: effectiveAvailableModels,
 selectedModelIndex,
@@ -2048,6 +2071,7 @@ isStopRecordingOnSend = false, }) => {
 setSelectedModelIndex,
 setSuggestions,
 setAutoSendEnabled,
+setTextToSpeechEnabled,
 // Functions
 connectWallet,
 signInWallet,
@@ -2068,6 +2092,9 @@ isStopRecordingOnSend = false, }) => {
 // AI Model related functions
 handleModelChange,
 getCurrentModel,
+// Speech related functions
+playTextToSpeech,
+stopTextToSpeech,
 // Additional properties for PermissionForm
 onSave: savePermissions,
 onConnectWallet: () => connectWallet(false),
package/dist/components/types/ChatBotTypes.d.ts
@@ -86,6 +86,8 @@ export interface ChatBotContextType {
 transcript: string;
 textInputRef: React.RefObject<HTMLInputElement>;
 autoSendEnabled: boolean;
+textToSpeechEnabled: boolean;
+speechingIndex: number | undefined;
 availableModels: AIModel[];
 selectedModelIndex: number;
 showModelSelector: boolean;
@@ -101,6 +103,7 @@ export interface ChatBotContextType {
 setSelectedModelIndex: (index: number) => void;
 setSuggestions: (suggestions: PresetMessage[]) => void;
 setAutoSendEnabled: (enabled: boolean) => void;
+setTextToSpeechEnabled: (enabled: boolean) => void;
 connectWallet: (autoShowSignInMessage?: boolean) => Promise<string | false | undefined>;
 signInWallet: (autoShowSuccessMessage?: boolean) => Promise<void>;
 sendMessage: (content: string, retryCount?: number) => Promise<void>;
@@ -119,6 +122,8 @@ export interface ChatBotContextType {
 clearRecording: () => void;
 handleModelChange: (modelIndex: number) => void;
 getCurrentModel: () => AIModel;
+playTextToSpeech: (text: string) => Promise<void>;
+stopTextToSpeech: () => void;
 onSave: (newPermissions?: string[]) => Promise<void>;
 onConnectWallet: () => Promise<string | false | undefined>;
 onSignIn: () => Promise<void>;
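The three ChatBotTypes.d.ts hunks above add the text-to-speech surface to ChatBotContextType: textToSpeechEnabled / setTextToSpeechEnabled, speechingIndex, and playTextToSpeech / stopTextToSpeech. A minimal consumer sketch, assuming useChatBot is re-exported from the package entry point (adjust the import otherwise); the component name and button labels are illustrative only:

```tsx
// Illustrative consumer component, not part of the package.
import React from 'react';
import { useChatBot } from '@bytexbyte/nxtlinq-ai-agent-sdk'; // assumed re-export

export const SpeechControls: React.FC = () => {
  const {
    textToSpeechEnabled,   // persisted under 'chatbot-text-to-speech-enabled'
    setTextToSpeechEnabled,
    speechingIndex,        // index of the message currently being spoken, if any
    playTextToSpeech,      // (text: string) => Promise<void>
    stopTextToSpeech,      // () => void
  } = useChatBot();

  return (
    <>
      <button onClick={() => setTextToSpeechEnabled(!textToSpeechEnabled)}>
        {textToSpeechEnabled ? 'Disable' : 'Enable'} text-to-speech
      </button>
      <button onClick={() => playTextToSpeech('Hello from the agent.')}>
        Speak sample text
      </button>
      <button onClick={stopTextToSpeech}>
        Stop {speechingIndex !== undefined ? `(message ${speechingIndex})` : ''}
      </button>
    </>
  );
};
```

Note that playTextToSpeech returns early while textToSpeechEnabled is false, so the toggle gates both manual playback and the auto-play path added to ChatBotContext.js.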
package/dist/components/types/ChatBotTypes.d.ts.map
@@ -1 +1 @@
-{"version":3,"file":"ChatBotTypes.d.ts","sourceRoot":"","sources":["../../../src/components/types/ChatBotTypes.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,iBAAiB,EAAE,MAAM,qBAAqB,CAAC;AAE9E,MAAM,WAAW,aAAa;IAC5B,IAAI,EAAE,MAAM,CAAC;IACb,QAAQ,CAAC,EAAE,OAAO,CAAC;CACpB;AAED,MAAM,WAAW,OAAO;IACtB,IAAI,EAAE,MAAM,CAAC;IACb,KAAK,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;CAC5B;AAED,MAAM,WAAW,QAAQ;IACvB,OAAO,EAAE,OAAO,CAAC;CAClB;AAED,MAAM,WAAW,YAAY;IAC3B,MAAM,EAAE,MAAM,CAAC;IACf,KAAK,EAAE,KAAK,CAAC;QAAE,IAAI,EAAE,MAAM,CAAA;KAAE,CAAC,CAAC;IAC/B,QAAQ,CAAC,EAAE,QAAQ,CAAC;CACrB;AAED,MAAM,WAAW,SAAS;IACxB,KAAK,EAAE,MAAM,CAAC;CACf;AAED,MAAM,WAAW,WAAW;IAC1B,KAAK,EAAE,MAAM,CAAC;IACd,WAAW,EAAE,MAAM,EAAE,CAAC;IACtB,QAAQ,EAAE,MAAM,CAAC;CAClB;AAGD,MAAM,WAAW,OAAO;IACtB,KAAK,EAAE,MAAM,CAAC;IACd,KAAK,EAAE,MAAM,CAAC;CACf;
+{"version":3,"file":"ChatBotTypes.d.ts","sourceRoot":"","sources":["../../../src/components/types/ChatBotTypes.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,iBAAiB,EAAE,MAAM,qBAAqB,CAAC;AAE9E,MAAM,WAAW,aAAa;IAC5B,IAAI,EAAE,MAAM,CAAC;IACb,QAAQ,CAAC,EAAE,OAAO,CAAC;CACpB;AAED,MAAM,WAAW,OAAO;IACtB,IAAI,EAAE,MAAM,CAAC;IACb,KAAK,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;CAC5B;AAED,MAAM,WAAW,QAAQ;IACvB,OAAO,EAAE,OAAO,CAAC;CAClB;AAED,MAAM,WAAW,YAAY;IAC3B,MAAM,EAAE,MAAM,CAAC;IACf,KAAK,EAAE,KAAK,CAAC;QAAE,IAAI,EAAE,MAAM,CAAA;KAAE,CAAC,CAAC;IAC/B,QAAQ,CAAC,EAAE,QAAQ,CAAC;CACrB;AAED,MAAM,WAAW,SAAS;IACxB,KAAK,EAAE,MAAM,CAAC;CACf;AAED,MAAM,WAAW,WAAW;IAC1B,KAAK,EAAE,MAAM,CAAC;IACd,WAAW,EAAE,MAAM,EAAE,CAAC;IACtB,QAAQ,EAAE,MAAM,CAAC;CAClB;AAGD,MAAM,WAAW,OAAO;IACtB,KAAK,EAAE,MAAM,CAAC;IACd,KAAK,EAAE,MAAM,CAAC;CACf;AACD,MAAM,WAAW,YAAY;IAC3B,SAAS,CAAC,EAAE,CAAC,OAAO,EAAE,OAAO,KAAK,IAAI,CAAC;IACvC,OAAO,CAAC,EAAE,CAAC,KAAK,EAAE,KAAK,KAAK,IAAI,CAAC;IACjC,SAAS,CAAC,EAAE,CAAC,OAAO,EAAE,OAAO,KAAK,OAAO,CAAC,OAAO,GAAG,IAAI,CAAC,CAAC;IAC1D,cAAc,CAAC,EAAE,aAAa,EAAE,CAAC;IACjC,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,SAAS,EAAE,MAAM,CAAC;IAClB,MAAM,EAAE,MAAM,CAAC;IACf,SAAS,EAAE,MAAM,CAAC;IAClB,WAAW,CAAC,EAAE,YAAY,GAAG,SAAS,CAAC;IACvC,cAAc,CAAC,EAAE,MAAM,OAAO,CAAC;QAC7B,KAAK,EAAE,MAAM,CAAC;KACf,GAAG,SAAS,CAAC,CAAC;IACf,eAAe,CAAC,EAAE,MAAM,CAAC;IACzB,QAAQ,CAAC,EAAE,KAAK,CAAC,SAAS,CAAC;IAE3B,aAAa,CAAC,EAAE,CAAC,KAAK,EAAE,OAAO,KAAK,IAAI,CAAC;IAEzC,iBAAiB,CAAC,EAAE,OAAO,CAAC;IAE5B,4BAA4B,CAAC,EAAE,OAAO,CAAC;IACvC,mBAAmB,CAAC,EAAE,OAAO,CAAC;IAE9B,cAAc,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;IAErC,cAAc,CAAC,EAAE,MAAM,CAAC;IAExB,uBAAuB,CAAC,EAAE,MAAM,CAAC;IAEjC,YAAY,CAAC,EAAE,UAAU,GAAG,UAAU,CAAC;IACvC,qBAAqB,CAAC,EAAE,OAAO,CAAC;CACjC;AAED,MAAM,WAAW,kBAAkB;IAEjC,QAAQ,EAAE,OAAO,EAAE,CAAC;IACpB,UAAU,EAAE,MAAM,CAAC;IACnB,SAAS,EAAE,OAAO,CAAC;IACnB,MAAM,EAAE,OAAO,CAAC;IAChB,UAAU,EAAE,MAAM,GAAG,IAAI,CAAC;IAC1B,GAAG,EAAE,GAAG,GAAG,IAAI,CAAC;IAChB,WAAW,EAAE,MAAM,EAAE,CAAC;IACtB,oBAAoB,EAAE,iBAAiB,EAAE,CAAC;IAC1C,kBAAkB,EAAE,OAAO,CAAC;IAC5B,oBAAoB,EAAE,OAAO,CAAC;IAC9B,YAAY,EAAE,OAAO,CAAC;IACtB,aAAa,EAAE,OAAO,CAAC;IACvB,UAAU,EAAE,OAAO,CAAC;IACpB,UAAU,EAAE,GAAG,CAAC;IAChB,eAAe,EAAE,OAAO,CAAC;IACzB,gBAAgB,EAAE,OAAO,CAAC;IAC1B,YAAY,EAAE;QACZ,IAAI,EAAE,OAAO,CAAC;QACd,IAAI,EAAE,SAAS,GAAG,OAAO,GAAG,SAAS,GAAG,MAAM,CAAC;QAC/C,OAAO,EAAE,MAAM,CAAC;QAChB,QAAQ,CAAC,EAAE,OAAO,CAAC;QACnB,QAAQ,CAAC,EAAE,MAAM,CAAC;KACnB,CAAC;IACF,YAAY,EAAE,OAAO,CAAC;IACtB,oBAAoB,EAAE,OAAO,CAAC;IAC9B,UAAU,EAAE,MAAM,CAAC;IACnB,YAAY,EAAE,KAAK,CAAC,SAAS,CAAC,gBAAgB,CAAC,CAAC;IAChD,eAAe,EAAE,OAAO,CAAC;IAEzB,mBAAmB,EAAE,OAAO,CAAC;IAC7B,cAAc,EAAE,MAAM,GAAG,SAAS,CAAC;IAEnC,eAAe,EAAE,OAAO,EAAE,CAAC;IAC3B,kBAAkB,EAAE,MAAM,CAAC;IAC3B,iBAAiB,EAAE,OAAO,CAAC;IAC3B,WAAW,EAAE,aAAa,EAAE,CAAC;IAG7B,aAAa,EAAE,CAAC,KAAK,EAAE,MAAM,KAAK,IAAI,CAAC;IACvC,SAAS,EAAE,CAAC,IAAI,EAAE,OAAO,KAAK,IAAI,CAAC;IACnC,qBAAqB,EAAE,CAAC,IAAI,EAAE,OAAO,KAAK,IAAI,CAAC;IAC/C,uBAAuB,EAAE,CAAC,IAAI,EAAE,OAAO,KAAK,IAAI,CAAC;IACjD,cAAc,EAAE,CAAC,WAAW,EAAE,MAAM,EAAE,KAAK,IAAI,CAAC;IAChD,aAAa,EAAE,CAAC,QAAQ,EAAE,OAAO,KAAK,IAAI,CAAC;IAC3C,kBAAkB,EAAE,CAAC,OAAO,EAAE,OAAO,KAAK,IAAI,CAAC;IAC/C,eAAe,EAAE,CAAC,YAAY,EAAE,GAAG,KAAK,IAAI,CAAC;IAE7C,qBAAqB,EAAE,CAAC,KAAK,EAAE,MAAM,KAAK,IAAI,CAAC;IAC/C,cAAc,EAAE,CAAC,WAAW,EAAE,aAAa,EAAE,KAAK,IAAI,CAAC;IACvD,kBAAkB,EAAE,CAAC,OAAO,EAAE,OAAO,KAAK,IAAI,CAAC;IAC/C,sBAAsB,EAAE,CAAC,OAAO,EAAE,OAAO,KAAK,IAAI,CAAC;IAGnD,aAAa,EAAE,CAAC,qBAAqB,CAAC,EAAE,OAAO,KAAK,OAAO,CAAC,MAAM,GAAG,KAAK,GAAG,SAAS,CAAC,CAAC;IACxF,YAAY,EAAE,CAAC,sBAAsB,CAAC,EAAE,OAAO,KAAK,OAAO,CAAC,IAAI,CAAC,CAAC;IAClE,WAAW,EAAE,CAAC,OAAO,EAAE,MAAM,EAAE,UAAU,CAAC,EAAE,MAAM,KAAK,OAAO,CAAC,IAAI,CAAC,CAAC;IACrE,YAAY,EAAE,CAAC,CAAC,EAAE,KAAK,CAAC,SAAS,KAAK,OAAO,CAAC,IAAI,CAAC,CAAC;IACpD,mBAAmB,EAAE,CAAC,OAAO,EAAE,aAAa,KAAK,IAAI,CAAC;IACtD,eAAe,EAAE,CAAC,cAAc,CAAC,EAAE,MAAM,EAAE,KAAK,OAAO,CAAC,IAAI,CAAC,CAAC;IAC9D,SAAS,EAAE,CAAC,QAAQ,EAAE,MAAM,KAAK,OAAO,CAAC,OAAO,CAAC,CAAC;IAClD,uBAAuB,EAAE,CAAC,MAAM,EAAE,UAAU,GAAG,QAAQ,KAAK,OAAO,CAAC,IAAI,CAAC,CAAC;IAC1E,WAAW,EAAE,CAAC,OAAO,EAAE,MAAM,KAAK,IAAI,CAAC;IACvC,SAAS,EAAE,CAAC,OAAO,EAAE,MAAM,KAAK,IAAI,CAAC;IACrC,WAAW,EAAE,CAAC,OAAO,EAAE,MAAM,KAAK,IAAI,CAAC;IACvC,QAAQ,EAAE,CAAC,OAAO,EAAE,MAAM,KAAK,IAAI,CAAC;IACpC,UAAU,EAAE,CAAC,sBAAsB,CAAC,EAAE,OAAO,KAAK,OAAO,CAAC,IAAI,CAAC,CAAC;IAChE,cAAc,EAAE,MAAM,OAAO,CAAC,IAAI,CAAC,CAAC;IACpC,aAAa,EAAE,MAAM,IAAI,CAAC;IAC1B,cAAc,EAAE,MAAM,IAAI,CAAC;IAE3B,iBAAiB,EAAE,CAAC,UAAU,EAAE,MAAM,KAAK,IAAI,CAAC;IAChD,eAAe,EAAE,MAAM,OAAO,CAAC;IAE/B,gBAAgB,EAAE,CAAC,IAAI,EAAE,MAAM,KAAK,OAAO,CAAC,IAAI,CAAC,CAAC;IAClD,gBAAgB,EAAE,MAAM,IAAI,CAAC;IAG7B,MAAM,EAAE,CAAC,cAAc,CAAC,EAAE,MAAM,EAAE,KAAK,OAAO,CAAC,IAAI,CAAC,CAAC;IACrD,eAAe,EAAE,MAAM,OAAO,CAAC,MAAM,GAAG,KAAK,GAAG,SAAS,CAAC,CAAC;IAC3D,QAAQ,EAAE,MAAM,OAAO,CAAC,IAAI,CAAC,CAAC;IAC9B,sBAAsB,EAAE,OAAO,CAAC;IAChC,cAAc,EAAE,CAAC,MAAM,EAAE,UAAU,GAAG,QAAQ,KAAK,OAAO,CAAC,IAAI,CAAC,CAAC;IACjE,SAAS,EAAE,MAAM,CAAC;IAClB,eAAe,CAAC,EAAE,MAAM,CAAC;IAGzB,KAAK,EAAE,YAAY,CAAC;IACpB,UAAU,EAAE,MAAM,CAAC;CACpB"}
package/dist/components/ui/BerifyMeModal.d.ts
@@ -1,4 +1,3 @@
-/** @jsxImportSource @emotion/react */
 import React from 'react';
 interface BerifyMeModalProps {
 isOpen: boolean;
@@ -11,10 +10,6 @@ declare global {
 interface Window {
 BerifyMeSDK?: {
 modal: any;
-environment: {
-Staging: any;
-Production: any;
-};
 };
 }
 }
package/dist/components/ui/BerifyMeModal.d.ts.map
@@ -1 +1 @@
-{"version":3,"file":"BerifyMeModal.d.ts","sourceRoot":"","sources":["../../../src/components/ui/BerifyMeModal.tsx"],"names":[],"mappings":"
+{"version":3,"file":"BerifyMeModal.d.ts","sourceRoot":"","sources":["../../../src/components/ui/BerifyMeModal.tsx"],"names":[],"mappings":"AAEA,OAAO,KAA4B,MAAM,OAAO,CAAC;AAGjD,UAAU,kBAAkB;IAC1B,MAAM,EAAE,OAAO,CAAC;IAChB,OAAO,EAAE,MAAM,IAAI,CAAC;IACpB,sBAAsB,CAAC,EAAE,CAAC,KAAK,CAAC,EAAE,MAAM,KAAK,IAAI,CAAC;IAClD,IAAI,CAAC,EAAE,UAAU,GAAG,UAAU,CAAC;CAChC;AASD,eAAO,MAAM,aAAa,EAAE,KAAK,CAAC,EAAE,CAAC,kBAAkB,CAoHtD,CAAC;AAGF,OAAO,CAAC,MAAM,CAAC;IACb,UAAU,MAAM;QACd,WAAW,CAAC,EAAE;YACZ,KAAK,EAAE,GAAG,CAAC;SACZ,CAAC;KACH;CACF"}
package/dist/components/ui/BerifyMeModal.js
@@ -1,13 +1,13 @@
 import { jsx as _jsx } from "@emotion/react/jsx-runtime";
 /** @jsxImportSource @emotion/react */
-import React, { useEffect, useRef } from 'react';
 import { css } from '@emotion/react';
+import React, { useEffect, useRef } from 'react';
 import { modalOverlay } from './styles/isolatedStyles';
 // Built-in BerifyMe configuration
 const BUILT_IN_BERIFYME_CONFIG = {
-apiKeyId: '
-secretKey: '
-environment: '
+apiKeyId: 'idv_XjN7vvuQVfHnaUkVQAEhdTuxAsQeKoa9',
+secretKey: 'aaa444b1-087c-4b17-821a-9a6974286905',
+environment: 'idv'
 };
 export const BerifyMeModal = ({ isOpen, onClose, onVerificationComplete, mode = 'built-in' }) => {
 const modalRef = useRef(null);
@@ -71,14 +71,12 @@ export const BerifyMeModal = ({ isOpen, onClose, onVerificationComplete, mode =
 const redirectUrl = `${window.location.origin}${window.location.pathname}?isAutoConnect=true&method=berifyme&returnUrl=${encodeURIComponent(currentUrl)}`;
 // Create BerifyMe modal using built-in config
 const berifyMeModal = window.BerifyMeSDK.modal;
-// Always use staging environment for now
-const environment = window.BerifyMeSDK.environment.Staging;
 // Use React 18's createRoot
 const { createRoot } = require('react-dom/client');
 const root = createRoot(modalRef.current);
 berifyMeModalRef.current = root;
 root.render(React.createElement(berifyMeModal, {
-environment,
+environment: BUILT_IN_BERIFYME_CONFIG.environment,
 apiKeyId: BUILT_IN_BERIFYME_CONFIG.apiKeyId,
 secretKey: BUILT_IN_BERIFYME_CONFIG.secretKey,
 redirectUrl,
package/dist/components/ui/MessageInput.d.ts.map
@@ -1 +1 @@
-{"version":3,"file":"MessageInput.d.ts","sourceRoot":"","sources":["../../../src/components/ui/MessageInput.tsx"],"names":[],"mappings":"
+{"version":3,"file":"MessageInput.d.ts","sourceRoot":"","sources":["../../../src/components/ui/MessageInput.tsx"],"names":[],"mappings":"AAQA,OAAO,KAAK,KAAK,MAAM,OAAO,CAAC;AAI/B,eAAO,MAAM,YAAY,EAAE,KAAK,CAAC,EA0LhC,CAAC"}
package/dist/components/ui/MessageInput.js
@@ -4,11 +4,13 @@ import { css } from '@emotion/react';
 import MicIcon from '@mui/icons-material/Mic';
 import MicOffIcon from '@mui/icons-material/MicOff';
 import SendIcon from '@mui/icons-material/Send';
+import VolumeUpIcon from '@mui/icons-material/VolumeUp';
+import VolumeOffIcon from '@mui/icons-material/VolumeOff';
 import { IconButton, InputBase, Tooltip } from '@mui/material';
 import { useChatBot } from '../context/ChatBotContext';
 import { actionButton } from './styles/isolatedStyles';
 export const MessageInput = () => {
-const { inputValue, setInputValue, isLoading, isAITLoading, handleSubmit, isMicEnabled, isAwaitingMicGesture, startRecording, stopRecording, textInputRef, autoSendEnabled, setAutoSendEnabled, props: { placeholder = 'Type a message...' } } = useChatBot();
+const { inputValue, setInputValue, isLoading, isAITLoading, handleSubmit, isMicEnabled, isAwaitingMicGesture, startRecording, stopRecording, textInputRef, autoSendEnabled, setAutoSendEnabled, textToSpeechEnabled, setTextToSpeechEnabled, props: { placeholder = 'Type a message...' } } = useChatBot();
 const isDisabled = isLoading || isAITLoading;
 const inputPlaceholder = isAITLoading ? 'Loading wallet configuration...' : placeholder;
 const handleKeyPress = (e) => {
@@ -39,7 +41,14 @@ export const MessageInput = () => {
 border-top: 1px solid #eee !important;
 `, children: [_jsx(InputBase, { value: inputValue, onChange: (e) => setInputValue(e.target.value), onKeyPress: handleKeyPress, placeholder: inputPlaceholder, fullWidth: true, inputProps: {
 ref: textInputRef
-}, endAdornment: _jsxs(_Fragment, { children: [
+}, endAdornment: _jsxs(_Fragment, { children: [_jsx(Tooltip, { title: textToSpeechEnabled ? 'Text-to-speech enabled' : 'Text-to-speech disabled', children: _jsx(IconButton, { size: "small", onClick: (e) => {
+e.stopPropagation();
+setTextToSpeechEnabled(!textToSpeechEnabled);
+}, css: css `
+padding: 4px !important;
+margin-right: 4px !important;
+color: ${textToSpeechEnabled ? '#1976d2' : '#9e9e9e'} !important;
+`, children: textToSpeechEnabled ? _jsx(VolumeUpIcon, { fontSize: "small" }) : _jsx(VolumeOffIcon, { fontSize: "small" }) }) }), isMicEnabled && (_jsx(Tooltip, { title: autoSendEnabled ? 'Auto send enabled' : 'Auto send disabled', children: _jsx(IconButton, { size: "small", onClick: (e) => {
 e.stopPropagation();
 setAutoSendEnabled(!autoSendEnabled);
 }, css: css `
package/dist/core/lib/textToSpeech.d.ts
@@ -0,0 +1,22 @@
+export interface SpeechTokenResponse {
+authToken: string;
+region: string;
+error?: string;
+}
+export interface TextToSpeechConfig {
+getToken: () => Promise<SpeechTokenResponse>;
+}
+/**
+ * Default getSpeechToken function that directly calls Azure token service
+ * Uses hardcoded SPEECH_KEY and SPEECH_REGION to get Azure Speech token
+ */
+export declare function getDefaultSpeechToken(): Promise<SpeechTokenResponse>;
+/**
+ * Get or refresh Azure Cognitive Services Speech token
+ */
+export declare function getTokenOrRefresh(getToken: () => Promise<SpeechTokenResponse>): Promise<SpeechTokenResponse>;
+/**
+ * Convert text to audio buffer using Azure Cognitive Services Speech SDK
+ */
+export default function textToBuffer(text: string, getToken: () => Promise<SpeechTokenResponse>): Promise<ArrayBuffer | undefined>;
+//# sourceMappingURL=textToSpeech.d.ts.map
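The declarations above define the token plumbing for the new module: textToBuffer accepts any getToken provider that resolves to a SpeechTokenResponse, and getDefaultSpeechToken is the built-in provider. A sketch of wiring in a custom provider; the deep dist import path and the /api/speech-token endpoint are assumptions for illustration, not part of the SDK's documented API:

```ts
// Illustrative wiring of a custom Azure Speech token provider.
import textToBuffer, {
  getDefaultSpeechToken,
  SpeechTokenResponse,
} from '@bytexbyte/nxtlinq-ai-agent-sdk/dist/core/lib/textToSpeech'; // assumed path

// A provider must resolve to { authToken, region } plus an optional error string.
async function getTokenFromMyBackend(): Promise<SpeechTokenResponse> {
  const res = await fetch('/api/speech-token'); // hypothetical backend route
  if (!res.ok) return { authToken: '', region: '', error: await res.text() };
  return (await res.json()) as SpeechTokenResponse;
}

// textToBuffer resolves to raw audio bytes, or undefined when the token or synthesis fails.
export async function synthesize(text: string, useBuiltIn = false): Promise<ArrayBuffer | undefined> {
  const provider = useBuiltIn ? getDefaultSpeechToken : getTokenFromMyBackend;
  return textToBuffer(text, provider);
}
```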
package/dist/core/lib/textToSpeech.d.ts.map
@@ -0,0 +1 @@
+{"version":3,"file":"textToSpeech.d.ts","sourceRoot":"","sources":["../../../src/core/lib/textToSpeech.ts"],"names":[],"mappings":"AAGA,MAAM,WAAW,mBAAmB;IAClC,SAAS,EAAE,MAAM,CAAC;IAClB,MAAM,EAAE,MAAM,CAAC;IACf,KAAK,CAAC,EAAE,MAAM,CAAC;CAChB;AAED,MAAM,WAAW,kBAAkB;IACjC,QAAQ,EAAE,MAAM,OAAO,CAAC,mBAAmB,CAAC,CAAC;CAC9C;AAQD;;;GAGG;AACH,wBAAsB,qBAAqB,IAAI,OAAO,CAAC,mBAAmB,CAAC,CAkC1E;AAED;;GAEG;AACH,wBAAsB,iBAAiB,CACrC,QAAQ,EAAE,MAAM,OAAO,CAAC,mBAAmB,CAAC,GAC3C,OAAO,CAAC,mBAAmB,CAAC,CAsC9B;AAED;;GAEG;AACH,wBAA8B,YAAY,CACxC,IAAI,EAAE,MAAM,EACZ,QAAQ,EAAE,MAAM,OAAO,CAAC,mBAAmB,CAAC,GAC3C,OAAO,CAAC,WAAW,GAAG,SAAS,CAAC,CAmClC"}
package/dist/core/lib/textToSpeech.js
@@ -0,0 +1,109 @@
+import { SpeechConfig, SpeechSynthesizer } from 'microsoft-cognitiveservices-speech-sdk';
+import Cookie from 'universal-cookie';
+// Hardcoded Azure Speech credentials
+const SPEECH_KEY = '8b2cec35ebcd4e6e94da56537c5e910c';
+const SPEECH_REGION = 'westcentralus';
+const cookie = new Cookie();
+/**
+ * Default getSpeechToken function that directly calls Azure token service
+ * Uses hardcoded SPEECH_KEY and SPEECH_REGION to get Azure Speech token
+ */
+export async function getDefaultSpeechToken() {
+try {
+const tokenResponse = await fetch(`https://${SPEECH_REGION}.api.cognitive.microsoft.com/sts/v1.0/issueToken`, {
+method: 'POST',
+headers: {
+'Ocp-Apim-Subscription-Key': SPEECH_KEY,
+},
+});
+if (!tokenResponse.ok) {
+const errorText = await tokenResponse.text();
+return {
+authToken: '',
+region: '',
+error: `Failed to get token: ${errorText}`
+};
+}
+const token = await tokenResponse.text();
+return {
+authToken: token,
+region: SPEECH_REGION
+};
+}
+catch (error) {
+console.error('Failed to fetch Azure Speech token:', error);
+return {
+authToken: '',
+region: '',
+error: error instanceof Error ? error.message : 'Failed to fetch token'
+};
+}
+}
+/**
+ * Get or refresh Azure Cognitive Services Speech token
+ */
+export async function getTokenOrRefresh(getToken) {
+const speechToken = cookie.get('speech-token');
+if (speechToken === undefined) {
+try {
+const tokenRes = await getToken();
+if (tokenRes.error) {
+return tokenRes;
+}
+// Cache token in cookie for 9 minutes (540 seconds)
+// Token typically expires in 10 minutes
+cookie.set('speech-token', tokenRes.region + ':' + tokenRes.authToken, {
+maxAge: 540,
+path: '/'
+});
+return tokenRes;
+}
+catch (e) {
+const err = e;
+console.error('Failed to get speech token:', err);
+return {
+authToken: '',
+region: '',
+error: err.response?.data || 'Failed to get speech token'
+};
+}
+}
+else {
+// Token exists in cookie, parse it
+const idx = speechToken.indexOf(':');
+if (idx === -1) {
+// Invalid format, fetch new token
+cookie.remove('speech-token', { path: '/' });
+return getTokenOrRefresh(getToken);
+}
+return {
+authToken: speechToken.slice(idx + 1),
+region: speechToken.slice(0, idx)
+};
+}
+}
+/**
+ * Convert text to audio buffer using Azure Cognitive Services Speech SDK
+ */
+export default async function textToBuffer(text, getToken) {
+const tokenRes = await getTokenOrRefresh(getToken);
+if (tokenRes.error || !tokenRes.authToken || !tokenRes.region) {
+console.error('Failed to get speech token:', tokenRes.error);
+return undefined;
+}
+const speechConfig = SpeechConfig.fromAuthorizationToken(tokenRes.authToken, tokenRes.region);
+// eslint-disable-next-line @typescript-eslint/ban-ts-comment
+// @ts-ignore
+const synthesizer = new SpeechSynthesizer(speechConfig, null);
+const res = await new Promise((resolve) => {
+synthesizer.speakTextAsync(text, (result) => {
+resolve(result);
+}, (err) => resolve(err));
+});
+synthesizer.close();
+if (typeof res === 'string') {
+console.error('Text-to-speech error:', res);
+return undefined;
+}
+return res.audioData;
+}
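textToBuffer returns a raw ArrayBuffer, so playback goes through the Web Audio API; ChatBotContext.js decodes it with decodeAudioData and plays it through an AudioBufferSourceNode. A minimal standalone sketch of the same decode-and-play flow, browser-only and assuming a relative import next to this module:

```ts
// Minimal playback sketch mirroring the pattern used in ChatBotContext.js.
import textToBuffer, { getDefaultSpeechToken } from './textToSpeech'; // assumed path

export async function speak(text: string): Promise<void> {
  const data = await textToBuffer(text, getDefaultSpeechToken);
  if (!data) return; // token fetch or synthesis failed; the module already logged it

  const ctx = new AudioContext();
  const audioBuffer = await ctx.decodeAudioData(data); // ArrayBuffer -> AudioBuffer
  const source = ctx.createBufferSource();
  source.buffer = audioBuffer;
  source.connect(ctx.destination);
  source.onended = () => ctx.close(); // release the audio device when playback ends
  source.start();
}
```

The shipped getDefaultSpeechToken uses the bundled SPEECH_KEY and SPEECH_REGION and caches the issued token in a 'speech-token' cookie for nine minutes before requesting a new one.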
package/dist/core/utils/walletUtils.d.ts.map
@@ -1 +1 @@
-{"version":3,"file":"walletUtils.d.ts","sourceRoot":"","sources":["../../../src/core/utils/walletUtils.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,MAAM,EAAE,MAAM,QAAQ,CAAC;AAGhC,MAAM,WAAW,sBAAsB;IACrC,OAAO,EAAE,MAAM,CAAC;IAChB,MAAM,EAAE,MAAM,CAAC,MAAM,CAAC;CACvB;AAED,eAAO,MAAM,aAAa,QAAa,OAAO,CAAC,sBAAsB,CAkBpE,CAAC;AAEF,eAAO,MAAM,gBAAgB,QAAO,IAEnC,CAAC;AAEF,eAAO,MAAM,sBAAsB,QAAO,MAAM,GAAG,IAElD,CAAC;AAEF,eAAO,MAAM,aAAa,GAAI,OAAO,MAAM,EAAE,iBAAiB,MAAM,KAAG,
+{"version":3,"file":"walletUtils.d.ts","sourceRoot":"","sources":["../../../src/core/utils/walletUtils.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,MAAM,EAAE,MAAM,QAAQ,CAAC;AAGhC,MAAM,WAAW,sBAAsB;IACrC,OAAO,EAAE,MAAM,CAAC;IAChB,MAAM,EAAE,MAAM,CAAC,MAAM,CAAC;CACvB;AAED,eAAO,MAAM,aAAa,QAAa,OAAO,CAAC,sBAAsB,CAkBpE,CAAC;AAEF,eAAO,MAAM,gBAAgB,QAAO,IAEnC,CAAC;AAEF,eAAO,MAAM,sBAAsB,QAAO,MAAM,GAAG,IAElD,CAAC;AAEF,eAAO,MAAM,aAAa,GAAI,OAAO,MAAM,EAAE,iBAAiB,MAAM,KAAG,OAStE,CAAC"}
package/dist/core/utils/walletUtils.js
@@ -24,10 +24,6 @@ export const getStoredWalletAddress = () => {
 export const validateToken = (token, expectedAddress) => {
 try {
 const payload = JSON.parse(atob(token.split('.')[1]));
-const exp = payload.exp * 1000;
-const now = Date.now();
-if (exp < now)
-return false;
 const address = payload.address;
 return address === expectedAddress;
 }
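With the four lines above removed, validateToken no longer rejects expired JWTs; only the address claim is compared against the expected wallet address. An illustrative check of the new behavior; the token is hand-built for the example and makeToken is not part of the package:

```ts
// Illustrative only: expiry is now ignored by validateToken.
import { validateToken } from './walletUtils'; // assumed relative import

const makeToken = (payload: object): string =>
  ['e30', btoa(JSON.stringify(payload)), 'sig'].join('.'); // dummy header and signature

const expired = makeToken({ address: '0xabc', exp: Math.floor(Date.now() / 1000) - 3600 });

validateToken(expired, '0xabc'); // true  (previously false because exp < now)
validateToken(expired, '0xdef'); // false (address mismatch still fails)
```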
package/package.json CHANGED