rampup 0.1.3 → 0.1.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/auth.js +6 -5
- package/index.js +414 -109
- package/package.json +5 -2
package/auth.js
CHANGED

@@ -141,7 +141,7 @@ export async function getUserInfo() {
   };
 }

-const API_BASE = process.env.RAMP_API_URL || 'https://app.rampup.dev';
+const API_BASE = process.env.RAMP_API_URL || 'https://ramp-api-946191982468.us-central1.run.app';

 /**
  * Login via browser OAuth flow with polling
@@ -149,7 +149,7 @@ const API_BASE = process.env.RAMP_API_URL || 'https://app.rampup.dev';
  */
 export async function loginWithBrowser() {
   // Create a login session
-  const sessionResponse = await fetch(`${API_BASE}/api/
+  const sessionResponse = await fetch(`${API_BASE}/api/ramp/auth/session`, {
     method: 'POST',
     headers: { 'Content-Type': 'application/json' },
   });
@@ -160,8 +160,9 @@ export async function loginWithBrowser() {

   const { sessionId } = await sessionResponse.json();

-  // Open browser to login page with session ID
-  const
+  // Open browser to login page with session ID (always use web app URL)
+  const WEB_APP_URL = 'https://app.rampup.dev';
+  const loginUrl = `${WEB_APP_URL}/cli-login?session=${sessionId}`;

   console.log(`\nOpening browser for authentication...`);
   console.log(`If browser doesn't open, visit: ${loginUrl}\n`);
@@ -182,7 +183,7 @@ export async function loginWithBrowser() {
       attempts++;

       try {
-        const pollResponse = await fetch(`${API_BASE}/api/
+        const pollResponse = await fetch(`${API_BASE}/api/ramp/auth/session/${sessionId}`);

         if (pollResponse.status === 410) {
           throw new Error('Login session expired');
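The login change above is a create-then-poll handshake: the CLI creates a session with a POST, opens the web app's /cli-login page in the browser, and polls the session endpoint until the browser side finishes (a 410 means the session expired). A minimal sketch of the polling half, assuming a `{ status, ... }` response body that this diff does not show:

// Hypothetical polling helper modeled on the loop above; the endpoint path is
// taken from the diff, but every response field besides `sessionId` is an assumption.
const API_BASE = process.env.RAMP_API_URL || 'https://ramp-api-946191982468.us-central1.run.app';

async function pollLoginSession(sessionId, { intervalMs = 2000, maxAttempts = 60 } = {}) {
  for (let attempt = 0; attempt < maxAttempts; attempt++) {
    const res = await fetch(`${API_BASE}/api/ramp/auth/session/${sessionId}`);
    if (res.status === 410) throw new Error('Login session expired');
    if (res.ok) {
      const body = await res.json();
      if (body.status === 'complete') return body; // assumed shape: { status, token? }
    }
    await new Promise((resolve) => setTimeout(resolve, intervalMs));
  }
  throw new Error('Timed out waiting for browser login');
}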
package/index.js
CHANGED

@@ -704,6 +704,7 @@ program
   .command('voice')
   .description('Voice-based codebase learning (talk to your code)')
   .option('-p, --path <path>', 'Project path', '.')
+  .option('-t, --text', 'Use text input instead of microphone')
   .action(async (options) => {
     console.log(chalk.bold.blue('\n🎙️ Voice Mode\n'));
     console.log(chalk.gray('Talk to your codebase. Say "exit" or press Ctrl+C to quit.\n'));
@@ -738,7 +739,7 @@ program
     // Get fresh token after potential login
     const authToken = await getIdToken();

-    const
+    const RAMP_API_URL = process.env.RAMP_API_URL || 'https://ramp-api-946191982468.us-central1.run.app';

     // Track usage
     const usageFile = path.join(process.env.HOME, '.ramp', 'voice-usage.json');
@@ -749,7 +750,6 @@ program
     } catch {}

     const sessionStart = Date.now();
-    let sessionMinutes = 0;

     // Gather codebase context once
     const spinner = ora('Reading codebase...').start();
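The `-t, --text` flag added above is a plain commander boolean, so it reaches the `.action()` handler as `options.text`. A standalone sketch of that mapping (illustrative only, not the rampup CLI itself):

// Running `node cli.js voice --text` prints "text mode"; without the flag,
// options.text is undefined and the microphone path would run.
import { Command } from 'commander';

const program = new Command();
program
  .command('voice')
  .option('-t, --text', 'Use text input instead of microphone')
  .action((options) => {
    console.log(options.text ? 'text mode' : 'microphone mode');
  });

program.parse(process.argv);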
@@ -781,85 +781,389 @@ program
       await getStructure(projectPath);
       context += `\nStructure:\n${structure.slice(0, 2000)}\n`;

-      spinner.succeed('
+      spinner.succeed('Codebase loaded');
     } catch (error) {
       spinner.fail(`Error: ${error.message}`);
       process.exit(1);
     }

-
+    // Check for text-only mode
+    if (options.text) {
+      await runTextVoiceMode(authToken, context, projectPath, usage, usageFile, sessionStart, RAMP_API_URL);
+      return;
+    }

-    //
-
-
-
-
-
-
+    // Try to use realtime voice with microphone
+    try {
+      await runRealtimeVoiceMode(authToken, context, projectPath, usage, usageFile, sessionStart, RAMP_API_URL);
+    } catch (micError) {
+      console.log(chalk.yellow(`\n⚠️ Microphone not available: ${micError.message}`));
+      console.log(chalk.dim('Falling back to text input mode...\n'));
+      await runTextVoiceMode(authToken, context, projectPath, usage, usageFile, sessionStart, RAMP_API_URL);
+    }
+  });
+
+// Realtime voice mode using OpenAI Realtime API
+async function runRealtimeVoiceMode(authToken, context, projectPath, usage, usageFile, sessionStart, RAMP_API_URL) {
+  const WebSocket = (await import('ws')).default;
+  let mic;
+  try {
+    mic = (await import('mic')).default;
+  } catch (e) {
+    throw new Error('mic package not available - run: npm install -g rampup');
+  }
+
+  console.log(chalk.cyan('Connecting to voice service...\n'));
+
+  // Get ephemeral token from our API
+  const sessionResponse = await fetch(`${RAMP_API_URL}/api/ramp/realtime/session`, {
+    method: 'POST',
+    headers: {
+      'Authorization': `Bearer ${authToken}`,
+      'Content-Type': 'application/json',
+    },
+    body: JSON.stringify({
+      model: 'gpt-4o-realtime-preview-2024-12-17',
+      voice: 'verse',
+    }),
+  });
+
+  if (!sessionResponse.ok) {
+    const error = await sessionResponse.json().catch(() => ({}));
+    throw new Error(error.message || error.error || 'Failed to create voice session');
+  }
+
+  const session = await sessionResponse.json();
+  const { clientSecret, sessionId } = session;
+
+  // Connect to OpenAI Realtime API
+  const ws = new WebSocket('wss://api.openai.com/v1/realtime?model=gpt-4o-realtime-preview-2024-12-17', {
+    headers: {
+      'Authorization': `Bearer ${clientSecret}`,
+      'OpenAI-Beta': 'realtime=v1',
+    },
+  });
+
+  let isConnected = false;
+  let micInstance = null;
+  let micInputStream = null;
+  let audioChunks = [];
+  let isListening = false;
+  let sessionDurationSeconds = 0;
+  const sessionTimer = setInterval(() => sessionDurationSeconds++, 1000);
+
+  // Handle WebSocket events
+  ws.on('open', () => {
+    isConnected = true;
+    console.log(chalk.green('✓ Connected to voice service'));
+
+    // Configure the session with codebase context
+    ws.send(JSON.stringify({
+      type: 'session.update',
+      session: {
+        modalities: ['text', 'audio'],
+        instructions: `You are Ramp, a helpful voice assistant for developers exploring codebases.
+Keep responses concise (1-3 sentences) since they'll be spoken aloud.
+
+Project context:
+${context}
+
+Be friendly, practical, and reference specific files when relevant. If asked about code structure, explain it clearly.`,
+        voice: 'verse',
+        input_audio_format: 'pcm16',
+        output_audio_format: 'pcm16',
+        input_audio_transcription: { model: 'whisper-1' },
+        turn_detection: {
+          type: 'server_vad',
+          threshold: 0.5,
+          prefix_padding_ms: 300,
+          silence_duration_ms: 500,
         },
-
-
-
-
-
-
+      },
+    }));
+
+    // Start microphone
+    startMicrophone();
+  });
+
+  ws.on('message', async (data) => {
+    try {
+      const event = JSON.parse(data.toString());
+
+      switch (event.type) {
+        case 'session.created':
+        case 'session.updated':
+          console.log(chalk.green('✓ Session ready - speak now!\n'));
+          break;
+
+        case 'input_audio_buffer.speech_started':
+          process.stdout.write(chalk.dim('🎤 Listening... '));
+          break;
+
+        case 'input_audio_buffer.speech_stopped':
+          console.log(chalk.dim('processing...'));
+          break;
+
+        case 'conversation.item.input_audio_transcription.completed':
+          if (event.transcript) {
+            console.log(chalk.green(`\nYou: ${event.transcript}`));
+            if (event.transcript.toLowerCase().includes('exit') ||
+                event.transcript.toLowerCase().includes('quit') ||
+                event.transcript.toLowerCase().includes('goodbye')) {
+              cleanup();
+            }
+          }
+          break;
+
+        case 'response.audio.delta':
+          // Collect audio chunks
+          if (event.delta) {
+            audioChunks.push(Buffer.from(event.delta, 'base64'));
+          }
+          break;
+
+        case 'response.audio_transcript.delta':
+          // Stream transcript to console
+          if (event.delta) {
+            process.stdout.write(chalk.cyan(event.delta));
+          }
+          break;
+
+        case 'response.audio_transcript.done':
+          console.log('\n');
+          break;
+
+        case 'response.audio.done':
+          // Play collected audio
+          if (audioChunks.length > 0) {
+            await playAudioChunks(audioChunks);
+            audioChunks = [];
+          }
+          break;
+
+        case 'response.done':
+          // Response complete, ready for next input
+          break;
+
+        case 'error':
+          console.error(chalk.red(`\nError: ${event.error?.message || 'Unknown error'}`));
+          break;
+      }
+    } catch (e) {
+      // Ignore parse errors
+    }
+  });
+
+  ws.on('error', (error) => {
+    console.error(chalk.red(`\nConnection error: ${error.message}`));
+    cleanup();
+  });
+
+  ws.on('close', () => {
+    if (isConnected) {
+      console.log(chalk.dim('\nConnection closed'));
+      cleanup();
+    }
+  });
+
+  function startMicrophone() {
+    try {
+      micInstance = mic({
+        rate: '24000',
+        channels: '1',
+        bitwidth: '16',
+        encoding: 'signed-integer',
+        endian: 'little',
+        device: 'default',
+        debug: false,
       });

-
-
-
+      micInputStream = micInstance.getAudioStream();
+
+      micInputStream.on('data', (chunk) => {
+        if (isConnected && ws.readyState === WebSocket.OPEN) {
+          // Send audio to OpenAI
+          ws.send(JSON.stringify({
+            type: 'input_audio_buffer.append',
+            audio: chunk.toString('base64'),
+          }));
+        }
+      });
+
+      micInputStream.on('error', (err) => {
+        console.error(chalk.red(`Microphone error: ${err.message}`));
+      });
+
+      micInstance.start();
+      isListening = true;
+    } catch (err) {
+      throw new Error(`Failed to start microphone: ${err.message}`);
+    }
+  }
+
+  async function playAudioChunks(chunks) {
+    try {
+      // Combine all chunks into one buffer
+      const audioBuffer = Buffer.concat(chunks);
+
+      // Save as raw PCM and convert to playable format
+      const rawPath = `/tmp/ramp-voice-${Date.now()}.raw`;
+      const wavPath = `/tmp/ramp-voice-${Date.now()}.wav`;
+
+      await fs.writeFile(rawPath, audioBuffer);
+
+      // Convert raw PCM to WAV using sox or ffmpeg
+      if (process.platform === 'darwin') {
+        try {
+          // Try sox first
+          await execAsync(`sox -r 24000 -c 1 -b 16 -e signed-integer "${rawPath}" "${wavPath}" 2>/dev/null`);
+          await execAsync(`afplay "${wavPath}"`);
+        } catch {
+          // Try ffmpeg as fallback
+          try {
+            await execAsync(`ffmpeg -f s16le -ar 24000 -ac 1 -i "${rawPath}" "${wavPath}" -y 2>/dev/null`);
+            await execAsync(`afplay "${wavPath}"`);
+          } catch {
+            // Just try to play raw with afplay (may not work)
+          }
+        }
       }

-
+      // Clean up temp files
+      await fs.unlink(rawPath).catch(() => {});
+      await fs.unlink(wavPath).catch(() => {});
+    } catch (err) {
+      // Silently fail audio playback
     }
+  }
+
+  async function cleanup() {
+    clearInterval(sessionTimer);

-
-
-
+    if (micInstance) {
+      try {
+        micInstance.stop();
+      } catch {}
+    }
+
+    if (ws.readyState === WebSocket.OPEN) {
+      ws.close();
+    }
+
+    // Report session end to our API
+    try {
+      await fetch(`${RAMP_API_URL}/api/ramp/realtime/session/${sessionId}/end`, {
         method: 'POST',
         headers: {
           'Authorization': `Bearer ${authToken}`,
           'Content-Type': 'application/json',
         },
-        body: JSON.stringify({
-          product: 'ramp',
-          text,
-          voice: 'nova',
-        }),
+        body: JSON.stringify({ durationSeconds: sessionDurationSeconds }),
       });
+    } catch {}

-
-
-
+    // Save usage
+    const totalSessionMinutes = sessionDurationSeconds / 60;
+    usage.totalMinutes += totalSessionMinutes;
+    usage.sessions.push({
+      date: new Date().toISOString(),
+      project: path.basename(projectPath),
+      minutes: totalSessionMinutes,
+      type: 'realtime',
+    });
+    await fs.writeFile(usageFile, JSON.stringify(usage, null, 2));
+
+    console.log(chalk.cyan('\n👋 Ending voice session...'));
+    console.log(chalk.dim(`Session: ${totalSessionMinutes.toFixed(2)} min`));
+    console.log(chalk.dim(`Total usage: ${usage.totalMinutes.toFixed(2)} min\n`));
+
+    process.exit(0);
+  }
+
+  // Handle Ctrl+C
+  process.on('SIGINT', cleanup);
+
+  // Keep process alive
+  await new Promise(() => {});
+}

-
+// Text input with voice output (fallback mode)
+async function runTextVoiceMode(authToken, context, projectPath, usage, usageFile, sessionStart, RAMP_API_URL) {
+  const API_URL = process.env.ENTITLEMENT_API_URL || 'https://entitlement-service.rian-19c.workers.dev';
+  const conversationHistory = [];
+  let sessionMinutes = 0;
+
+  console.log(chalk.dim('Using text input with voice output.\n'));
+
+  // Helper function to call backend chat API
+  async function chatWithBackend(messages, systemPrompt) {
+    const response = await fetch(`${API_URL}/ai/chat`, {
+      method: 'POST',
+      headers: {
+        'Authorization': `Bearer ${authToken}`,
+        'Content-Type': 'application/json',
+      },
+      body: JSON.stringify({
+        product: 'ramp',
+        messages,
+        system: systemPrompt,
+        max_tokens: 500,
+      }),
+    });
+
+    if (!response.ok) {
+      const error = await response.json().catch(() => ({}));
+      throw new Error(error.message || `API error: ${response.status}`);
     }

-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    return await response.json();
+  }
+
+  // Helper function to call backend TTS API
+  async function textToSpeech(text) {
+    const response = await fetch(`${API_URL}/ai/tts`, {
+      method: 'POST',
+      headers: {
+        'Authorization': `Bearer ${authToken}`,
+        'Content-Type': 'application/json',
+      },
+      body: JSON.stringify({
+        product: 'ramp',
+        text,
+        voice: 'nova',
+      }),
+    });
+
+    if (!response.ok) {
+      throw new Error(`TTS error: ${response.status}`);
+    }
+
+    return Buffer.from(await response.arrayBuffer());
+  }
+
+  // Voice interaction loop
+  async function voiceLoop() {
+    while (true) {
+      try {
+        const { input } = await inquirer.prompt([{
+          type: 'input',
+          name: 'input',
+          message: chalk.green('🎤 You:'),
+          prefix: ''
+        }]);

-
-
+        if (!input.trim()) continue;
+        if (input.toLowerCase() === 'exit' || input.toLowerCase() === 'quit') {
+          break;
+        }

-
-
+        const startTime = Date.now();
+        conversationHistory.push({ role: 'user', content: input });

-
+        // Get AI response
+        const thinkingSpinner = ora('Thinking...').start();
+
+        const systemPrompt = `You are Ramp, a voice assistant helping a developer understand a codebase.
 Keep responses concise (2-3 sentences) since they'll be spoken aloud.

 Project context:
@@ -867,77 +1171,78 @@ ${context}

 Be helpful, friendly, and practical. Reference specific files when relevant.`;

-
-
-
-
-        thinkingSpinner.stop();
+        const chatResponse = await chatWithBackend(conversationHistory, systemPrompt);
+        const answer = chatResponse.content || chatResponse.text || '';
+        conversationHistory.push({ role: 'assistant', content: answer });

-
-        const speechSpinner = ora('Speaking...').start();
-
-        try {
-          const audioBuffer = await textToSpeech(answer);
+        thinkingSpinner.stop();

-
-
-        await fs.writeFile(audioPath, audioBuffer);
+        // Generate speech
+        const speechSpinner = ora('Speaking...').start();

-
-
+        try {
+          const audioBuffer = await textToSpeech(answer);

-
-
-
-        } else if (process.platform === 'linux') {
-          await execAsync(`mpg123 "${audioPath}" 2>/dev/null || play "${audioPath}" 2>/dev/null`).catch(() => {});
-        }
+          // Save and play audio
+          const audioPath = `/tmp/ramp-voice-${Date.now()}.mp3`;
+          await fs.writeFile(audioPath, audioBuffer);

-
-
+          speechSpinner.stop();
+          console.log(chalk.cyan(`\n🔊 Ramp: ${answer}\n`));

-
-
-
-
+          // Play audio (macOS)
+          if (process.platform === 'darwin') {
+            await execAsync(`afplay "${audioPath}"`).catch(() => {});
+          } else if (process.platform === 'linux') {
+            await execAsync(`mpg123 "${audioPath}" 2>/dev/null || play "${audioPath}" 2>/dev/null`).catch(() => {});
           }

-        //
-
-        sessionMinutes += elapsed;
+          // Clean up
+          await fs.unlink(audioPath).catch(() => {});

-        } catch (
-
-
+        } catch (ttsError) {
+          speechSpinner.stop();
+          // Fallback to text if TTS fails
+          console.log(chalk.cyan(`\n💬 Ramp: ${answer}\n`));
         }
-      }
-    }
-
-    // Handle exit
-    process.on('SIGINT', async () => {
-      console.log(chalk.cyan('\n\n👋 Ending voice session...\n'));
-      await saveUsage();
-      process.exit(0);
-    });

-
-
-
-    usage.sessions.push({
-      date: new Date().toISOString(),
-      project: path.basename(projectPath),
-      minutes: totalSessionMinutes
-    });
-    await fs.writeFile(usageFile, JSON.stringify(usage, null, 2));
+        // Track usage
+        const elapsed = (Date.now() - startTime) / 1000 / 60;
+        sessionMinutes += elapsed;

-
-
+      } catch (error) {
+        if (error.name === 'ExitPromptError') break;
+        console.error(chalk.red(`Error: ${error.message}`));
+      }
     }
+  }

-
+  // Handle exit
+  process.on('SIGINT', async () => {
+    console.log(chalk.cyan('\n\n👋 Ending voice session...\n'));
     await saveUsage();
+    process.exit(0);
   });

+  async function saveUsage() {
+    const totalSessionMinutes = (Date.now() - sessionStart) / 1000 / 60;
+    usage.totalMinutes += totalSessionMinutes;
+    usage.sessions.push({
+      date: new Date().toISOString(),
+      project: path.basename(projectPath),
+      minutes: totalSessionMinutes,
+      type: 'text',
+    });
+    await fs.writeFile(usageFile, JSON.stringify(usage, null, 2));
+
+    console.log(chalk.dim(`Session: ${totalSessionMinutes.toFixed(2)} min`));
+    console.log(chalk.dim(`Total usage: ${usage.totalMinutes.toFixed(2)} min\n`));
+  }
+
+  await voiceLoop();
+  await saveUsage();
+}
+
 // Voice usage stats
 program
   .command('voice:usage')
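In the realtime path above, playAudioChunks shells out to sox or ffmpeg to wrap the model's raw PCM16 output in a WAV container before afplay can play it. For reference, the same container can be produced without external binaries by prepending the 44-byte WAV header by hand; this helper is a sketch matching the 24 kHz, mono, 16-bit session settings above, not code from the package:

// Wrap raw little-endian PCM16 samples in a minimal WAV container.
function pcm16ToWav(pcm, sampleRate = 24000, channels = 1) {
  const byteRate = sampleRate * channels * 2; // 16-bit samples = 2 bytes each
  const header = Buffer.alloc(44);
  header.write('RIFF', 0);
  header.writeUInt32LE(36 + pcm.length, 4);   // total file size minus 8
  header.write('WAVE', 8);
  header.write('fmt ', 12);
  header.writeUInt32LE(16, 16);               // fmt chunk size
  header.writeUInt16LE(1, 20);                // audio format 1 = PCM
  header.writeUInt16LE(channels, 22);
  header.writeUInt32LE(sampleRate, 24);
  header.writeUInt32LE(byteRate, 28);
  header.writeUInt16LE(channels * 2, 32);     // block align
  header.writeUInt16LE(16, 34);               // bits per sample
  header.write('data', 36);
  header.writeUInt32LE(pcm.length, 40);
  return Buffer.concat([header, pcm]);
}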
package/package.json
CHANGED

@@ -1,6 +1,6 @@
 {
   "name": "rampup",
-  "version": "0.1.3",
+  "version": "0.1.5",
   "description": "Ramp - Understand any codebase in hours. AI-powered developer onboarding CLI.",
   "type": "module",
   "bin": {
@@ -42,8 +42,11 @@
     "commander": "^11.1.0",
     "firebase": "^10.14.1",
     "inquirer": "^8.2.6",
+    "mic": "^2.1.2",
     "open": "^9.1.0",
     "openai": "^4.0.0",
-    "ora": "^5.4.1"
+    "ora": "^5.4.1",
+    "speaker": "^0.5.5",
+    "ws": "^8.18.0"
   }
 }
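Three runtime dependencies are new: ws (the Realtime WebSocket client), mic (microphone capture), and speaker. The mic package spawns an external recording binary rather than shipping native bindings (typically arecord on Linux and sox/rec elsewhere), which is why the voice command can fail into text mode on machines without one. A hypothetical preflight check along those lines; the published CLI instead just catches the mic error and falls back:

// Sketch: detect a usable recording binary before opening a realtime session.
import { execSync } from 'node:child_process';

function recordingBinaryAvailable() {
  const candidates = process.platform === 'linux' ? ['arecord', 'sox'] : ['sox', 'rec'];
  return candidates.some((bin) => {
    try {
      execSync(`command -v ${bin}`, { stdio: 'ignore' });
      return true;
    } catch {
      return false;
    }
  });
}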