clawdex-mobile 1.3.2 → 2.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (48)
  1. package/.github/workflows/ci.yml +1 -1
  2. package/.github/workflows/npm-release.yml +18 -0
  3. package/AGENTS.md +3 -3
  4. package/README.md +112 -541
  5. package/apps/mobile/.env.example +1 -2
  6. package/apps/mobile/App.tsx +261 -68
  7. package/apps/mobile/app.json +31 -5
  8. package/apps/mobile/assets/brand/splash-icon-white.png +0 -0
  9. package/apps/mobile/eas.json +30 -0
  10. package/apps/mobile/package.json +22 -21
  11. package/apps/mobile/plugins/withAndroidCleartextTraffic.js +14 -0
  12. package/apps/mobile/src/api/__tests__/ws.test.ts +44 -6
  13. package/apps/mobile/src/api/chatMapping.ts +48 -8
  14. package/apps/mobile/src/api/client.ts +6 -0
  15. package/apps/mobile/src/api/types.ts +11 -0
  16. package/apps/mobile/src/api/ws.ts +52 -10
  17. package/apps/mobile/src/bridgeUrl.ts +105 -0
  18. package/apps/mobile/src/components/ActivityBar.tsx +32 -13
  19. package/apps/mobile/src/components/ChatHeader.tsx +3 -2
  20. package/apps/mobile/src/components/ChatInput.tsx +246 -91
  21. package/apps/mobile/src/components/ChatMessage.tsx +108 -4
  22. package/apps/mobile/src/config.ts +11 -29
  23. package/apps/mobile/src/hooks/useVoiceRecorder.ts +264 -0
  24. package/apps/mobile/src/navigation/DrawerContent.tsx +18 -8
  25. package/apps/mobile/src/screens/GitScreen.tsx +1 -1
  26. package/apps/mobile/src/screens/MainScreen.tsx +906 -268
  27. package/apps/mobile/src/screens/OnboardingScreen.tsx +1132 -0
  28. package/apps/mobile/src/screens/PrivacyScreen.tsx +1 -1
  29. package/apps/mobile/src/screens/SettingsScreen.tsx +65 -1
  30. package/apps/mobile/src/screens/TerminalScreen.tsx +1 -1
  31. package/apps/mobile/src/screens/TermsScreen.tsx +1 -1
  32. package/docs/app-review-notes.md +7 -2
  33. package/docs/eas-builds.md +91 -0
  34. package/docs/realtime-streaming-limitations.md +84 -0
  35. package/docs/setup-and-operations.md +239 -0
  36. package/docs/troubleshooting.md +121 -0
  37. package/docs/voice-transcription.md +87 -0
  38. package/package.json +8 -16
  39. package/scripts/setup-secure-dev.sh +122 -8
  40. package/scripts/setup-wizard.sh +342 -122
  41. package/scripts/start-bridge-secure.sh +7 -1
  42. package/scripts/sync-versions.js +63 -0
  43. package/services/rust-bridge/.env.example +1 -1
  44. package/services/rust-bridge/Cargo.lock +778 -11
  45. package/services/rust-bridge/Cargo.toml +3 -1
  46. package/services/rust-bridge/package.json +1 -1
  47. package/services/rust-bridge/src/main.rs +587 -12
  48. package/apps/mobile/metro.config.js +0 -3
@@ -0,0 +1,264 @@
1
+ import {
2
+ type AudioRecorder,
3
+ type RecordingOptions,
4
+ AudioQuality,
5
+ IOSOutputFormat,
6
+ requestRecordingPermissionsAsync,
7
+ setAudioModeAsync,
8
+ useAudioRecorder,
9
+ } from 'expo-audio';
10
+ import * as FileSystem from 'expo-file-system/legacy';
11
+ import { useCallback, useEffect, useRef, useState } from 'react';
12
+ import { Platform } from 'react-native';
13
+
14
/** UI-facing lifecycle of a voice capture: idle → recording → transcribing → idle. */
export type VoiceState = 'idle' | 'recording' | 'transcribing';

interface UseVoiceRecorderOptions {
  /**
   * Uploads base64-encoded audio for speech-to-text and resolves with the
   * transcript. `prompt` is optional context text; `options` carries the
   * upload file name and MIME type for the recorded container.
   */
  transcribe: (
    dataBase64: string,
    prompt?: string,
    options?: {
      fileName?: string;
      mimeType?: string;
    }
  ) => Promise<{ text: string }>;
  // Current composer draft; trimmed and forwarded to `transcribe` as the prompt.
  composerContext?: string;
  // Receives the trimmed transcript (only called when it is non-empty).
  onTranscript: (text: string) => void;
  // Receives a human-readable message for every failure path.
  onError: (message: string) => void;
}

// Recordings shorter than this are rejected ("hold longer to record").
const MIN_RECORDING_DURATION_MS = 1_000;
// Hard cap on the audio payload, checked against both the file on disk
// and the base64-decoded size before upload.
const MAX_RECORDING_FILE_BYTES = 20 * 1024 * 1024;
// Same limit expressed in MB, used only for the user-facing error message.
const MAX_RECORDING_FILE_MB = MAX_RECORDING_FILE_BYTES / (1024 * 1024);

// 16 kHz mono capture settings. Android records AAC in an MP4 (.m4a)
// container; iOS records uncompressed 16-bit little-endian PCM WAV;
// web falls back to WebM.
const RECORDING_OPTIONS: RecordingOptions = {
  isMeteringEnabled: false,
  extension: '.m4a',
  sampleRate: 16_000,
  numberOfChannels: 1,
  bitRate: 256_000,
  android: {
    extension: '.m4a',
    outputFormat: 'mpeg4',
    audioEncoder: 'aac',
    sampleRate: 16_000,
  },
  ios: {
    extension: '.wav',
    outputFormat: IOSOutputFormat.LINEARPCM,
    audioQuality: AudioQuality.HIGH,
    sampleRate: 16_000,
    linearPCMBitDepth: 16,
    linearPCMIsBigEndian: false,
    linearPCMIsFloat: false,
  },
  web: {
    mimeType: 'audio/webm',
    bitsPerSecond: 256_000,
  },
};
60
+
61
+ function estimateBase64DecodedSize(base64: string): number {
62
+ const payload = base64.split(',').pop()?.trim() ?? '';
63
+ if (!payload) {
64
+ return 0;
65
+ }
66
+
67
+ const padding = payload.endsWith('==') ? 2 : payload.endsWith('=') ? 1 : 0;
68
+ const blockCount = Math.ceil(payload.length / 4);
69
+ return Math.max(0, blockCount * 3 - padding);
70
+ }
71
+
72
+ function getTranscriptionUploadMetadata(): { fileName: string; mimeType: string } {
73
+ if (Platform.OS === 'ios') {
74
+ return { fileName: 'audio.wav', mimeType: 'audio/wav' };
75
+ }
76
+ if (Platform.OS === 'android') {
77
+ return { fileName: 'audio.m4a', mimeType: 'audio/mp4' };
78
+ }
79
+ return { fileName: 'audio.webm', mimeType: 'audio/webm' };
80
+ }
81
+
82
+ async function deleteRecordingFile(uri: string | null | undefined): Promise<void> {
83
+ const normalized = uri?.trim();
84
+ if (!normalized) {
85
+ return;
86
+ }
87
+ await FileSystem.deleteAsync(normalized, { idempotent: true }).catch(() => {});
88
+ }
89
+
90
+ function safeGetRecorderUri(recorder: AudioRecorder): string | null {
91
+ try {
92
+ return recorder.uri ?? null;
93
+ } catch {
94
+ return null;
95
+ }
96
+ }
97
+
98
+ function safeIsRecording(recorder: AudioRecorder): boolean {
99
+ try {
100
+ return recorder.isRecording;
101
+ } catch {
102
+ return false;
103
+ }
104
+ }
105
+
106
+ async function safeStopRecorder(recorder: AudioRecorder): Promise<void> {
107
+ try {
108
+ await recorder.stop();
109
+ } catch {
110
+ // Ignore stale/released recorder objects and already-stopped recordings.
111
+ }
112
+ }
113
+
114
+ export function useVoiceRecorder({
115
+ transcribe,
116
+ composerContext,
117
+ onTranscript,
118
+ onError,
119
+ }: UseVoiceRecorderOptions) {
120
+ const [voiceState, setVoiceState] = useState<VoiceState>('idle');
121
+ const recorder = useAudioRecorder(RECORDING_OPTIONS);
122
+ const startTimeRef = useRef<number>(0);
123
+ const recorderRef = useRef<AudioRecorder>(recorder);
124
+ recorderRef.current = recorder;
125
+
126
+ const startRecording = useCallback(async () => {
127
+ try {
128
+ const permission = await requestRecordingPermissionsAsync();
129
+ if (!permission.granted) {
130
+ onError('Microphone permission is required for voice input.');
131
+ return;
132
+ }
133
+
134
+ await setAudioModeAsync({
135
+ allowsRecording: true,
136
+ playsInSilentMode: true,
137
+ });
138
+
139
+ await recorderRef.current.prepareToRecordAsync();
140
+ recorderRef.current.record();
141
+ startTimeRef.current = Date.now();
142
+ setVoiceState('recording');
143
+ } catch (err) {
144
+ await setAudioModeAsync({
145
+ allowsRecording: false,
146
+ }).catch(() => {});
147
+ onError(`Failed to start recording: ${err instanceof Error ? err.message : String(err)}`);
148
+ }
149
+ }, [onError]);
150
+
151
+ const stopRecordingAndTranscribe = useCallback(async () => {
152
+ const currentRecorder = recorderRef.current;
153
+ if (!safeIsRecording(currentRecorder)) {
154
+ setVoiceState('idle');
155
+ return;
156
+ }
157
+
158
+ let recordingUriToClean: string | null = null;
159
+
160
+ try {
161
+ const elapsed = Date.now() - startTimeRef.current;
162
+ await safeStopRecorder(currentRecorder);
163
+
164
+ await setAudioModeAsync({
165
+ allowsRecording: false,
166
+ });
167
+
168
+ if (elapsed < MIN_RECORDING_DURATION_MS) {
169
+ onError('Recording too short — hold longer to record.');
170
+ setVoiceState('idle');
171
+ return;
172
+ }
173
+
174
+ const uri = safeGetRecorderUri(currentRecorder);
175
+ if (!uri) {
176
+ onError('Recording failed — no audio file produced.');
177
+ setVoiceState('idle');
178
+ return;
179
+ }
180
+ recordingUriToClean = uri;
181
+
182
+ const fileInfo = await FileSystem.getInfoAsync(uri);
183
+ if (!fileInfo.exists || fileInfo.isDirectory) {
184
+ onError('Recording failed — audio file is unavailable.');
185
+ setVoiceState('idle');
186
+ return;
187
+ }
188
+ if (fileInfo.size > MAX_RECORDING_FILE_BYTES) {
189
+ onError(`Recording too long — maximum size is ${String(MAX_RECORDING_FILE_MB)}MB.`);
190
+ setVoiceState('idle');
191
+ return;
192
+ }
193
+
194
+ setVoiceState('transcribing');
195
+
196
+ const base64 = await FileSystem.readAsStringAsync(uri, {
197
+ encoding: FileSystem.EncodingType.Base64,
198
+ });
199
+ if (estimateBase64DecodedSize(base64) > MAX_RECORDING_FILE_BYTES) {
200
+ onError(`Recording too long — maximum size is ${String(MAX_RECORDING_FILE_MB)}MB.`);
201
+ setVoiceState('idle');
202
+ return;
203
+ }
204
+
205
+ const prompt = composerContext?.trim() || undefined;
206
+ const result = await transcribe(base64, prompt, getTranscriptionUploadMetadata());
207
+
208
+ const text = result.text.trim();
209
+ if (text) {
210
+ onTranscript(text);
211
+ }
212
+ } catch (err) {
213
+ onError(
214
+ `Transcription failed: ${err instanceof Error ? err.message : String(err)}`
215
+ );
216
+ } finally {
217
+ await deleteRecordingFile(recordingUriToClean);
218
+ setVoiceState('idle');
219
+ }
220
+ }, [composerContext, onError, onTranscript, transcribe]);
221
+
222
+ const cancelRecording = useCallback(async () => {
223
+ const currentRecorder = recorderRef.current;
224
+ const recordingUri = safeGetRecorderUri(currentRecorder);
225
+ await safeStopRecorder(currentRecorder);
226
+
227
+ await setAudioModeAsync({
228
+ allowsRecording: false,
229
+ }).catch(() => {});
230
+ await deleteRecordingFile(recordingUri);
231
+
232
+ setVoiceState('idle');
233
+ }, []);
234
+
235
+ const toggleRecording = useCallback(() => {
236
+ if (voiceState === 'recording') {
237
+ void stopRecordingAndTranscribe();
238
+ } else if (voiceState === 'idle') {
239
+ void startRecording();
240
+ }
241
+ }, [voiceState, startRecording, stopRecordingAndTranscribe]);
242
+
243
+ useEffect(() => {
244
+ return () => {
245
+ const currentRecorder = recorderRef.current;
246
+ void (async () => {
247
+ const recordingUri = safeGetRecorderUri(currentRecorder);
248
+ await safeStopRecorder(currentRecorder);
249
+ await deleteRecordingFile(recordingUri);
250
+ })();
251
+ void setAudioModeAsync({
252
+ allowsRecording: false,
253
+ }).catch(() => {});
254
+ };
255
+ }, []);
256
+
257
+ return {
258
+ voiceState,
259
+ startRecording,
260
+ stopRecordingAndTranscribe,
261
+ cancelRecording,
262
+ toggleRecording,
263
+ };
264
+ }
@@ -6,20 +6,22 @@ import {
6
6
  Modal,
7
7
  Pressable,
8
8
  RefreshControl,
9
- SafeAreaView,
10
9
  SectionList,
11
10
  ScrollView,
11
+ type StyleProp,
12
12
  StyleSheet,
13
13
  Text,
14
+ type ViewStyle,
14
15
  View,
15
16
  } from 'react-native';
17
+ import { SafeAreaView } from 'react-native-safe-area-context';
16
18
  import type { HostBridgeApiClient } from '../api/client';
17
19
  import type { ChatSummary, RpcNotification } from '../api/types';
18
20
  import type { HostBridgeWsClient } from '../api/ws';
19
21
  import { BrandMark } from '../components/BrandMark';
20
22
  import { colors, spacing, typography } from '../theme';
21
23
 
22
- type Screen = 'Main' | 'Terminal' | 'Settings' | 'Privacy' | 'Terms';
24
+ type Screen = 'Main' | 'Settings' | 'Privacy' | 'Terms';
23
25
 
24
26
  interface DrawerContentProps {
25
27
  api: HostBridgeApiClient;
@@ -319,9 +321,6 @@ export function DrawerContent({
319
321
  </Pressable>
320
322
  </View>
321
323
 
322
- {/* Nav items */}
323
- <NavItem icon="terminal-outline" label="Terminal" onPress={() => onNavigate('Terminal')} />
324
-
325
324
  {/* Chats section */}
326
325
  <View style={styles.sectionHeader}>
327
326
  <Text style={styles.sectionTitle}>Chats</Text>
@@ -423,6 +422,7 @@ export function DrawerContent({
423
422
  label="Settings"
424
423
  onPress={() => onNavigate('Settings')}
425
424
  style={styles.settingsItem}
425
+ pressableStyle={styles.footerNavItem}
426
426
  />
427
427
  </View>
428
428
  </SafeAreaView>
@@ -482,16 +482,22 @@ function NavItem({
482
482
  label,
483
483
  onPress,
484
484
  style,
485
+ pressableStyle,
485
486
  }: {
486
487
  icon: keyof typeof Ionicons.glyphMap;
487
488
  label: string;
488
489
  onPress: () => void;
489
- style?: object;
490
+ style?: StyleProp<ViewStyle>;
491
+ pressableStyle?: StyleProp<ViewStyle>;
490
492
  }) {
491
493
  return (
492
494
  <View style={style}>
493
495
  <Pressable
494
- style={({ pressed }) => [styles.navItem, pressed && styles.navItemPressed]}
496
+ style={({ pressed }) => [
497
+ styles.navItem,
498
+ pressableStyle,
499
+ pressed && styles.navItemPressed,
500
+ ]}
495
501
  onPress={onPress}
496
502
  >
497
503
  <Ionicons name={icon} size={18} color={colors.textPrimary} />
@@ -890,11 +896,15 @@ const styles = StyleSheet.create({
890
896
  settingsItem: {
891
897
  marginBottom: 0,
892
898
  },
899
+ footerNavItem: {
900
+ marginBottom: 0,
901
+ },
893
902
  footer: {
903
+ marginTop: 'auto',
894
904
  borderTopWidth: StyleSheet.hairlineWidth,
895
905
  borderTopColor: colors.borderLight,
896
906
  paddingTop: spacing.md,
897
- paddingBottom: spacing.md,
907
+ paddingBottom: 0,
898
908
  },
899
909
  workspaceModalBackdrop: {
900
910
  flex: 1,
@@ -3,7 +3,6 @@ import { useCallback, useEffect, useMemo, useRef, useState } from 'react';
3
3
  import {
4
4
  ActivityIndicator,
5
5
  Pressable,
6
- SafeAreaView,
7
6
  ScrollView,
8
7
  StyleSheet,
9
8
  Text,
@@ -11,6 +10,7 @@ import {
11
10
  useWindowDimensions,
12
11
  View,
13
12
  } from 'react-native';
13
+ import { SafeAreaView } from 'react-native-safe-area-context';
14
14
 
15
15
  import type { HostBridgeApiClient } from '../api/client';
16
16
  import type {