react-native-ai-hooks 0.3.0 → 0.5.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (37)
  1. package/.github/workflows/ci.yml +34 -0
  2. package/CONTRIBUTING.md +122 -0
  3. package/README.md +73 -20
  4. package/docs/ARCHITECTURE.md +301 -0
  5. package/docs/ARCHITECTURE_GUIDE.md +467 -0
  6. package/docs/IMPLEMENTATION_COMPLETE.md +349 -0
  7. package/docs/README.md +17 -0
  8. package/docs/TECHNICAL_SPECIFICATION.md +748 -0
  9. package/example/App.tsx +95 -0
  10. package/example/README.md +27 -0
  11. package/example/index.js +5 -0
  12. package/example/package.json +22 -0
  13. package/example/src/components/ProviderPicker.tsx +62 -0
  14. package/example/src/context/APIKeysContext.tsx +96 -0
  15. package/example/src/screens/ChatScreen.tsx +205 -0
  16. package/example/src/screens/SettingsScreen.tsx +124 -0
  17. package/example/tsconfig.json +7 -0
  18. package/jest.config.cjs +7 -0
  19. package/jest.setup.ts +28 -0
  20. package/package.json +17 -3
  21. package/src/hooks/__tests__/useAIForm.test.ts +345 -0
  22. package/src/hooks/__tests__/useAIStream.test.ts +427 -0
  23. package/src/hooks/useAIChat.ts +111 -51
  24. package/src/hooks/useAICode.ts +8 -0
  25. package/src/hooks/useAIForm.ts +92 -202
  26. package/src/hooks/useAIStream.ts +114 -58
  27. package/src/hooks/useAISummarize.ts +8 -0
  28. package/src/hooks/useAITranslate.ts +9 -0
  29. package/src/hooks/useAIVoice.ts +8 -0
  30. package/src/hooks/useImageAnalysis.ts +134 -79
  31. package/src/index.ts +25 -1
  32. package/src/types/index.ts +178 -4
  33. package/src/utils/__tests__/fetchWithRetry.test.ts +168 -0
  34. package/src/utils/__tests__/providerFactory.test.ts +493 -0
  35. package/src/utils/fetchWithRetry.ts +100 -0
  36. package/src/utils/index.ts +8 -0
  37. package/src/utils/providerFactory.ts +288 -0
@@ -0,0 +1,427 @@
1
+ import React from 'react';
2
+ import { act, create } from 'react-test-renderer';
3
+ import { useAIStream } from '../useAIStream';
4
+ import { fetchWithRetry } from '../../utils/fetchWithRetry';
5
+
6
+ jest.mock('../../utils/fetchWithRetry', () => ({
7
+ fetchWithRetry: jest.fn(),
8
+ }));
9
+
10
+ type HookRenderResult<T> = {
11
+ result: {
12
+ readonly current: T;
13
+ };
14
+ unmount: () => void;
15
+ };
16
+
17
+ function renderHook<T>(hook: () => T): HookRenderResult<T> {
18
+ let hookValue: T | undefined;
19
+ let renderer: ReturnType<typeof create> | undefined;
20
+
21
+ function TestComponent() {
22
+ hookValue = hook();
23
+ return null;
24
+ }
25
+
26
+ act(() => {
27
+ renderer = create(React.createElement(TestComponent));
28
+ });
29
+
30
+ return {
31
+ result: {
32
+ get current(): T {
33
+ if (hookValue === undefined) {
34
+ throw new Error('Hook value is not available yet');
35
+ }
36
+ return hookValue;
37
+ },
38
+ },
39
+ unmount: () => {
40
+ if (!renderer) {
41
+ return;
42
+ }
43
+ act(() => {
44
+ renderer.unmount();
45
+ });
46
+ },
47
+ };
48
+ }
49
+
50
+ function createControlledStreamResponse(): {
51
+ response: Response;
52
+ pushLine: (line: string) => Promise<void>;
53
+ close: () => Promise<void>;
54
+ } {
55
+ const encoder = new TextEncoder();
56
+ const pendingReads: Array<(result: ReadableStreamReadResult<Uint8Array>) => void> = [];
57
+
58
+ const reader = {
59
+ read: jest.fn(
60
+ () =>
61
+ new Promise<ReadableStreamReadResult<Uint8Array>>(resolve => {
62
+ pendingReads.push(resolve);
63
+ }),
64
+ ),
65
+ };
66
+
67
+ const response = {
68
+ ok: true,
69
+ status: 200,
70
+ body: {
71
+ getReader: () => reader,
72
+ },
73
+ text: async () => '',
74
+ } as unknown as Response;
75
+
76
+ const resolveNextRead = async (result: ReadableStreamReadResult<Uint8Array>) => {
77
+ const resolver = pendingReads.shift();
78
+ if (!resolver) {
79
+ throw new Error('No pending read to resolve');
80
+ }
81
+ resolver(result);
82
+ await Promise.resolve();
83
+ };
84
+
85
+ return {
86
+ response,
87
+ pushLine: async (line: string) => {
88
+ await resolveNextRead({ done: false, value: encoder.encode(line) });
89
+ },
90
+ close: async () => {
91
+ await resolveNextRead({ done: true, value: undefined });
92
+ },
93
+ };
94
+ }
95
+
96
+ describe('useAIStream', () => {
97
+ const mockedFetchWithRetry = fetchWithRetry as jest.MockedFunction<typeof fetchWithRetry>;
98
+
99
+ beforeEach(() => {
100
+ mockedFetchWithRetry.mockReset();
101
+ });
102
+
103
+ it('streams OpenAI chunks token-by-token into response state', async () => {
104
+ const stream = createControlledStreamResponse();
105
+ mockedFetchWithRetry.mockResolvedValueOnce(stream.response);
106
+
107
+ const { result, unmount } = renderHook(() =>
108
+ useAIStream({
109
+ provider: 'openai',
110
+ apiKey: 'openai-key',
111
+ model: 'gpt-4o-mini',
112
+ }),
113
+ );
114
+
115
+ let streamPromise: Promise<void> | undefined;
116
+
117
+ await act(async () => {
118
+ streamPromise = result.current.streamResponse('Say hello');
119
+ });
120
+
121
+ expect(result.current.isLoading).toBe(true);
122
+ expect(result.current.response).toBe('');
123
+
124
+ await act(async () => {
125
+ await stream.pushLine('data: {"choices":[{"delta":{"content":"Hel"}}]}\n');
126
+ });
127
+ expect(result.current.response).toBe('Hel');
128
+
129
+ await act(async () => {
130
+ await stream.pushLine('data: {"choices":[{"delta":{"content":"lo"}}]}\n');
131
+ });
132
+ expect(result.current.response).toBe('Hello');
133
+
134
+ await act(async () => {
135
+ await stream.pushLine('data: [DONE]\n');
136
+ });
137
+
138
+ await act(async () => {
139
+ await stream.close();
140
+ await streamPromise;
141
+ });
142
+
143
+ expect(result.current.response).toBe('Hello');
144
+ expect(result.current.error).toBeNull();
145
+ expect(result.current.isLoading).toBe(false);
146
+
147
+ expect(mockedFetchWithRetry).toHaveBeenCalledTimes(1);
148
+
149
+ const [url, requestInit] = mockedFetchWithRetry.mock.calls[0];
150
+ expect(url).toBe('https://api.openai.com/v1/chat/completions');
151
+
152
+ const parsedBody = JSON.parse(String((requestInit as RequestInit).body));
153
+ expect(parsedBody).toMatchObject({
154
+ model: 'gpt-4o-mini',
155
+ stream: true,
156
+ messages: [{ role: 'user', content: 'Say hello' }],
157
+ });
158
+
159
+ unmount();
160
+ });
161
+
162
+ it('streams Anthropic chunks token-by-token into response state', async () => {
163
+ const stream = createControlledStreamResponse();
164
+ mockedFetchWithRetry.mockResolvedValueOnce(stream.response);
165
+
166
+ const { result, unmount } = renderHook(() =>
167
+ useAIStream({
168
+ provider: 'anthropic',
169
+ apiKey: 'anthropic-key',
170
+ model: 'claude-sonnet-4-20250514',
171
+ }),
172
+ );
173
+
174
+ let streamPromise: Promise<void> | undefined;
175
+
176
+ await act(async () => {
177
+ streamPromise = result.current.streamResponse('Describe this');
178
+ });
179
+
180
+ await act(async () => {
181
+ await stream.pushLine('data: {"type":"content_block_delta","delta":{"type":"text_delta","text":"Hi"}}\n');
182
+ });
183
+ expect(result.current.response).toBe('Hi');
184
+
185
+ await act(async () => {
186
+ await stream.pushLine('data: {"type":"content_block_delta","delta":{"type":"text_delta","text":" there"}}\n');
187
+ });
188
+ expect(result.current.response).toBe('Hi there');
189
+
190
+ await act(async () => {
191
+ await stream.close();
192
+ await streamPromise;
193
+ });
194
+
195
+ const [url, requestInit] = mockedFetchWithRetry.mock.calls[0];
196
+ expect(url).toBe('https://api.anthropic.com/v1/messages');
197
+ expect((requestInit as RequestInit).headers).toEqual(
198
+ expect.objectContaining({
199
+ 'x-api-key': 'anthropic-key',
200
+ 'anthropic-version': '2023-06-01',
201
+ }),
202
+ );
203
+
204
+ unmount();
205
+ });
206
+
207
+ it('surfaces API error text when the streaming endpoint responds with non-2xx status', async () => {
208
+ mockedFetchWithRetry.mockResolvedValueOnce({
209
+ ok: false,
210
+ status: 500,
211
+ text: async () => 'Server exploded',
212
+ } as unknown as Response);
213
+
214
+ const { result, unmount } = renderHook(() =>
215
+ useAIStream({
216
+ provider: 'openai',
217
+ apiKey: 'openai-key',
218
+ }),
219
+ );
220
+
221
+ await act(async () => {
222
+ await result.current.streamResponse('hello');
223
+ });
224
+
225
+ expect(result.current.error).toBe('Server exploded');
226
+ expect(result.current.isLoading).toBe(false);
227
+
228
+ unmount();
229
+ });
230
+
231
+ it('falls back to status-based API error when error text is empty', async () => {
232
+ mockedFetchWithRetry.mockResolvedValueOnce({
233
+ ok: false,
234
+ status: 429,
235
+ text: async () => '',
236
+ } as unknown as Response);
237
+
238
+ const { result, unmount } = renderHook(() =>
239
+ useAIStream({
240
+ provider: 'openai',
241
+ apiKey: 'openai-key',
242
+ }),
243
+ );
244
+
245
+ await act(async () => {
246
+ await result.current.streamResponse('hello');
247
+ });
248
+
249
+ expect(result.current.error).toBe('API error: 429');
250
+
251
+ unmount();
252
+ });
253
+
254
+ it('sets a descriptive error when Response.body is missing', async () => {
255
+ mockedFetchWithRetry.mockResolvedValueOnce({
256
+ ok: true,
257
+ status: 200,
258
+ body: null,
259
+ text: async () => '',
260
+ } as unknown as Response);
261
+
262
+ const { result, unmount } = renderHook(() =>
263
+ useAIStream({
264
+ provider: 'openai',
265
+ apiKey: 'openai-key',
266
+ }),
267
+ );
268
+
269
+ await act(async () => {
270
+ await result.current.streamResponse('hello');
271
+ });
272
+
273
+ expect(result.current.error).toBe('Streaming not supported in this environment');
274
+ expect(result.current.isLoading).toBe(false);
275
+
276
+ unmount();
277
+ });
278
+
279
+ it('does not set error state for AbortError rejections', async () => {
280
+ const abortError = Object.assign(new Error('aborted'), { name: 'AbortError' });
281
+ mockedFetchWithRetry.mockRejectedValueOnce(abortError);
282
+
283
+ const { result, unmount } = renderHook(() =>
284
+ useAIStream({
285
+ provider: 'openai',
286
+ apiKey: 'openai-key',
287
+ }),
288
+ );
289
+
290
+ await act(async () => {
291
+ await result.current.streamResponse('hello');
292
+ });
293
+
294
+ expect(result.current.error).toBeNull();
295
+ expect(result.current.isLoading).toBe(false);
296
+
297
+ unmount();
298
+ });
299
+
300
+ it('uses fallback message when stream request throws a non-Error value', async () => {
301
+ mockedFetchWithRetry.mockRejectedValueOnce('network down');
302
+
303
+ const { result, unmount } = renderHook(() =>
304
+ useAIStream({
305
+ provider: 'openai',
306
+ apiKey: 'openai-key',
307
+ }),
308
+ );
309
+
310
+ await act(async () => {
311
+ await result.current.streamResponse('hello');
312
+ });
313
+
314
+ expect(result.current.error).toBe('Failed to stream response');
315
+
316
+ unmount();
317
+ });
318
+
319
+ it('ignores empty and non-data lines while processing stream chunks', async () => {
320
+ const stream = createControlledStreamResponse();
321
+ mockedFetchWithRetry.mockResolvedValueOnce(stream.response);
322
+
323
+ const { result, unmount } = renderHook(() =>
324
+ useAIStream({
325
+ provider: 'openai',
326
+ apiKey: 'openai-key',
327
+ }),
328
+ );
329
+
330
+ let streamPromise: Promise<void> | undefined;
331
+ await act(async () => {
332
+ streamPromise = result.current.streamResponse('hello');
333
+ });
334
+
335
+ await act(async () => {
336
+ await stream.pushLine('\n');
337
+ });
338
+ await act(async () => {
339
+ await stream.pushLine('event: ping\n');
340
+ });
341
+ await act(async () => {
342
+ await stream.pushLine('data: {"choices":[{"delta":{"content":"ok"}}]}\n');
343
+ });
344
+
345
+ expect(result.current.response).toBe('ok');
346
+
347
+ await act(async () => {
348
+ await stream.close();
349
+ await streamPromise;
350
+ });
351
+
352
+ unmount();
353
+ });
354
+
355
+ it('defaults to anthropic provider and default model when omitted', async () => {
356
+ const stream = createControlledStreamResponse();
357
+ mockedFetchWithRetry.mockResolvedValueOnce(stream.response);
358
+
359
+ const { result, unmount } = renderHook(() =>
360
+ useAIStream({
361
+ apiKey: 'anthropic-key',
362
+ }),
363
+ );
364
+
365
+ let streamPromise: Promise<void> | undefined;
366
+ await act(async () => {
367
+ streamPromise = result.current.streamResponse('hello');
368
+ });
369
+
370
+ await act(async () => {
371
+ await stream.pushLine('data: {"type":"content_block_delta","delta":{"type":"text_delta","text":"default"}}\n');
372
+ await stream.close();
373
+ await streamPromise;
374
+ });
375
+
376
+ const [url, requestInit] = mockedFetchWithRetry.mock.calls[0];
377
+ expect(url).toBe('https://api.anthropic.com/v1/messages');
378
+
379
+ const parsedBody = JSON.parse(String((requestInit as RequestInit).body));
380
+ expect(parsedBody.model).toBe('claude-sonnet-4-20250514');
381
+ expect(result.current.response).toBe('default');
382
+
383
+ unmount();
384
+ });
385
+
386
+ it('clears response and error when clearResponse is called', async () => {
387
+ const { result, unmount } = renderHook(() =>
388
+ useAIStream({
389
+ provider: 'gemini',
390
+ apiKey: 'gemini-key',
391
+ }),
392
+ );
393
+
394
+ await act(async () => {
395
+ await result.current.streamResponse('hello');
396
+ });
397
+ expect(result.current.error).toBe('Streaming not supported for provider: gemini');
398
+
399
+ act(() => {
400
+ result.current.clearResponse();
401
+ });
402
+
403
+ expect(result.current.response).toBe('');
404
+ expect(result.current.error).toBeNull();
405
+
406
+ unmount();
407
+ });
408
+
409
+ it('sets an error when streaming is requested for an unsupported provider', async () => {
410
+ const { result, unmount } = renderHook(() =>
411
+ useAIStream({
412
+ provider: 'gemini',
413
+ apiKey: 'gemini-key',
414
+ }),
415
+ );
416
+
417
+ await act(async () => {
418
+ await result.current.streamResponse('Hello');
419
+ });
420
+
421
+ expect(result.current.error).toBe('Streaming not supported for provider: gemini');
422
+ expect(result.current.isLoading).toBe(false);
423
+ expect(mockedFetchWithRetry).not.toHaveBeenCalled();
424
+
425
+ unmount();
426
+ });
427
+ });
@@ -1,66 +1,126 @@
1
- import { useState, useCallback } from 'react';
1
+ import { useCallback, useRef, useState, useMemo } from 'react';
2
+ import type { Message, UseAIChatOptions, UseAIChatReturn } from '../types';
3
+ import { createProvider } from '../utils/providerFactory';
2
4
 
3
- interface Message {
4
- role: 'user' | 'assistant';
5
- content: string;
6
- }
7
-
8
- interface UseAIChatOptions {
9
- apiKey: string;
10
- provider?: 'claude' | 'openai';
11
- model?: string;
12
- }
13
-
14
- interface UseAIChatReturn {
15
- messages: Message[];
16
- isLoading: boolean;
17
- error: string | null;
18
- sendMessage: (content: string) => Promise<void>;
19
- clearMessages: () => void;
20
- }
5
+ const DEFAULT_MODEL_MAP = {
6
+ anthropic: 'claude-sonnet-4-20250514',
7
+ openai: 'gpt-4',
8
+ gemini: 'gemini-pro',
9
+ };
21
10
 
11
+ /**
12
+ * Manages conversational chat state and sends prompts to the configured AI provider.
13
+ *
14
+ * @param options Hook configuration including provider, API key, model, retry/timeout settings,
15
+ * and generation options such as system prompt, temperature, and max tokens.
16
+ * @returns Chat controller with current messages, loading/error state, and actions to send,
17
+ * abort, or clear chat messages.
18
+ */
22
19
  export function useAIChat(options: UseAIChatOptions): UseAIChatReturn {
23
20
  const [messages, setMessages] = useState<Message[]>([]);
24
21
  const [isLoading, setIsLoading] = useState(false);
25
22
  const [error, setError] = useState<string | null>(null);
26
23
 
27
- const sendMessage = useCallback(async (content: string) => {
28
- setIsLoading(true);
29
- setError(null);
24
+ const abortControllerRef = useRef<AbortController | null>(null);
25
+ const isMountedRef = useRef(true);
30
26
 
31
- const userMessage: Message = { role: 'user', content };
32
- setMessages(prev => [...prev, userMessage]);
27
+ // Memoize provider config to prevent unnecessary recreations
28
+ const providerConfig = useMemo(
29
+ () => ({
30
+ provider: (options.provider || 'anthropic') as 'anthropic' | 'openai' | 'gemini',
31
+ apiKey: options.apiKey,
32
+ model: options.model || DEFAULT_MODEL_MAP[options.provider || 'anthropic'],
33
+ baseUrl: options.baseUrl,
34
+ timeout: options.timeout,
35
+ maxRetries: options.maxRetries,
36
+ }),
37
+ [options],
38
+ );
33
39
 
34
- try {
35
- const response = await fetch('https://api.anthropic.com/v1/messages', {
36
- method: 'POST',
37
- headers: {
38
- 'Content-Type': 'application/json',
39
- 'x-api-key': options.apiKey,
40
- 'anthropic-version': '2023-06-01',
41
- },
42
- body: JSON.stringify({
43
- model: options.model || 'claude-sonnet-4-20250514',
44
- max_tokens: 1024,
45
- messages: [...messages, userMessage],
46
- }),
47
- });
40
+ const provider = useMemo(() => createProvider(providerConfig), [providerConfig]);
48
41
 
49
- const data = await response.json();
50
- const assistantMessage: Message = {
51
- role: 'assistant',
52
- content: data.content[0].text,
53
- };
54
-
55
- setMessages(prev => [...prev, assistantMessage]);
56
- } catch (err) {
57
- setError('Failed to send message');
58
- } finally {
42
+ const abort = useCallback(() => {
43
+ abortControllerRef.current?.abort();
44
+ abortControllerRef.current = null;
45
+ if (isMountedRef.current) {
59
46
  setIsLoading(false);
60
47
  }
61
- }, [messages, options]);
48
+ }, []);
49
+
50
+ const clearMessages = useCallback(() => {
51
+ setMessages([]);
52
+ setError(null);
53
+ }, []);
54
+
55
+ const sendMessage = useCallback(
56
+ async (content: string) => {
57
+ if (!content.trim()) {
58
+ setError('Message cannot be empty');
59
+ return;
60
+ }
61
+
62
+ setError(null);
63
+ const userMessage: Message = {
64
+ role: 'user',
65
+ content: content.trim(),
66
+ timestamp: Date.now(),
67
+ };
68
+
69
+ setMessages((prev: Message[]) => [...prev, userMessage]);
70
+ setIsLoading(true);
71
+
72
+ try {
73
+ const aiResponse = await provider.makeRequest({
74
+ prompt: content,
75
+ options: {
76
+ system: options.system,
77
+ temperature: options.temperature,
78
+ maxTokens: options.maxTokens,
79
+ },
80
+ context: messages.map((msg: Message) => ({
81
+ role: msg.role,
82
+ content: msg.content,
83
+ })),
84
+ });
85
+
86
+ const assistantMessage: Message = {
87
+ role: 'assistant',
88
+ content: aiResponse.text,
89
+ timestamp: Date.now(),
90
+ };
91
+
92
+ if (isMountedRef.current) {
93
+ setMessages((prev: Message[]) => [...prev, assistantMessage]);
94
+ }
95
+ } catch (err) {
96
+ if (isMountedRef.current) {
97
+ const message = err instanceof Error ? err.message : 'Failed to send message';
98
+ setError(message);
99
+ }
100
+ } finally {
101
+ if (isMountedRef.current) {
102
+ setIsLoading(false);
103
+ }
104
+ }
105
+ },
106
+ [provider, messages, options],
107
+ );
62
108
 
63
- const clearMessages = useCallback(() => setMessages([]), []);
109
+ // Cleanup on unmount
110
+ useState(() => {
111
+ isMountedRef.current = true;
112
+ return () => {
113
+ isMountedRef.current = false;
114
+ abortControllerRef.current?.abort();
115
+ };
116
+ }, []);
64
117
 
65
- return { messages, isLoading, error, sendMessage, clearMessages };
118
+ return {
119
+ messages,
120
+ isLoading,
121
+ error,
122
+ sendMessage,
123
+ abort,
124
+ clearMessages,
125
+ };
66
126
  }
@@ -57,6 +57,14 @@ function getClaudeTextContent(data: unknown): string {
57
57
  .trim();
58
58
  }
59
59
 
60
+ /**
61
+ * Generates and explains code using an AI model while tracking language and request state.
62
+ *
63
+ * @param options Code assistant configuration including API key, model/system prompt,
64
+ * token/temperature controls, and default programming language.
65
+ * @returns Code assistant state with selected language, generated code, explanation text,
66
+ * loading/error indicators, and actions to generate code, explain code, or clear outputs.
67
+ */
60
68
  export function useAICode(options: UseAICodeOptions): UseAICodeReturn {
61
69
  const [language, setLanguage] = useState(options.defaultLanguage || 'typescript');
62
70
  const [generatedCode, setGeneratedCode] = useState('');