expo-ai-kit 0.1.0 → 0.1.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2) hide show
  1. package/README.md +229 -15
  2. package/package.json +11 -4
package/README.md CHANGED
@@ -1,35 +1,249 @@
1
1
  # expo-ai-kit
2
2
 
3
- Expo AI Kit module
3
+ On-device AI for Expo apps. Run language models locally on iOS using Apple's Foundation Models framework—no API keys, no cloud, just native intelligence.
4
4
 
5
- # API documentation
5
+ ## Features
6
6
 
7
- - [Documentation for the latest stable release](https://docs.expo.dev/versions/latest/sdk/ai-kit/)
8
- - [Documentation for the main branch](https://docs.expo.dev/versions/unversioned/sdk/ai-kit/)
7
+ - 🔒 **Privacy-first** — All inference happens on-device
8
+ - ⚡ **Zero latency** — No network round-trips
9
+ - 🆓 **Free** — No API costs or rate limits
10
+ - 📱 **Native** — Built on Apple's Foundation Models (iOS 26+)
9
11
 
10
- # Installation in managed Expo projects
12
+ ## Requirements
11
13
 
12
- For [managed](https://docs.expo.dev/archive/managed-vs-bare/) Expo projects, please follow the installation instructions in the [API documentation for the latest stable release](#api-documentation). If you follow the link and there is no documentation available then this library is not yet usable within managed projects — it is likely to be included in an upcoming Expo SDK release.
14
+ - iOS 26.0 or later
15
+ - Expo SDK 54+
16
+ - A device with Apple Silicon (M-series or A17 Pro+)
13
17
 
14
- # Installation in bare React Native projects
18
+ ## Installation
15
19
 
16
- For bare React Native projects, you must ensure that you have [installed and configured the `expo` package](https://docs.expo.dev/bare/installing-expo-modules/) before continuing.
20
+ ```bash
21
+ npx expo install expo-ai-kit
22
+ ```
23
+
24
+ For bare React Native projects, run `npx pod-install` after installing.
25
+
26
+ ## Quick Start
27
+
28
+ ```tsx
29
+ import { createSession, sendMessage } from 'expo-ai-kit';
30
+
31
+ // Create a chat session
32
+ const sessionId = await createSession({
33
+ systemPrompt: 'You are a helpful assistant.',
34
+ });
35
+
36
+ // Send a message and get a response
37
+ const { reply } = await sendMessage(
38
+ sessionId,
39
+ [{ role: 'user', content: 'Hello! What can you do?' }]
40
+ );
41
+
42
+ console.log(reply);
43
+ ```
44
+
45
+ ## Usage
46
+
47
+ ### Creating a Session
17
48
 
18
- ### Add the package to your npm dependencies
49
+ Start by creating a session with an optional system prompt:
19
50
 
51
+ ```tsx
52
+ import { createSession } from 'expo-ai-kit';
53
+
54
+ const sessionId = await createSession({
55
+ systemPrompt: 'You are a friendly cooking assistant. Help users with recipes and meal planning.',
56
+ });
57
+ ```
58
+
59
+ ### Sending Messages
60
+
61
+ Send messages and receive AI responses:
62
+
63
+ ```tsx
64
+ import { sendMessage, type LLMMessage } from 'expo-ai-kit';
65
+
66
+ const messages: LLMMessage[] = [
67
+ { role: 'user', content: 'What can I make with eggs and cheese?' }
68
+ ];
69
+
70
+ const { reply } = await sendMessage(sessionId, messages);
71
+ // reply: "You can make a delicious omelette! Here's how..."
20
72
  ```
21
- npm install expo-ai-kit
73
+
74
+ ### Multi-turn Conversations
75
+
76
+ Keep track of the conversation history for context-aware responses:
77
+
78
+ ```tsx
79
+ const [messages, setMessages] = useState<LLMMessage[]>([]);
80
+
81
+ async function chat(userMessage: string) {
82
+ const newMessages = [
83
+ ...messages,
84
+ { role: 'user', content: userMessage }
85
+ ];
86
+
87
+ const { reply } = await sendMessage(sessionId, newMessages);
88
+
89
+ setMessages([
90
+ ...newMessages,
91
+ { role: 'assistant', content: reply }
92
+ ]);
93
+
94
+ return reply;
95
+ }
96
+ ```
97
+
98
+ ### Complete Chat Example
99
+
100
+ Here's a full example of a chat component:
101
+
102
+ ```tsx
103
+ import React, { useState } from 'react';
104
+ import { View, TextInput, Button, Text, FlatList } from 'react-native';
105
+ import { createSession, sendMessage, type LLMMessage } from 'expo-ai-kit';
106
+
107
+ export default function ChatScreen() {
108
+ const [sessionId, setSessionId] = useState<string | null>(null);
109
+ const [messages, setMessages] = useState<LLMMessage[]>([]);
110
+ const [input, setInput] = useState('');
111
+ const [loading, setLoading] = useState(false);
112
+
113
+ const ensureSession = async () => {
114
+ if (sessionId) return sessionId;
115
+ const id = await createSession({
116
+ systemPrompt: 'You are a helpful assistant.',
117
+ });
118
+ setSessionId(id);
119
+ return id;
120
+ };
121
+
122
+ const handleSend = async () => {
123
+ if (!input.trim() || loading) return;
124
+
125
+ const id = await ensureSession();
126
+ const userMessage: LLMMessage = { role: 'user', content: input.trim() };
127
+ const updatedMessages = [...messages, userMessage];
128
+
129
+ setMessages(updatedMessages);
130
+ setInput('');
131
+ setLoading(true);
132
+
133
+ try {
134
+ const { reply } = await sendMessage(id, updatedMessages);
135
+ setMessages(prev => [...prev, { role: 'assistant', content: reply }]);
136
+ } catch (error) {
137
+ console.error('Error:', error);
138
+ } finally {
139
+ setLoading(false);
140
+ }
141
+ };
142
+
143
+ return (
144
+ <View style={{ flex: 1, padding: 16 }}>
145
+ <FlatList
146
+ data={messages}
147
+ keyExtractor={(_, i) => i.toString()}
148
+ renderItem={({ item }) => (
149
+ <View style={{
150
+ padding: 12,
151
+ marginVertical: 4,
152
+ backgroundColor: item.role === 'user' ? '#007AFF' : '#E5E5EA',
153
+ borderRadius: 16,
154
+ alignSelf: item.role === 'user' ? 'flex-end' : 'flex-start',
155
+ maxWidth: '80%',
156
+ }}>
157
+ <Text style={{ color: item.role === 'user' ? '#fff' : '#000' }}>
158
+ {item.content}
159
+ </Text>
160
+ </View>
161
+ )}
162
+ />
163
+ <View style={{ flexDirection: 'row', gap: 8 }}>
164
+ <TextInput
165
+ value={input}
166
+ onChangeText={setInput}
167
+ placeholder="Type a message..."
168
+ style={{ flex: 1, borderWidth: 1, borderRadius: 8, padding: 12 }}
169
+ />
170
+ <Button title={loading ? '...' : 'Send'} onPress={handleSend} />
171
+ </View>
172
+ </View>
173
+ );
174
+ }
22
175
  ```
23
176
 
24
- ### Configure for Android
177
+ ## API Reference
25
178
 
179
+ ### `createSession(options?)`
180
+
181
+ Creates a new chat session.
182
+
183
+ | Parameter | Type | Description |
184
+ |-----------|------|-------------|
185
+ | `options.systemPrompt` | `string` | Optional system prompt to guide the AI's behavior |
186
+
187
+ **Returns:** `Promise<string>` — A unique session ID
188
+
189
+ ---
190
+
191
+ ### `sendMessage(sessionId, messages, options?)`
192
+
193
+ Sends messages and gets a response from the on-device model.
194
+
195
+ | Parameter | Type | Description |
196
+ |-----------|------|-------------|
197
+ | `sessionId` | `string` | The session ID from `createSession` |
198
+ | `messages` | `LLMMessage[]` | Array of conversation messages |
199
+ | `options.temperature` | `number` | Controls randomness (0-1) |
200
+ | `options.maxTokens` | `number` | Maximum response length |
201
+
202
+ **Returns:** `Promise<{ reply: string }>` — The AI's response
203
+
204
+ ---
205
+
206
+ ### `prepareModel(options?)`
207
+
208
+ Pre-loads the model for faster first response.
209
+
210
+ | Parameter | Type | Description |
211
+ |-----------|------|-------------|
212
+ | `options.model` | `string` | Model identifier (optional) |
213
+
214
+ **Returns:** `Promise<void>`
215
+
216
+ ---
217
+
218
+ ### Types
219
+
220
+ ```typescript
221
+ type LLMRole = 'system' | 'user' | 'assistant';
222
+
223
+ type LLMMessage = {
224
+ role: LLMRole;
225
+ content: string;
226
+ };
227
+
228
+ type LLMOptions = {
229
+ temperature?: number;
230
+ maxTokens?: number;
231
+ model?: string;
232
+ };
233
+ ```
26
234
 
235
+ ## Platform Support
27
236
 
237
+ | Platform | Status |
238
+ |----------|--------|
239
+ | iOS 26+ | ✅ Full support |
240
+ | iOS < 26 | ⚠️ Returns mock responses |
241
+ | Android | 🚧 Coming soon |
28
242
 
29
- ### Configure for iOS
243
+ ## License
30
244
 
31
- Run `npx pod-install` after installing the npm package.
245
+ MIT
32
246
 
33
- # Contributing
247
+ ## Contributing
34
248
 
35
- Contributions are very welcome! Please refer to guidelines described in the [contributing guide]( https://github.com/expo/expo#contributing).
249
+ Contributions are welcome! Please refer to guidelines described in the [contributing guide](https://github.com/expo/expo#contributing).
package/package.json CHANGED
@@ -1,10 +1,13 @@
1
1
  {
2
2
  "name": "expo-ai-kit",
3
- "version": "0.1.0",
3
+ "version": "0.1.2",
4
4
  "description": "Expo AI Kit module",
5
5
  "main": "build/index.js",
6
6
  "types": "build/index.d.ts",
7
- "files": ["build"], "scripts": {
7
+ "files": [
8
+ "build"
9
+ ],
10
+ "scripts": {
8
11
  "build": "expo-module build",
9
12
  "clean": "expo-module clean",
10
13
  "lint": "expo-module lint",
@@ -15,7 +18,11 @@
15
18
  "open:ios": "xed example/ios",
16
19
  "open:android": "open -a \"Android Studio\" example/android",
17
20
  "android": "expo run:android",
18
- "ios": "expo run:ios"
21
+ "ios": "expo run:ios",
22
+ "publish:patch": "npm version patch && npm run build && npm publish",
23
+ "publish:minor": "npm version minor && npm run build && npm publish",
24
+ "publish:major": "npm version major && npm run build && npm publish",
25
+ "publish:only": "npm run build && npm publish"
19
26
  },
20
27
  "keywords": [
21
28
  "react-native",
@@ -27,7 +34,7 @@
27
34
  "bugs": {
28
35
  "url": "https://github.com/laraelmas/expo-ai-kit/issues"
29
36
  },
30
- "author": "laraelmas <laraaelmas@gmail.com> (https://github.com/laraelmas)",
37
+ "author": "Lara Elmas (https://github.com/laraelmas)",
31
38
  "license": "MIT",
32
39
  "homepage": "https://github.com/laraelmas/expo-ai-kit#readme",
33
40
  "dependencies": {