chat-nest-sdk 1.0.1 → 1.1.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +9 -4
- package/dist/index.d.ts +3 -2
- package/dist/index.js +145 -31
- package/package.json +1 -1
package/README.md
CHANGED

@@ -1,26 +1,29 @@
 # chat-nest-sdk
 
-> Frontend SDK for Chat Nest providing a simple React hook to consume streaming AI APIs safely.
+> Frontend SDK for Chat Nest providing a simple React hook to consume streaming AI APIs safely using Server-Side Events (SSE).
 
 This package handles:
--
+- Server-Side Events (SSE) streaming response handling
+- Real-time token streaming via SSE protocol
 - Cancellation propagation
 - Intelligent retry behavior
 - Error normalization
 - Message state management
 
-Designed for production usage in React applications.
+Designed for production usage in React applications. Uses SSE for efficient, bidirectional communication with the backend.
 
 ---
 
 ## ✨ Features
 
--
+- Server-Side Events (SSE) streaming protocol
+- Real-time token streaming via SSE events
 - Abort-safe cancellation
 - Retry only on network / server failures
 - No retries on client or policy errors
 - Message state management
 - Lightweight and framework-friendly
+- Efficient SSE event parsing (`token`, `done`, `error`, `ping`, `start` events)
 
 ---
 
@@ -96,6 +99,8 @@ Cancel immediately stops streaming and billing.
 
 Network failures retry automatically.
 
+**Server-Side Events (SSE)**: The SDK communicates with the backend using the SSE protocol. The backend must send events in SSE format (`event: <type>\ndata: <data>\n\n`). Supported event types: `start`, `token`, `done`, `error`, `ping`.
+
 ---
 
 ## 📄 License
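The README addition above names the wire format — SSE, properly "Server-Sent Events" — without showing a concrete stream. A response body that would satisfy the parser added in `dist/index.js` (events separated by blank lines, with `event:` and `data:` field lines) might look like the following; the payload values are illustrative, not taken from the package:

```text
event: start
data: {}

event: token
data: "Hello"

event: token
data: ", world"

event: done
data: {}
```

Because the client attempts `JSON.parse` on each `token` payload and falls back to the raw text, both JSON-encoded strings (as here) and plain-text tokens stream correctly. Strictly speaking, SSE is one-directional (server to client); the client's side of the exchange is an ordinary POST request, as the `fetch` call in `dist/index.js` below shows.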
package/dist/index.d.ts
CHANGED

@@ -1,4 +1,5 @@
-import { UseAIChatOptions, UseAIChatReturn, AIClient, AiClientConfig, Message, StreamCallbacks } from 'chat-nest-core';
+import { UseAIChatOptions, UseAIChatReturn, AIClient, AiClientConfig, Message, StreamCallbacks, AiUsageProfile, ChatRequestOptions } from 'chat-nest-core';
+export { AiUsageProfile } from 'chat-nest-core';
 
 declare function useAiChat(options: UseAIChatOptions): UseAIChatReturn;
 
@@ -6,7 +7,7 @@ declare class FetchAiClient implements AIClient {
     private abortController?;
     private config;
    constructor(config: AiClientConfig);
-    streamChat(messages: Message[], callbacks: StreamCallbacks): Promise<void>;
+    streamChat(messages: Message[], callbacks: StreamCallbacks, profile: AiUsageProfile, options?: ChatRequestOptions): Promise<void>;
     cancel(): void;
     private executeStream;
     private normalizeError;
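The widened `streamChat` signature makes the usage profile a required third argument and the per-request limits an optional fourth. A hypothetical direct call against the new typings — the endpoint and values are invented, the `Message` shape is inferred from how `dist/index.js` uses `msg.id` and `msg.content`, and this assumes `FetchAiClient` is in the public export list, which is truncated in this diff:

```ts
import { FetchAiClient } from "chat-nest-sdk";

const client = new FetchAiClient({ endpoint: "/api/chat" });

let answer = "";
await client.streamChat(
  [{ id: "m1", role: "user", content: "Summarize SSE in one line." }], // Message[]
  {
    onToken: (token) => { answer += token; },  // StreamCallbacks
    onComplete: () => console.log(answer),
    onError: (err) => console.error(err),
  },
  "balanced",                  // AiUsageProfile — now a required argument
  { maxTokensPerRequest: 256 } // ChatRequestOptions — optional
);
```

Note that 1.0.x call sites that passed only `(messages, callbacks)` no longer match these typings, even though the version bump is minor.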
package/dist/index.js
CHANGED

@@ -4,6 +4,16 @@ import {
   MessageRole
 } from "chat-nest-core";
 
+// src/core/promptPresets.ts
+var PROFILE_SYSTEM_PROMPTS = {
+  constrained: "You are a concise assistant. Answer in the fewest words possible. No explanations or preamble. Direct answers only. No examples needed",
+  balanced: "You are a helpful assistant. Be clear and concise. Give brief explanations when they add value, but keep responses focused.",
+  expanded: "You are a thorough assistant. Explain your reasoning step by step when useful. Include relevant examples and detail. Prioritize clarity and completeness."
+};
+function getSystemPromptForProfile(profile) {
+  return PROFILE_SYSTEM_PROMPTS[profile];
+}
+
 // src/core/aiClient.ts
 var DEFAULT_TIMEOUT = 3e4;
 var DEFAULT_RETRIES = 2;
@@ -22,11 +32,11 @@ var FetchAiClient = class {
       maxRetries: config.maxRetries ?? DEFAULT_RETRIES
     };
   }
-  async streamChat(messages, callbacks) {
+  async streamChat(messages, callbacks, profile, options) {
     let attempt = 0;
     while (attempt <= this.config.maxRetries) {
       try {
-        await this.executeStream(messages, callbacks);
+        await this.executeStream(messages, callbacks, profile, options);
         return;
       } catch (error) {
         if (error?.name === "AbortError") {
@@ -48,12 +58,13 @@ var FetchAiClient = class {
   cancel() {
     this.abortController?.abort();
   }
-  async executeStream(messages, callbacks) {
+  async executeStream(messages, callbacks, profile, options) {
    this.abortController = new AbortController();
    const timeoutId = setTimeout(
      () => this.abortController?.abort(),
      this.config.timeoutMs
    );
+    let completed = false;
    try {
      const response = await fetch(this.config.endpoint, {
        method: "POST",
@@ -62,7 +73,17 @@ var FetchAiClient = class {
          ...this.config.headers
        },
        signal: this.abortController.signal,
-        body: JSON.stringify({
+        body: JSON.stringify({
+          messages,
+          profile,
+          systemPrompt: getSystemPromptForProfile(profile),
+          ...options?.dailyTokenLimit != null && {
+            dailyTokenLimit: options.dailyTokenLimit
+          },
+          ...options?.maxTokensPerRequest != null && {
+            maxTokensPerRequest: options.maxTokensPerRequest
+          }
+        })
      });
      if (!response.ok) {
        if (response.status >= 400 && response.status < 500) {
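With the conditional spreads above, unset limits are omitted from the payload entirely rather than sent as `null`. For a `"constrained"` request with only `maxTokensPerRequest` configured, the serialized body would look roughly like this (message content and limit value invented; the `systemPrompt` string is the one defined in `PROFILE_SYSTEM_PROMPTS`):

```json
{
  "messages": [{ "id": "m1", "role": "user", "content": "Hi" }],
  "profile": "constrained",
  "systemPrompt": "You are a concise assistant. Answer in the fewest words possible. No explanations or preamble. Direct answers only. No examples needed",
  "maxTokensPerRequest": 256
}
```

`dailyTokenLimit` is absent rather than `null`, which leaves the backend free to distinguish "not configured" from an explicit limit.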
@@ -75,13 +96,64 @@ var FetchAiClient = class {
      }
      const reader = response.body.getReader();
      const decoder = new TextDecoder();
+      let buffer = "";
      while (true) {
+        if (this.abortController.signal.aborted) {
+          throw new DOMException("Aborted", "AbortError");
+        }
        const { value, done } = await reader.read();
+        if (value) {
+          buffer += decoder.decode(value, { stream: true });
+        }
+        const events = buffer.split("\n\n");
+        buffer = events.pop() || "";
+        for (const eventText of events) {
+          if (!eventText.trim()) continue;
+          let eventType = "";
+          let eventDataParts = [];
+          for (const line of eventText.split("\n")) {
+            if (line.startsWith("event:")) {
+              eventType = line.substring(6).trim();
+            } else if (line.startsWith("data:")) {
+              eventDataParts.push(line.substring(5).trim());
+            }
+          }
+          const eventData = eventDataParts.join("\n");
+          switch (eventType) {
+            case "token": {
+              try {
+                const token = JSON.parse(eventData);
+                callbacks.onToken(token);
+              } catch {
+                callbacks.onToken(eventData);
+              }
+              break;
+            }
+            case "done":
+              completed = true;
+              callbacks.onComplete();
+              return;
+            case "error":
+              completed = true;
+              try {
+                const errorObj = JSON.parse(eventData);
+                callbacks.onError(
+                  new Error(errorObj.message || "Stream error")
+                );
+              } catch {
+                callbacks.onError(new Error(eventData || "Stream error"));
+              }
+              return;
+            case "ping":
+            case "start":
+              break;
+          }
+        }
        if (done) break;
-        const chunk = decoder.decode(value, { stream: true });
-        callbacks.onToken(chunk);
      }
-
+      if (!completed) {
+        callbacks.onComplete();
+      }
    } finally {
      clearTimeout(timeoutId);
    }
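The `buffer` / `events.pop()` dance above exists because a network chunk can end mid-event. A minimal standalone sketch of the same technique (illustrative; `feed` is a made-up helper, not the package's code):

```ts
// Accumulate chunks; emit only complete, blank-line-terminated SSE events.
let buffer = "";

function feed(chunk: string): string[] {
  buffer += chunk;
  const events = buffer.split("\n\n");
  buffer = events.pop() ?? ""; // the last piece may be a partial event; keep it
  return events.filter((e) => e.trim().length > 0);
}

feed('event: token\ndata: "Hel');         // => [] — incomplete, stays buffered
feed('lo"\n\nevent: done\ndata: {}\n\n'); // => both events, now complete
```

Also worth noting from the hunk: `done` and `error` events `return` without draining the rest of the stream, and the `completed` flag guards the fallback `onComplete()` that fires when the server closes the connection without a terminal `done` or `error` event.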
@@ -107,15 +179,42 @@ function generateId() {
 }
 
 // src/react/useAiChat.ts
+var STORAGE_KEY = "aiUsageProfile";
+var VALID_PROFILES = ["constrained", "balanced", "expanded"];
+var DEFAULT_PROFILE = "balanced";
+function readStoredProfile() {
+  if (typeof window === "undefined") return DEFAULT_PROFILE;
+  try {
+    const raw = localStorage.getItem(STORAGE_KEY);
+    if (raw && VALID_PROFILES.includes(raw)) {
+      return raw;
+    }
+  } catch {
+  }
+  return DEFAULT_PROFILE;
+}
 function useAiChat(options) {
   const {
     endpoint,
     initialMessages = [],
-    maxMessages = 10
+    maxMessages = 10,
+    initialProfile,
+    dailyTokenLimit,
+    maxTokensPerRequest
   } = options;
-  const [
-
+  const [profile, setProfileState] = useState(
+    () => initialProfile ?? readStoredProfile()
   );
+  const setProfile = useCallback((next) => {
+    setProfileState(next);
+    try {
+      if (typeof window !== "undefined") {
+        localStorage.setItem(STORAGE_KEY, next);
+      }
+    } catch {
+    }
+  }, []);
+  const [messages, setMessages] = useState(initialMessages);
   const [isStreaming, setIsStreaming] = useState(false);
   const [error, setError] = useState();
   const clientRef = useRef(null);
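A quick illustration of how `readStoredProfile` validates what it loads — values outside `VALID_PROFILES` never reach a request (hypothetical stored values; browser context assumed):

```ts
localStorage.setItem("aiUsageProfile", "expanded");
readStoredProfile(); // => "expanded"

localStorage.setItem("aiUsageProfile", "verbose"); // not a valid profile
readStoredProfile(); // => "balanced" (DEFAULT_PROFILE)
```

The `typeof window === "undefined"` guard and the empty `catch` blocks keep both the read and the `setProfile` write safe under server-side rendering and in browsers where storage access throws (e.g. some private-browsing modes).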
@@ -144,30 +243,43 @@ function useAiChat(options) {
     setIsStreaming(true);
     const history = [...messages, userMessage];
     try {
-      await clientRef.current.streamChat(
-
-
-
-
-
-
-
-
-
+      await clientRef.current.streamChat(
+        history,
+        {
+          onToken(token) {
+            setMessages(
+              (prev) => prev.map(
+                (msg) => msg.id === assistantMessage.id ? {
+                  ...msg,
+                  content: msg.content + token
+                } : msg
+              )
+            );
+          },
+          onComplete() {
+            setIsStreaming(false);
+          },
+          onError(err) {
+            setError(err.message);
+            setIsStreaming(false);
+          }
         },
-
-
-
-        onError(err) {
-          setError(err.message);
-          setIsStreaming(false);
-        }
-      });
+        profile,
+        dailyTokenLimit != null || maxTokensPerRequest != null ? { dailyTokenLimit: dailyTokenLimit ?? void 0, maxTokensPerRequest: maxTokensPerRequest ?? void 0 } : void 0
+      );
     } catch (err) {
       setError(err.message);
       setIsStreaming(false);
     }
-  }, [
+  }, [
+    endpoint,
+    isStreaming,
+    maxMessages,
+    messages,
+    profile,
+    dailyTokenLimit,
+    maxTokensPerRequest
+  ]);
   const cancel = useCallback(() => {
     clientRef.current?.cancel();
     setIsStreaming(false);
@@ -184,9 +296,11 @@ function useAiChat(options) {
       cancel,
       reset,
       isStreaming,
-      error
+      error,
+      profile,
+      setProfile
     }),
-    [messages, sendMessage, cancel, reset, isStreaming, error]
+    [messages, sendMessage, cancel, reset, isStreaming, error, profile, setProfile]
   );
 }
 export {
package/package.json
CHANGED