chat-nest-sdk 1.1.0 → 1.1.2

This diff reflects the changes between two publicly released versions of the package, as published to their public registry, and is provided for informational purposes only.
package/README.md CHANGED
@@ -45,6 +45,9 @@ import { useAiChat } from "chat-nest-sdk";
 function App() {
   const chat = useAiChat({
     endpoint: "http://localhost:3001/api/chat",
+    initialProfile: "balanced",
+    dailyTokenLimit: 50_000, // optional: cap daily tokens
+    maxTokensPerRequest: 4096, // optional: cap per request
   });
 
   return (
@@ -64,6 +67,11 @@ function App() {
       <button onClick={chat.cancel}>
         Cancel
       </button>
+      <select value={chat.profile} onChange={(e) => chat.setProfile(e.target.value)}>
+        <option value="constrained">Constrained</option>
+        <option value="balanced">Balanced</option>
+        <option value="expanded">Expanded</option>
+      </select>
     </>
   );
 }
@@ -75,17 +83,27 @@ function App() {
 
 ### useAiChat(options)
 
-| Field    | Type   | Description          |
-| -------- | ------ | -------------------- |
-| endpoint | string | Backend API endpoint |
-
-| Field             | Description              |
-| ----------------- | ------------------------ |
-| messages          | Chat message list        |
-| sendMessage(text) | Sends user message       |
-| cancel()          | Cancels active request   |
-| isStreaming       | Whether stream is active |
-| error             | Last error               |
+| Field               | Type           | Description                                                            |
+| ------------------- | -------------- | ---------------------------------------------------------------------- |
+| endpoint            | string         | Backend API endpoint                                                    |
+| initialMessages     | Message[]      | Optional. Initial messages                                              |
+| maxMessages         | number         | Optional. Max messages to keep in context (default 10)                  |
+| initialProfile      | AiUsageProfile | Optional. Profile: `constrained`, `balanced`, `expanded`                |
+| dailyTokenLimit     | number         | Optional. Cap daily tokens; server applies min(profile limit, this)     |
+| maxTokensPerRequest | number         | Optional. Cap tokens per request (input + output); server applies min   |
+
+### Return value
+
+| Field             | Description                            |
+| ----------------- | -------------------------------------- |
+| messages          | Chat message list                      |
+| sendMessage(text) | Sends user message                     |
+| cancel()          | Cancels active request                 |
+| isStreaming       | Whether stream is active               |
+| error             | Last error                             |
+| profile           | Current profile                        |
+| setProfile(p)     | Set profile; persisted to localStorage |
+| reset()           | Reset messages and error               |
 
 ---
 
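To make the new README surface concrete, here is a minimal sketch combining the documented options and return values. The component name and endpoint path are illustrative, not part of the package:

```tsx
import { useAiChat } from "chat-nest-sdk";

// Hypothetical component; only useAiChat and its documented fields come from the SDK.
function UsageControls() {
  const chat = useAiChat({
    endpoint: "/api/chat",         // illustrative endpoint
    initialProfile: "constrained", // takes precedence over any stored profile
    dailyTokenLimit: 25_000,       // server applies min(profile limit, this)
    maxTokensPerRequest: 2048,
  });

  // setProfile persists the selection to localStorage, so it survives reloads
  return (
    <button onClick={() => chat.setProfile("expanded")}>
      Current profile: {chat.profile}
    </button>
  );
}
```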
package/dist/index.d.ts CHANGED
@@ -1,4 +1,5 @@
-import { UseAIChatOptions, UseAIChatReturn, AIClient, AiClientConfig, Message, StreamCallbacks } from 'chat-nest-core';
+import { UseAIChatOptions, UseAIChatReturn, AIClient, AiClientConfig, Message, StreamCallbacks, AiUsageProfile, ChatRequestOptions } from 'chat-nest-core';
+export { AiUsageProfile } from 'chat-nest-core';
 
 declare function useAiChat(options: UseAIChatOptions): UseAIChatReturn;
 
@@ -6,7 +7,7 @@ declare class FetchAiClient implements AIClient {
     private abortController?;
     private config;
     constructor(config: AiClientConfig);
-    streamChat(messages: Message[], callbacks: StreamCallbacks): Promise<void>;
+    streamChat(messages: Message[], callbacks: StreamCallbacks, profile: AiUsageProfile, options?: ChatRequestOptions): Promise<void>;
     cancel(): void;
     private executeStream;
     private normalizeError;
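The widened `streamChat` signature means any code driving `FetchAiClient` directly must now pass a profile; the options bag stays optional. A sketch under two assumptions not spelled out in this diff: that `FetchAiClient` is re-exported from the package entry point (the `export { ... }` list is truncated below) and that `AiClientConfig` accepts an `endpoint` field:

```ts
import type { Message, StreamCallbacks } from "chat-nest-core";
import { FetchAiClient } from "chat-nest-sdk"; // assumed re-export

// endpoint field assumed from AiClientConfig
const client = new FetchAiClient({ endpoint: "/api/chat" });

let text = "";
const callbacks: StreamCallbacks = {
  onToken: (token) => { text += token; },
  onComplete: () => console.log(text),
  onError: (err) => console.error(err),
};

const history: Message[] = [/* prior messages */];

// profile is now a required third argument; token caps remain optional
await client.streamChat(history, callbacks, "balanced", {
  maxTokensPerRequest: 1024,
});
```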
package/dist/index.js CHANGED
@@ -4,6 +4,16 @@ import {
   MessageRole
 } from "chat-nest-core";
 
+// src/core/promptPresets.ts
+var PROFILE_SYSTEM_PROMPTS = {
+  constrained: "You are a concise assistant. Answer in the fewest words possible. No explanations or preamble. Direct answers only. No examples needed",
+  balanced: "You are a helpful assistant. Be clear and concise. Give brief explanations when they add value, but keep responses focused.",
+  expanded: "You are a thorough assistant. Explain your reasoning step by step when useful. Include relevant examples and detail. Prioritize clarity and completeness."
+};
+function getSystemPromptForProfile(profile) {
+  return PROFILE_SYSTEM_PROMPTS[profile];
+}
+
 // src/core/aiClient.ts
 var DEFAULT_TIMEOUT = 3e4;
 var DEFAULT_RETRIES = 2;
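One detail worth noting in the preset map: `getSystemPromptForProfile` is a bare object lookup with no fallback. A self-contained restatement showing the edge case (preset strings elided):

```ts
// Mirror of the lookup above, with preset strings elided:
const PROFILE_SYSTEM_PROMPTS: Record<string, string> = {
  constrained: "…",
  balanced: "…",
  expanded: "…",
};

function getSystemPromptForProfile(profile: string): string | undefined {
  return PROFILE_SYSTEM_PROMPTS[profile];
}

getSystemPromptForProfile("balanced"); // → the balanced preset
getSystemPromptForProfile("verbose");  // → undefined; JSON.stringify later drops
// an undefined systemPrompt from the request body, so the server sees no prompt
```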
@@ -22,11 +32,11 @@ var FetchAiClient = class {
       maxRetries: config.maxRetries ?? DEFAULT_RETRIES
     };
   }
-  async streamChat(messages, callbacks) {
+  async streamChat(messages, callbacks, profile, options) {
     let attempt = 0;
     while (attempt <= this.config.maxRetries) {
       try {
-        await this.executeStream(messages, callbacks);
+        await this.executeStream(messages, callbacks, profile, options);
         return;
       } catch (error) {
         if (error?.name === "AbortError") {
@@ -48,7 +58,7 @@ var FetchAiClient = class {
   cancel() {
     this.abortController?.abort();
   }
-  async executeStream(messages, callbacks) {
+  async executeStream(messages, callbacks, profile, options) {
     this.abortController = new AbortController();
     const timeoutId = setTimeout(
       () => this.abortController?.abort(),
@@ -63,7 +73,17 @@ var FetchAiClient = class {
         ...this.config.headers
       },
       signal: this.abortController.signal,
-      body: JSON.stringify({ messages })
+      body: JSON.stringify({
+        messages,
+        profile,
+        systemPrompt: getSystemPromptForProfile(profile),
+        ...options?.dailyTokenLimit != null && {
+          dailyTokenLimit: options.dailyTokenLimit
+        },
+        ...options?.maxTokensPerRequest != null && {
+          maxTokensPerRequest: options.maxTokensPerRequest
+        }
+      })
     });
     if (!response.ok) {
       if (response.status >= 400 && response.status < 500) {
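For anyone implementing the backend side, the serialized POST body now carries the profile and its system prompt, with the two caps included only when the caller set them. An example payload under those rules (the `Message` shape is inferred from the hook code, not spelled out in this diff):

```ts
// What the client POSTs to the configured endpoint after this change:
const examplePayload = {
  messages: [{ id: "m1", role: "user", content: "Hello" }], // Message shape assumed
  profile: "balanced",
  systemPrompt:
    "You are a helpful assistant. Be clear and concise. " +
    "Give brief explanations when they add value, but keep responses focused.",
  dailyTokenLimit: 50_000,   // omitted entirely when the caller never set it
  maxTokensPerRequest: 4096, // omitted entirely when the caller never set it
};
```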
@@ -159,15 +179,42 @@ function generateId() {
 }
 
 // src/react/useAiChat.ts
+var STORAGE_KEY = "aiUsageProfile";
+var VALID_PROFILES = ["constrained", "balanced", "expanded"];
+var DEFAULT_PROFILE = "balanced";
+function readStoredProfile() {
+  if (typeof window === "undefined") return DEFAULT_PROFILE;
+  try {
+    const raw = localStorage.getItem(STORAGE_KEY);
+    if (raw && VALID_PROFILES.includes(raw)) {
+      return raw;
+    }
+  } catch {
+  }
+  return DEFAULT_PROFILE;
+}
 function useAiChat(options) {
   const {
     endpoint,
     initialMessages = [],
-    maxMessages = 10
+    maxMessages = 10,
+    initialProfile,
+    dailyTokenLimit,
+    maxTokensPerRequest
   } = options;
-  const [messages, setMessages] = useState(
-    initialMessages
+  const [profile, setProfileState] = useState(
+    () => initialProfile ?? readStoredProfile()
   );
+  const setProfile = useCallback((next) => {
+    setProfileState(next);
+    try {
+      if (typeof window !== "undefined") {
+        localStorage.setItem(STORAGE_KEY, next);
+      }
+    } catch {
+    }
+  }, []);
+  const [messages, setMessages] = useState(initialMessages);
   const [isStreaming, setIsStreaming] = useState(false);
   const [error, setError] = useState();
   const clientRef = useRef(null);
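The profile state above amounts to a small write-through cache on `localStorage`. The precedence on mount, and the failure mode, follow directly from the code:

```ts
// Initial profile resolution, as implemented by readStoredProfile:
//   1. options.initialProfile, when provided
//   2. localStorage["aiUsageProfile"], when it is "constrained",
//      "balanced", or "expanded"
//   3. "balanced" (DEFAULT_PROFILE)
// Reads and writes are wrapped in try/catch, so blocked or unavailable
// storage silently falls back to the default rather than throwing.

localStorage.setItem("aiUsageProfile", "bogus");
// → the next mount ignores the invalid value and starts as "balanced"
```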
@@ -196,30 +243,43 @@ function useAiChat(options) {
     setIsStreaming(true);
     const history = [...messages, userMessage];
     try {
-      await clientRef.current.streamChat(history, {
-        onToken(token) {
-          setMessages(
-            (prev) => prev.map(
-              (msg) => msg.id === assistantMessage.id ? {
-                ...msg,
-                content: msg.content + token
-              } : msg
-            )
-          );
-        },
-        onComplete() {
-          setIsStreaming(false);
+      await clientRef.current.streamChat(
+        history,
+        {
+          onToken(token) {
+            setMessages(
+              (prev) => prev.map(
+                (msg) => msg.id === assistantMessage.id ? {
+                  ...msg,
+                  content: msg.content + token
+                } : msg
+              )
+            );
+          },
+          onComplete() {
+            setIsStreaming(false);
+          },
+          onError(err) {
+            setError(err.message);
+            setIsStreaming(false);
+          }
         },
-        onError(err) {
-          setError(err.message);
-          setIsStreaming(false);
-        }
-      });
+        profile,
+        dailyTokenLimit != null || maxTokensPerRequest != null ? { dailyTokenLimit: dailyTokenLimit ?? void 0, maxTokensPerRequest: maxTokensPerRequest ?? void 0 } : void 0
+      );
     } catch (err) {
       setError(err.message);
       setIsStreaming(false);
     }
-  }, [endpoint, isStreaming, maxMessages, messages]);
+  }, [
+    endpoint,
+    isStreaming,
+    maxMessages,
+    messages,
+    profile,
+    dailyTokenLimit,
+    maxTokensPerRequest
+  ]);
   const cancel = useCallback(() => {
     clientRef.current?.cancel();
     setIsStreaming(false);
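Note the conditional fourth argument: when neither cap is configured the hook passes `undefined` rather than an empty object, and when only one cap is set the other key is `undefined` and is stripped again by the `!= null` guards in `executeStream`. The resulting call shapes, reusing the `client`, `history`, and `callbacks` sketch from the index.d.ts section above:

```ts
// Neither cap configured → no options bag at all (void 0):
await client.streamChat(history, callbacks, "balanced");

// One cap configured → the other key is undefined and never reaches the body:
await client.streamChat(history, callbacks, "balanced", {
  dailyTokenLimit: 50_000,
  maxTokensPerRequest: undefined,
});
```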
@@ -236,9 +296,11 @@ function useAiChat(options) {
       cancel,
       reset,
       isStreaming,
-      error
+      error,
+      profile,
+      setProfile
     }),
-    [messages, sendMessage, cancel, reset, isStreaming, error]
+    [messages, sendMessage, cancel, reset, isStreaming, error, profile, setProfile]
   );
 }
 export {
package/package.json CHANGED
@@ -1,7 +1,7 @@
 {
   "name": "chat-nest-sdk",
   "description": "Frontend React SDK for consuming streaming AI APIs with cancellation and retry safety.",
-  "version": "1.1.0",
+  "version": "1.1.2",
   "type": "module",
   "main": "dist/index.js",
   "types": "dist/index.d.ts",