@djangocfg/layouts 2.0.6 → 2.0.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,487 @@
1
+ 'use client';
2
+
3
+ import { useState, useCallback, useRef, useEffect } from 'react';
4
+ import { mcpEndpoints, type AIChatMessage, type AIChatSource, type UseAIChatOptions, type UseAIChatReturn } from '../types';
5
+
6
+ function generateId(): string {
7
+ return `msg_${Date.now()}_${Math.random().toString(36).substring(2, 9)}`;
8
+ }
9
+
10
+ function generateThreadId(): string {
11
+ return `thread_${Date.now()}_${Math.random().toString(36).substring(2, 9)}`;
12
+ }
13
+
14
+ function generateUserId(): string {
15
+ return `user_${Date.now()}_${Math.random().toString(36).substring(2, 9)}`;
16
+ }
17
+
18
// localStorage key under which the persisted { threadId, userId } pair is kept.
const STORAGE_KEY = 'djangocfg_chat';
19
+
20
+ /**
21
+ * Get or create persistent chat IDs from localStorage
22
+ */
23
+ function getPersistedIds(): { threadId: string; userId: string } {
24
+ if (typeof window === 'undefined') {
25
+ return { threadId: generateThreadId(), userId: generateUserId() };
26
+ }
27
+
28
+ try {
29
+ const stored = localStorage.getItem(STORAGE_KEY);
30
+ if (stored) {
31
+ const data = JSON.parse(stored);
32
+ if (data.threadId && data.userId) {
33
+ return data;
34
+ }
35
+ }
36
+ } catch {
37
+ // Ignore parse errors
38
+ }
39
+
40
+ // Generate new IDs and persist
41
+ const ids = { threadId: generateThreadId(), userId: generateUserId() };
42
+ try {
43
+ localStorage.setItem(STORAGE_KEY, JSON.stringify(ids));
44
+ } catch {
45
+ // Ignore storage errors
46
+ }
47
+ return ids;
48
+ }
49
+
50
+ /**
51
+ * Update persisted thread ID (after clear)
52
+ */
53
+ function persistThreadId(threadId: string, userId: string): void {
54
+ if (typeof window === 'undefined') return;
55
+ try {
56
+ localStorage.setItem(STORAGE_KEY, JSON.stringify({ threadId, userId }));
57
+ } catch {
58
+ // Ignore storage errors
59
+ }
60
+ }
61
+
62
+ /**
63
+ * Save message to server
64
+ */
65
+ async function saveMessageToServer(threadId: string, userId: string, message: AIChatMessage): Promise<void> {
66
+ try {
67
+ await fetch(`${mcpEndpoints.conversations}/${threadId}/messages`, {
68
+ method: 'POST',
69
+ headers: { 'Content-Type': 'application/json' },
70
+ body: JSON.stringify({
71
+ userId,
72
+ message: {
73
+ id: message.id,
74
+ role: message.role,
75
+ content: message.content,
76
+ timestamp: message.timestamp.getTime(),
77
+ sources: message.sources,
78
+ },
79
+ }),
80
+ });
81
+ } catch (error) {
82
+ console.warn('[Chat] Failed to save message to server:', error);
83
+ }
84
+ }
85
+
86
+ /**
87
+ * Load conversation from server
88
+ */
89
+ async function loadConversationFromServer(threadId: string): Promise<AIChatMessage[]> {
90
+ try {
91
+ const response = await fetch(`${mcpEndpoints.conversations}/${threadId}`);
92
+ if (!response.ok) return [];
93
+
94
+ const data = await response.json();
95
+ if (!data.messages || !Array.isArray(data.messages)) return [];
96
+
97
+ return data.messages.map((m: {
98
+ id: string;
99
+ role: 'user' | 'assistant';
100
+ content: string;
101
+ timestamp: number;
102
+ sources?: AIChatSource[];
103
+ }) => ({
104
+ id: m.id,
105
+ role: m.role,
106
+ content: m.content,
107
+ timestamp: new Date(m.timestamp),
108
+ sources: m.sources,
109
+ }));
110
+ } catch (error) {
111
+ console.warn('[Chat] Failed to load conversation from server:', error);
112
+ return [];
113
+ }
114
+ }
115
+
116
+ /**
117
+ * Delete conversation from server
118
+ */
119
+ async function deleteConversationFromServer(threadId: string): Promise<void> {
120
+ try {
121
+ await fetch(`${mcpEndpoints.conversations}/${threadId}`, {
122
+ method: 'DELETE',
123
+ });
124
+ } catch (error) {
125
+ console.warn('[Chat] Failed to delete conversation from server:', error);
126
+ }
127
+ }
128
+
129
+ /**
130
+ * AI Chat hook with streaming support and server-side history
131
+ * All persistence is handled through API endpoints - no localStorage
132
+ */
133
+ export function useAIChat(options: UseAIChatOptions): UseAIChatReturn {
134
+ const {
135
+ apiEndpoint = mcpEndpoints.chat,
136
+ initialMessages = [],
137
+ onError,
138
+ enableStreaming = true,
139
+ threadId: initialThreadId,
140
+ userId: initialUserId,
141
+ } = options;
142
+
143
+ const [messages, setMessages] = useState<AIChatMessage[]>(initialMessages);
144
+ const [isLoadingHistory, setIsLoadingHistory] = useState(true);
145
+
146
+ // Get persisted IDs from localStorage (or generate new ones)
147
+ const persistedIds = useRef<{ threadId: string; userId: string } | null>(null);
148
+ if (persistedIds.current === null && typeof window !== 'undefined') {
149
+ persistedIds.current = getPersistedIds();
150
+ }
151
+
152
+ const [threadId, setThreadId] = useState<string>(() =>
153
+ initialThreadId || persistedIds.current?.threadId || generateThreadId()
154
+ );
155
+ const [userId] = useState<string>(() =>
156
+ initialUserId || persistedIds.current?.userId || generateUserId()
157
+ );
158
+
159
+ const [isLoading, setIsLoading] = useState(false);
160
+ const [error, setError] = useState<Error | null>(null);
161
+
162
+ const abortControllerRef = useRef<AbortController | null>(null);
163
+
164
+ // Load conversation from server on mount (always try to restore from persisted threadId)
165
+ useEffect(() => {
166
+ if (typeof window === 'undefined') {
167
+ setIsLoadingHistory(false);
168
+ return;
169
+ }
170
+
171
+ const loadHistory = async () => {
172
+ const serverMessages = await loadConversationFromServer(threadId);
173
+ if (serverMessages.length > 0) {
174
+ setMessages(serverMessages);
175
+ }
176
+ setIsLoadingHistory(false);
177
+ };
178
+ loadHistory();
179
+ }, [threadId]);
180
+
181
+ /**
182
+ * Send message with streaming support
183
+ */
184
+ const sendMessage = useCallback(
185
+ async (content: string) => {
186
+ if (!content.trim() || isLoading) return;
187
+
188
+ // Abort any previous request
189
+ if (abortControllerRef.current) {
190
+ abortControllerRef.current.abort();
191
+ }
192
+ abortControllerRef.current = new AbortController();
193
+
194
+ // Add user message
195
+ const userMessage: AIChatMessage = {
196
+ id: generateId(),
197
+ role: 'user',
198
+ content: content.trim(),
199
+ timestamp: new Date(),
200
+ };
201
+
202
+ // Add placeholder for assistant message
203
+ const assistantMessageId = generateId();
204
+ const assistantMessage: AIChatMessage = {
205
+ id: assistantMessageId,
206
+ role: 'assistant',
207
+ content: '',
208
+ timestamp: new Date(),
209
+ isStreaming: true,
210
+ };
211
+
212
+ setMessages((prev) => [...prev, userMessage, assistantMessage]);
213
+ setIsLoading(true);
214
+ setError(null);
215
+
216
+ // Save user message to server
217
+ saveMessageToServer(threadId, userId, userMessage);
218
+
219
+ try {
220
+ // Build messages array from history + current message (for OpenAI format)
221
+ const chatMessages = [
222
+ ...messages
223
+ .filter((m) => m.role !== 'system')
224
+ .slice(-10) // Keep last 10 messages for context
225
+ .map((m) => ({
226
+ role: m.role as 'user' | 'assistant',
227
+ content: m.content,
228
+ })),
229
+ { role: 'user' as const, content },
230
+ ];
231
+
232
+ const response = await fetch(apiEndpoint, {
233
+ method: 'POST',
234
+ headers: {
235
+ 'Content-Type': 'application/json',
236
+ },
237
+ body: JSON.stringify({
238
+ messages: chatMessages,
239
+ stream: enableStreaming,
240
+ }),
241
+ signal: abortControllerRef.current.signal,
242
+ });
243
+
244
+ if (!response.ok) {
245
+ throw new Error(`HTTP error: ${response.status}`);
246
+ }
247
+
248
+ if (enableStreaming && response.headers.get('content-type')?.includes('text/event-stream')) {
249
+ // Handle streaming response
250
+ await handleStreamingResponse(response, assistantMessageId);
251
+ } else {
252
+ // Handle non-streaming response
253
+ const data = await response.json();
254
+
255
+ if (!data.success) {
256
+ throw new Error(data.error || 'Failed to get response');
257
+ }
258
+
259
+ // Update thread ID if returned
260
+ if (data.threadId && data.threadId !== threadId) {
261
+ setThreadId(data.threadId);
262
+ }
263
+
264
+ // Extract sources
265
+ const sources: AIChatSource[] =
266
+ data.sources?.map((s: { title: string; path: string; url?: string; section?: string; score?: number }) => ({
267
+ title: s.title,
268
+ path: s.path,
269
+ url: s.url,
270
+ section: s.section,
271
+ score: s.score,
272
+ })) || [];
273
+
274
+ const finalContent = data.content || 'I found some relevant documentation.';
275
+
276
+ // Update assistant message
277
+ setMessages((prev) =>
278
+ prev.map((m) =>
279
+ m.id === assistantMessageId
280
+ ? {
281
+ ...m,
282
+ content: finalContent,
283
+ sources,
284
+ isStreaming: false,
285
+ }
286
+ : m
287
+ )
288
+ );
289
+
290
+ // Save assistant message to server
291
+ saveMessageToServer(threadId, userId, {
292
+ id: assistantMessageId,
293
+ role: 'assistant',
294
+ content: finalContent,
295
+ timestamp: new Date(),
296
+ sources,
297
+ });
298
+ }
299
+ } catch (err) {
300
+ if (err instanceof Error && err.name === 'AbortError') {
301
+ // Request was aborted, remove the assistant message
302
+ setMessages((prev) => prev.filter((m) => m.id !== assistantMessageId));
303
+ return;
304
+ }
305
+
306
+ const error = err instanceof Error ? err : new Error('Unknown error');
307
+ setError(error);
308
+ onError?.(error);
309
+
310
+ // Update assistant message with error
311
+ setMessages((prev) =>
312
+ prev.map((m) =>
313
+ m.id === assistantMessageId
314
+ ? {
315
+ ...m,
316
+ content: `Sorry, I encountered an error: ${error.message}. Please try again.`,
317
+ isStreaming: false,
318
+ }
319
+ : m
320
+ )
321
+ );
322
+ } finally {
323
+ setIsLoading(false);
324
+ abortControllerRef.current = null;
325
+ }
326
+ },
327
+ [apiEndpoint, isLoading, messages, threadId, userId, enableStreaming, onError]
328
+ );
329
+
330
+ /**
331
+ * Handle streaming SSE response
332
+ */
333
+ const handleStreamingResponse = async (response: Response, messageId: string) => {
334
+ const reader = response.body?.getReader();
335
+ if (!reader) {
336
+ throw new Error('No response body');
337
+ }
338
+
339
+ const decoder = new TextDecoder();
340
+ let buffer = '';
341
+ let fullContent = '';
342
+ const sources: AIChatSource[] = [];
343
+
344
+ try {
345
+ while (true) {
346
+ const { done, value } = await reader.read();
347
+ if (done) break;
348
+
349
+ buffer += decoder.decode(value, { stream: true });
350
+
351
+ // Process complete events
352
+ const lines = buffer.split('\n');
353
+ buffer = lines.pop() || ''; // Keep incomplete line in buffer
354
+
355
+ for (const line of lines) {
356
+ if (line.startsWith('data: ')) {
357
+ const data = line.slice(6);
358
+
359
+ if (data === '[DONE]') {
360
+ // Stream complete
361
+ setMessages((prev) =>
362
+ prev.map((m) =>
363
+ m.id === messageId
364
+ ? {
365
+ ...m,
366
+ content: fullContent,
367
+ sources,
368
+ isStreaming: false,
369
+ }
370
+ : m
371
+ )
372
+ );
373
+
374
+ // Save final assistant message to server
375
+ saveMessageToServer(threadId, userId, {
376
+ id: messageId,
377
+ role: 'assistant',
378
+ content: fullContent,
379
+ timestamp: new Date(),
380
+ sources,
381
+ });
382
+ return;
383
+ }
384
+
385
+ try {
386
+ const parsed = JSON.parse(data);
387
+
388
+ if (parsed.type === 'text' && parsed.content) {
389
+ fullContent += parsed.content;
390
+ // Update message with current content
391
+ setMessages((prev) =>
392
+ prev.map((m) =>
393
+ m.id === messageId
394
+ ? {
395
+ ...m,
396
+ content: fullContent,
397
+ isStreaming: true,
398
+ }
399
+ : m
400
+ )
401
+ );
402
+ } else if (parsed.type === 'source' && parsed.source) {
403
+ sources.push({
404
+ title: parsed.source.title,
405
+ path: parsed.source.path,
406
+ url: parsed.source.url,
407
+ section: parsed.source.section,
408
+ score: parsed.source.score,
409
+ });
410
+ } else if (parsed.type === 'done') {
411
+ // Update with final usage info if needed
412
+ setMessages((prev) =>
413
+ prev.map((m) =>
414
+ m.id === messageId
415
+ ? {
416
+ ...m,
417
+ content: fullContent,
418
+ sources,
419
+ isStreaming: false,
420
+ }
421
+ : m
422
+ )
423
+ );
424
+
425
+ // Save final assistant message to server
426
+ saveMessageToServer(threadId, userId, {
427
+ id: messageId,
428
+ role: 'assistant',
429
+ content: fullContent,
430
+ timestamp: new Date(),
431
+ sources,
432
+ });
433
+ } else if (parsed.type === 'error') {
434
+ throw new Error(parsed.error || 'Stream error');
435
+ }
436
+ } catch {
437
+ // Ignore parse errors for individual events
438
+ }
439
+ }
440
+ }
441
+ }
442
+ } finally {
443
+ reader.releaseLock();
444
+ }
445
+ };
446
+
447
+ /**
448
+ * Clear all messages and start new conversation
449
+ */
450
+ const clearMessages = useCallback(async () => {
451
+ // Abort any ongoing request
452
+ if (abortControllerRef.current) {
453
+ abortControllerRef.current.abort();
454
+ }
455
+
456
+ // Delete conversation from server
457
+ await deleteConversationFromServer(threadId);
458
+
459
+ setMessages([]);
460
+ setError(null);
461
+
462
+ // Generate new thread ID for fresh conversation and persist it
463
+ const newThreadId = generateThreadId();
464
+ setThreadId(newThreadId);
465
+ persistThreadId(newThreadId, userId);
466
+ }, [threadId, userId]);
467
+
468
+ /**
469
+ * Stop current streaming response
470
+ */
471
+ const stopStreaming = useCallback(() => {
472
+ if (abortControllerRef.current) {
473
+ abortControllerRef.current.abort();
474
+ }
475
+ }, []);
476
+
477
+ return {
478
+ messages,
479
+ isLoading: isLoading || isLoadingHistory,
480
+ error,
481
+ threadId,
482
+ userId,
483
+ sendMessage,
484
+ clearMessages,
485
+ stopStreaming,
486
+ };
487
+ }