@curaious/uno-converse 0.1.7

This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the differences between package versions as they appear in their public registries.
package/dist/index.js ADDED
@@ -0,0 +1,860 @@
+ import axios from 'axios';
+ import { useMemo, useState, useRef, useCallback, useEffect } from 'react';
+
+ // Message Roles
+ var Role;
+ (function (Role) {
+     Role["User"] = "user";
+     Role["Developer"] = "developer";
+     Role["System"] = "system";
+     Role["Assistant"] = "assistant";
+ })(Role || (Role = {}));
+ // Message Types
+ var MessageType;
+ (function (MessageType) {
+     MessageType["Message"] = "message";
+     MessageType["FunctionCall"] = "function_call";
+     MessageType["FunctionCallOutput"] = "function_call_output";
+     MessageType["Reasoning"] = "reasoning";
+     MessageType["ImageGenerationCall"] = "image_generation_call";
+     MessageType["FunctionCallApprovalResponse"] = "function_call_approval_response";
+ })(MessageType || (MessageType = {}));
+ // Content Types
+ var ContentType;
+ (function (ContentType) {
+     ContentType["InputText"] = "input_text";
+     ContentType["OutputText"] = "output_text";
+     ContentType["SummaryText"] = "summary_text";
+     ContentType["InputImage"] = "input_image";
+ })(ContentType || (ContentType = {}));
+ // Chunk Types
+ var ChunkType;
+ (function (ChunkType) {
+     ChunkType["ChunkTypeRunCreated"] = "run.created";
+     ChunkType["ChunkTypeRunInProgress"] = "run.in_progress";
+     ChunkType["ChunkTypeRunPaused"] = "run.paused";
+     ChunkType["ChunkTypeRunCompleted"] = "run.completed";
+     ChunkType["ChunkTypeResponseCreated"] = "response.created";
+     ChunkType["ChunkTypeResponseInProgress"] = "response.in_progress";
+     ChunkType["ChunkTypeResponseCompleted"] = "response.completed";
+     ChunkType["ChunkTypeOutputItemAdded"] = "response.output_item.added";
+     ChunkType["ChunkTypeOutputItemDone"] = "response.output_item.done";
+     ChunkType["ChunkTypeContentPartAdded"] = "response.content_part.added";
+     ChunkType["ChunkTypeContentPartDone"] = "response.content_part.done";
+     ChunkType["ChunkTypeOutputTextDelta"] = "response.output_text.delta";
+     ChunkType["ChunkTypeOutputTextDone"] = "response.output_text.done";
+     ChunkType["ChunkTypeFunctionCallArgumentsDelta"] = "response.function_call_arguments.delta";
+     ChunkType["ChunkTypeFunctionCallArgumentsDone"] = "response.function_call_arguments.done";
+     ChunkType["ChunkTypeReasoningSummaryPartAdded"] = "response.reasoning_summary_part.added";
+     ChunkType["ChunkTypeReasoningSummaryPartDone"] = "response.reasoning_summary_part.done";
+     ChunkType["ChunkTypeReasoningSummaryTextDelta"] = "response.reasoning_summary_text.delta";
+     ChunkType["ChunkTypeReasoningSummaryTextDone"] = "response.reasoning_summary_text.done";
+     // Image generation
+     ChunkType["ChunkTypeImageGenerationCallInProgress"] = "response.image_generation_call.in_progress";
+     ChunkType["ChunkTypeImageGenerationCallGenerating"] = "response.image_generation_call.generating";
+     ChunkType["ChunkTypeImageGenerationCallPartialImage"] = "response.image_generation_call.partial_image";
+     // Extra
+     ChunkType["ChunkTypeFunctionCallOutput"] = "function_call_output";
+ })(ChunkType || (ChunkType = {}));
+ // Type guards
+ function isEasyMessage(msg) {
+     return msg.type === MessageType.Message && 'content' in msg && (typeof msg.content === 'string' || Array.isArray(msg.content));
+ }
+ function isInputMessage(msg) {
+     return msg.type === MessageType.Message && 'content' in msg && Array.isArray(msg.content);
+ }
+ function isFunctionCallMessage(msg) {
+     return msg.type === MessageType.FunctionCall;
+ }
+ function isFunctionCallOutputMessage(msg) {
+     return msg.type === MessageType.FunctionCallOutput;
+ }
+ function isReasoningMessage(msg) {
+     return msg.type === MessageType.Reasoning;
+ }
+ function isImageGenerationCallMessage(msg) {
+     return msg.type === MessageType.ImageGenerationCall;
+ }
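+
+ // Example (sketch): the guards above can be used to narrow a streamed message
+ // before rendering; `msg` is assumed to be one entry of the `messages` array
+ // that ChunkProcessor (below) builds up.
+ //
+ //   if (isFunctionCallMessage(msg)) {
+ //       console.log(msg.name, msg.arguments);
+ //   } else if (isReasoningMessage(msg)) {
+ //       console.log((msg.summary || []).map(s => s.text).join('\n'));
+ //   } else if (isEasyMessage(msg)) {
+ //       console.log(msg.content);
+ //   }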
+
+ /**
+  * Processes streaming chunks from the LLM response.
+  * Builds up messages incrementally as chunks arrive.
+  *
+  * @example
+  * ```ts
+  * const processor = new ChunkProcessor(
+  *   'conv-123',
+  *   'thread-456',
+  *   (conversation) => {
+  *     // Update UI with new conversation state
+  *     setConversation(conversation);
+  *   }
+  * );
+  *
+  * // Process incoming chunks
+  * processor.processChunk(jsonData);
+  *
+  * // Get final conversation when done
+  * const finalConversation = processor.getConversation();
+  * ```
+  */
+ class ChunkProcessor {
+     constructor(conversationId, threadId, onChange) {
+         this.messages = [];
+         this.currentOutputItem = null;
+         this.conversation = {
+             conversation_id: conversationId,
+             thread_id: threadId,
+             message_id: '',
+             messages: [],
+             meta: {},
+         };
+         this._onChange = onChange;
+     }
+     /**
+      * Get all processed messages
+      */
+     getMessages() {
+         return this.messages;
+     }
+     /**
+      * Get the current conversation state
+      */
+     getConversation() {
+         return this.conversation;
+     }
+     emitChange() {
+         this.conversation.messages = [...this.messages];
+         this._onChange({ ...this.conversation });
+     }
+     /**
+      * Process a raw JSON chunk from the SSE stream
+      */
+     processChunk(data) {
+         try {
+             const chunk = JSON.parse(data);
+             this.handleChunk(chunk);
+         }
+         catch (e) {
+             console.error('Failed to parse chunk:', e, data);
+         }
+     }
+     handleChunk(chunk) {
+         switch (chunk.type) {
+             // Run lifecycle
+             case ChunkType.ChunkTypeRunCreated:
+             case ChunkType.ChunkTypeRunInProgress:
+             case ChunkType.ChunkTypeRunCompleted:
+             case ChunkType.ChunkTypeRunPaused:
+                 this.conversation.meta.run_state = chunk.run_state;
+                 this.conversation.message_id = chunk.run_state.id;
+                 if (chunk.type !== ChunkType.ChunkTypeRunCreated && chunk.type !== ChunkType.ChunkTypeRunInProgress) {
+                     this.emitChange();
+                 }
+                 break;
+             // Response lifecycle
+             case ChunkType.ChunkTypeResponseCreated:
+             case ChunkType.ChunkTypeResponseInProgress:
+                 break;
+             case ChunkType.ChunkTypeResponseCompleted:
+                 if (chunk.response?.usage) {
+                     this.conversation.meta.usage = chunk.response.usage;
+                     this.emitChange();
+                 }
+                 break;
+             // Output item lifecycle
+             case ChunkType.ChunkTypeOutputItemAdded:
+                 this.handleOutputItemAdded(chunk);
+                 break;
+             case ChunkType.ChunkTypeOutputItemDone:
+                 break;
+             // Content parts
+             case ChunkType.ChunkTypeContentPartAdded:
+                 this.handleContentPartAdded(chunk);
+                 break;
+             case ChunkType.ChunkTypeContentPartDone:
+                 break;
+             // Text deltas
+             case ChunkType.ChunkTypeOutputTextDelta:
+                 this.handleOutputTextDelta(chunk);
+                 break;
+             case ChunkType.ChunkTypeOutputTextDone:
+                 break;
+             // Reasoning summary
+             case ChunkType.ChunkTypeReasoningSummaryPartAdded:
+                 this.handleReasoningSummaryPartAdded(chunk);
+                 break;
+             case ChunkType.ChunkTypeReasoningSummaryPartDone:
+                 break;
+             case ChunkType.ChunkTypeReasoningSummaryTextDelta:
+                 this.handleReasoningSummaryTextDelta(chunk);
+                 break;
+             case ChunkType.ChunkTypeReasoningSummaryTextDone:
+                 break;
+             // Function calls
+             case ChunkType.ChunkTypeFunctionCallArgumentsDelta:
+                 this.handleFunctionCallArgumentsDelta(chunk);
+                 break;
+             case ChunkType.ChunkTypeFunctionCallArgumentsDone:
+                 break;
+             case ChunkType.ChunkTypeFunctionCallOutput:
+                 this.handleFunctionCallOutput(chunk);
+                 break;
+             // Image Generation Calls
+             case ChunkType.ChunkTypeImageGenerationCallInProgress:
+                 break;
+             case ChunkType.ChunkTypeImageGenerationCallGenerating:
+                 break;
+             case ChunkType.ChunkTypeImageGenerationCallPartialImage:
+                 this.handleImageGenerationCallPartialImage(chunk);
+                 break;
+         }
+     }
+     handleOutputItemAdded(chunk) {
+         if (!chunk.item)
+             return;
+         switch (chunk.item.type) {
+             case "message":
+                 this.currentOutputItem = {
+                     id: chunk.item.id,
+                     type: "message",
+                     role: chunk.item.role || "assistant",
+                     content: [],
+                 };
+                 break;
+             case "function_call":
+                 this.currentOutputItem = {
+                     id: chunk.item.id,
+                     type: "function_call",
+                     name: chunk.item.name || "",
+                     call_id: chunk.item.call_id || "",
+                     arguments: "",
+                 };
+                 break;
+             case "reasoning":
+                 this.currentOutputItem = {
+                     id: chunk.item.id,
+                     type: "reasoning",
+                     summary: [],
+                 };
+                 break;
+             case "image_generation_call":
+                 this.currentOutputItem = {
+                     id: chunk.item.id,
+                     type: "image_generation_call",
+                     status: chunk.item.status,
+                 };
+                 break;
+         }
+         if (this.currentOutputItem) {
+             this.messages.push(this.currentOutputItem);
+             this.emitChange();
+         }
+     }
+     handleContentPartAdded(chunk) {
+         if (!this.currentOutputItem || this.currentOutputItem.type !== "message")
+             return;
+         const message = this.currentOutputItem;
+         if (chunk.part?.type === ContentType.OutputText) {
+             message.content = message.content || [];
+             message.content.push({
+                 type: ContentType.OutputText,
+                 text: "",
+             });
+             this.emitChange();
+         }
+     }
+     handleOutputTextDelta(chunk) {
+         if (!this.currentOutputItem || this.currentOutputItem.type !== "message")
+             return;
+         const message = this.currentOutputItem;
+         const contents = message.content;
+         if (!contents?.length || !chunk.delta)
+             return;
+         const lastContent = contents[contents.length - 1];
+         if (lastContent && 'text' in lastContent) {
+             lastContent.text += chunk.delta;
+             this.emitChange();
+         }
+     }
+     handleReasoningSummaryPartAdded(chunk) {
+         if (!this.currentOutputItem || this.currentOutputItem.type !== "reasoning")
+             return;
+         const reasoning = this.currentOutputItem;
+         if (chunk.part?.type === ContentType.SummaryText) {
+             reasoning.summary = reasoning.summary || [];
+             reasoning.summary.push({
+                 type: ContentType.SummaryText,
+                 text: "",
+             });
+             this.emitChange();
+         }
+     }
+     handleReasoningSummaryTextDelta(chunk) {
+         if (!this.currentOutputItem || this.currentOutputItem.type !== "reasoning")
+             return;
+         const reasoning = this.currentOutputItem;
+         const summaries = reasoning.summary;
+         if (!summaries?.length || !chunk.delta)
+             return;
+         const lastSummary = summaries[summaries.length - 1];
+         if (lastSummary) {
+             lastSummary.text += chunk.delta;
+             this.emitChange();
+         }
+     }
+     handleFunctionCallArgumentsDelta(chunk) {
+         if (!this.currentOutputItem || this.currentOutputItem.type !== "function_call")
+             return;
+         const functionCall = this.currentOutputItem;
+         functionCall.arguments += chunk.delta || "";
+         this.emitChange();
+     }
+     handleFunctionCallOutput(chunk) {
+         this.currentOutputItem = chunk;
+         this.messages.push(this.currentOutputItem);
+         this.emitChange();
+     }
+     handleImageGenerationCallPartialImage(chunk) {
+         if (!this.currentOutputItem || this.currentOutputItem.type !== "image_generation_call")
+             return;
+         const image = this.currentOutputItem;
+         image.result = chunk.partial_image_b64;
+         image.quality = chunk.quality;
+         image.size = chunk.size;
+         image.output_format = chunk.output_format;
+         image.background = chunk.background;
+         this.emitChange();
+     }
+ }
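+
+ // Note on the update model above: each handler mutates currentOutputItem in
+ // place and then calls emitChange(), which copies the accumulated messages into
+ // the conversation and hands a shallow copy to the onChange callback, so a
+ // consumer that stores it in React state receives a new object reference on
+ // every update.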
+
+ /**
+  * Streams Server-Sent Events (SSE) from a URL.
+  * Parses SSE frames and calls onChunk for each data payload.
+  *
+  * @param url - The URL to stream from
+  * @param requestOptions - Fetch request options
+  * @param callbacks - SSE event callbacks
+  * @param abortSignal - Optional signal to abort the stream
+  *
+  * @example
+  * ```ts
+  * await streamSSE(
+  *   'https://api.example.com/stream',
+  *   { method: 'POST', body: JSON.stringify({ message: 'Hello' }) },
+  *   {
+  *     onChunk: (data) => console.log('Received:', data),
+  *     onComplete: () => console.log('Done'),
+  *     onError: (err) => console.error('Error:', err),
+  *   }
+  * );
+  * ```
+  */
+ async function streamSSE(url, requestOptions, callbacks, abortSignal) {
+     try {
+         const response = await fetch(url, {
+             ...requestOptions,
+             signal: abortSignal,
+         });
+         if (!response.ok) {
+             throw new Error(`HTTP error! status: ${response.status}`);
+         }
+         if (!response.body) {
+             throw new Error('Response body is null');
+         }
+         const reader = response.body.getReader();
+         const decoder = new TextDecoder();
+         let buffer = '';
+         while (true) {
+             const { value, done } = await reader.read();
+             if (done)
+                 break;
+             buffer += decoder.decode(value, { stream: true });
+             // Parse SSE frames: split on double newline
+             let idx;
+             while ((idx = buffer.indexOf('\n\n')) !== -1) {
+                 const frame = buffer.slice(0, idx);
+                 buffer = buffer.slice(idx + 2);
+                 // Join all data: lines in the frame
+                 const data = frame
+                     .split('\n')
+                     .filter(line => line.startsWith('data:'))
+                     .map(line => line.slice(5).trim())
+                     .join('\n');
+                 if (data) {
+                     callbacks.onChunk(data);
+                 }
+             }
+         }
+         // Final flush of decoder state
+         decoder.decode();
+         callbacks.onComplete?.();
+     }
+     catch (error) {
+         if (error.name === 'AbortError') {
+             return;
+         }
+         callbacks.onError?.(error);
+     }
+ }
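+
+ // Example (sketch) of the wire format streamSSE expects: frames separated by a
+ // blank line, each carrying one or more `data:` lines that are joined before
+ // onChunk is called. Other SSE fields (event:, id:, retry:) and comment lines
+ // are dropped by the filter above. The payloads shown are illustrative only.
+ //
+ //   data: {"type":"response.output_text.delta","delta":"Hel"}
+ //
+ //   data: {"type":"response.output_text.delta","delta":"lo"}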
+
+ /**
+  * Simple ID generator for message IDs
+  */
+ function generateId() {
+     return `${Date.now()}-${Math.random().toString(36).substring(2, 11)}`;
+ }
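+ // Produces ids like "1718000000000-k3j9x2a1q" (hypothetical value): the current
+ // timestamp plus up to nine base-36 characters. They are only used locally, e.g.
+ // for the user entry appended optimistically in sendMessage below.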
+ /**
+  * A comprehensive hook for managing conversations, threads, messages, and streaming
+  * with Uno Agent Server.
+  *
+  * @example
+  * ```tsx
+  * import { useConversation } from '@praveen001/uno-converse';
+  *
+  * function ChatComponent() {
+  *   const {
+  *     allMessages,
+  *     isStreaming,
+  *     sendMessage,
+  *     startNewChat,
+  *   } = useConversation({
+  *     namespace: 'my-app',
+  *     projectName: 'my-project',
+  *     baseUrl: 'https://my-uno-server.com/api/agent-server',
+  *     getHeaders: () => ({
+  *       'Authorization': `Bearer ${getToken()}`,
+  *     }),
+  *   });
+  *
+  *   const handleSend = async (text: string) => {
+  *     await sendMessage(
+  *       [{ type: 'message', id: '1', content: text }],
+  *       {
+  *         namespace: 'my-app',
+  *         agentName: 'my-agent',
+  *       }
+  *     );
+  *   };
+  *
+  *   return (
+  *     <div>
+  *       {allMessages.map(msg => (
+  *         <MessageComponent key={msg.message_id} message={msg} />
+  *       ))}
+  *       {isStreaming && <LoadingIndicator />}
+  *     </div>
+  *   );
+  * }
+  * ```
+  */
+ function useConversation(options) {
+     const { namespace, projectName, baseUrl, getHeaders, autoLoad = true } = options;
+     // Create axios instance with request interceptor for custom headers
+     const axiosInstance = useMemo(() => {
+         const instance = axios.create({
+             baseURL: baseUrl,
+             headers: {
+                 'Content-Type': 'application/json',
+             },
+         });
+         // Add request interceptor to inject custom headers
+         if (getHeaders) {
+             instance.interceptors.request.use(async (config) => {
+                 const customHeaders = await getHeaders();
+                 Object.assign(config.headers, customHeaders);
+                 return config;
+             });
+         }
+         return instance;
+     }, [baseUrl, getHeaders]);
+     // Project state
+     const [projectId, setProjectId] = useState('');
+     const [projectLoading, setProjectLoading] = useState(false);
+     // Conversation list state
+     const [conversations, setConversations] = useState([]);
+     const [conversationsLoading, setConversationsLoading] = useState(false);
+     // Thread state
+     const [threads, setThreads] = useState([]);
+     const [threadsLoading, setThreadsLoading] = useState(false);
+     // Message state
+     const [messages, setMessages] = useState([]);
+     const [streamingMessage, setStreamingMessage] = useState(null);
+     const [messagesLoading, setMessagesLoading] = useState(false);
+     const [isStreaming, setIsStreaming] = useState(false);
+     const [isThinking, setIsThinking] = useState(false);
+     // Current selection
+     const [currentConversationId, setCurrentConversationId] = useState(null);
+     const [currentThreadId, setCurrentThreadId] = useState(null);
+     const [previousMessageId, setPreviousMessageId] = useState('');
+     // Refs
+     const processorRef = useRef(null);
+     const abortControllerRef = useRef(null);
+     // ============================================
+     // API Helper Functions
+     // ============================================
+     /**
+      * Build query params with project_id if available
+      */
+     const buildParams = useCallback((params) => {
+         const result = {};
+         if (projectId) {
+             result.project_id = projectId;
+         }
+         if (params) {
+             Object.assign(result, params);
+         }
+         return result;
+     }, [projectId]);
+     /**
+      * Get headers for streaming requests (combines default + custom headers)
+      */
+     const getRequestHeaders = useCallback(async () => {
+         const headers = {
+             'Content-Type': 'application/json',
+         };
+         // Add custom headers if getHeaders function is provided
+         if (getHeaders) {
+             const customHeaders = await getHeaders();
+             Object.assign(headers, customHeaders);
+         }
+         return headers;
+     }, [getHeaders]);
+     // ============================================
+     // Project Management
+     // ============================================
+     /**
+      * Fetch the project ID using the project name
+      */
+     const fetchProjectId = useCallback(async () => {
+         if (!projectName) {
+             return;
+         }
+         setProjectLoading(true);
+         try {
+             const response = await axiosInstance.get('/project/id', {
+                 params: { name: projectName },
+             });
+             const id = typeof response.data === 'string' ? response.data : response.data.data;
+             setProjectId(id || '');
+         }
+         catch (error) {
+             console.error('Failed to fetch project ID:', error);
+             throw error;
+         }
+         finally {
+             setProjectLoading(false);
+         }
+     }, [axiosInstance, projectName]);
+     // ============================================
+     // Conversation Management
+     // ============================================
+     /**
+      * Load all conversations for the namespace
+      */
+     const loadConversations = useCallback(async () => {
+         setConversationsLoading(true);
+         try {
+             const response = await axiosInstance.get('/conversations', {
+                 params: buildParams({ namespace }),
+             });
+             const data = 'data' in response.data ? response.data.data : response.data;
+             setConversations(data || []);
+         }
+         catch (error) {
+             console.error('Failed to load conversations:', error);
+             throw error;
+         }
+         finally {
+             setConversationsLoading(false);
+         }
+     }, [axiosInstance, buildParams, namespace]);
+     /**
+      * Select a conversation and load its threads
+      */
+     const selectConversation = useCallback((conversationId) => {
+         setCurrentConversationId(conversationId);
+         // Threads will be loaded via useEffect
+     }, []);
+     // ============================================
+     // Thread Management
+     // ============================================
+     /**
+      * Load threads for a conversation
+      */
+     const loadThreads = useCallback(async (conversationId) => {
+         setThreadsLoading(true);
+         try {
+             const response = await axiosInstance.get('/threads', {
+                 params: buildParams({ conversation_id: conversationId, namespace }),
+             });
+             const loadedThreads = 'data' in response.data ? response.data.data : response.data;
+             // Sort by created_at descending
+             loadedThreads.sort((a, b) => new Date(b.created_at).getTime() - new Date(a.created_at).getTime());
+             setThreads(loadedThreads);
+             // Auto-select the latest thread
+             if (loadedThreads.length > 0) {
+                 setCurrentThreadId(loadedThreads[0].thread_id);
+             }
+         }
+         catch (error) {
+             console.error('Failed to load threads:', error);
+             throw error;
+         }
+         finally {
+             setThreadsLoading(false);
+         }
+     }, [axiosInstance, buildParams, namespace]);
+     /**
+      * Select a thread and load its messages
+      */
+     const selectThread = useCallback((threadId) => {
+         setCurrentThreadId(threadId);
+         // Messages will be loaded via useEffect
+     }, []);
+     // ============================================
+     // Message Management
+     // ============================================
+     /**
+      * Load messages for a thread
+      */
+     const loadMessages = useCallback(async (threadId) => {
+         setMessagesLoading(true);
+         try {
+             const response = await axiosInstance.get('/messages', {
+                 params: buildParams({ thread_id: threadId, namespace }),
+             });
+             const loadedMessages = 'data' in response.data ? response.data.data : response.data;
+             // Extract last message ID for continuation
+             if (loadedMessages.length > 0) {
+                 const lastMsgId = loadedMessages[loadedMessages.length - 1].message_id;
+                 setPreviousMessageId(lastMsgId);
+             }
+             else {
+                 setPreviousMessageId('');
+             }
+             setMessages(loadedMessages);
+         }
+         catch (error) {
+             console.error('Failed to load messages:', error);
+             throw error;
+         }
+         finally {
+             setMessagesLoading(false);
+         }
+     }, [axiosInstance, buildParams, namespace]);
+     /**
+      * Send a user message and stream the response
+      */
+     const sendMessage = useCallback(async (userMessages, config) => {
+         const messageId = generateId();
+         // Check if this is a tool approval response (resuming a run)
+         const isToolApproval = userMessages.length === 1 &&
+             userMessages[0].type === MessageType.FunctionCallApprovalResponse;
+         // Only add user message for regular messages, not for tool approvals
+         if (!isToolApproval) {
+             const userConversation = {
+                 conversation_id: currentConversationId || '',
+                 thread_id: currentThreadId || '',
+                 message_id: messageId + '-user',
+                 messages: userMessages,
+                 meta: {},
+             };
+             setMessages(prev => [...prev, userConversation]);
+         }
+         // Initialize the chunk processor for the assistant response
+         processorRef.current = new ChunkProcessor(currentConversationId || '', currentThreadId || '', (conversation) => {
+             setIsThinking(isThinking);
+             setStreamingMessage({ ...conversation, isStreaming: true });
+         });
+         setIsStreaming(true);
+         setIsThinking(true);
+         // Build URL with query parameters
+         const params = new URLSearchParams();
+         if (projectId) {
+             params.append('project_id', projectId);
+         }
+         params.append('agent_name', config.agentName);
+         let url = `${baseUrl}/converse?${params.toString()}`;
+         if (!!config.baseUrl) {
+             url = config.baseUrl;
+         }
+         // Prepare request body
+         const body = JSON.stringify({
+             namespace: config.namespace,
+             previous_message_id: previousMessageId,
+             message: userMessages[0],
+             context: config.context || {},
+         });
+         // Abort any existing stream
+         if (abortControllerRef.current) {
+             abortControllerRef.current.abort();
+         }
+         abortControllerRef.current = new AbortController();
+         try {
+             // Get headers (supports async getHeaders function)
+             const requestHeaders = await getRequestHeaders();
+             await streamSSE(url, {
+                 method: 'POST',
+                 body,
+                 headers: {
+                     ...requestHeaders,
+                     ...(config.headers || {}),
+                 },
+             }, {
+                 onChunk: (data) => {
+                     processorRef.current?.processChunk(data);
+                 },
+                 onComplete: () => {
+                     // Move streaming message to messages list
+                     if (processorRef.current) {
+                         const finalConversation = processorRef.current.getConversation();
+                         if (isToolApproval) {
+                             // For tool approvals, update the last message instead of appending
+                             setMessages(prev => {
+                                 const newMessages = [...prev];
+                                 if (newMessages.length > 0) {
+                                     const lastMsg = newMessages[newMessages.length - 1];
+                                     newMessages[newMessages.length - 1] = {
+                                         ...lastMsg,
+                                         messages: [...lastMsg.messages, ...finalConversation.messages],
+                                         meta: finalConversation.meta,
+                                         isStreaming: false,
+                                     };
+                                 }
+                                 return newMessages;
+                             });
+                         }
+                         else {
+                             setMessages(prev => [...prev, { ...finalConversation, isStreaming: false }]);
+                         }
+                         setStreamingMessage(null);
+                         setPreviousMessageId(finalConversation.message_id);
+                     }
+                 },
+                 onError: (error) => {
+                     console.error('Streaming error:', error);
+                     setStreamingMessage(null);
+                 },
+             }, abortControllerRef.current.signal);
+             // If this was a new conversation, fetch the conversation info
+             if (previousMessageId === '' && processorRef.current) {
+                 try {
+                     const response = await axiosInstance.get(`/messages/${processorRef.current.getConversation().message_id}`, { params: buildParams({ namespace }) });
+                     const messageData = 'data' in response.data ? response.data.data : response.data;
+                     const newConversationId = messageData?.conversation_id;
+                     if (newConversationId) {
+                         // Add new conversation to list
+                         setConversations(prev => [{
+                                 conversation_id: newConversationId,
+                                 name: "New Conversation",
+                                 namespace_id: namespace,
+                                 created_at: new Date().toISOString(),
+                                 last_updated: new Date().toISOString(),
+                             }, ...prev.filter(c => c.conversation_id !== newConversationId)]);
+                         setCurrentConversationId(newConversationId);
+                     }
+                 }
+                 catch (e) {
+                     console.error('Failed to get conversation ID:', e);
+                 }
+             }
+         }
+         finally {
+             setIsStreaming(false);
+             abortControllerRef.current = null;
+         }
+     }, [currentConversationId, currentThreadId, previousMessageId, namespace, baseUrl, projectId, axiosInstance, buildParams, getRequestHeaders]);
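+
+     // Example (sketch): resuming a paused run with a tool-approval response.
+     // sendMessage only checks the `type` of a single-element message array to
+     // detect this case; the other fields shown here (id, call_id, approve) are
+     // assumptions about what the Uno Agent Server expects in the approval payload.
+     //
+     //   await sendMessage(
+     //       [{ type: 'function_call_approval_response', id: '1', call_id: 'call_abc', approve: true }],
+     //       { namespace: 'my-app', agentName: 'my-agent' }
+     //   );
+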
+     // ============================================
+     // Utility Actions
+     // ============================================
+     /**
+      * Start a new chat (reset all state)
+      */
+     const startNewChat = useCallback(() => {
+         // Abort any ongoing stream
+         if (abortControllerRef.current) {
+             abortControllerRef.current.abort();
+             abortControllerRef.current = null;
+         }
+         setCurrentConversationId(null);
+         setCurrentThreadId(null);
+         setThreads([]);
+         setMessages([]);
+         setStreamingMessage(null);
+         setPreviousMessageId('');
+         setIsStreaming(false);
+         setIsThinking(false);
+         processorRef.current = null;
+     }, []);
+     // ============================================
+     // Effects for auto-loading
+     // ============================================
+     // Fetch project ID on mount
+     useEffect(() => {
+         if (autoLoad && projectName) {
+             fetchProjectId();
+         }
+     }, [autoLoad, projectName, fetchProjectId]);
+     // Load conversations after project ID is fetched
+     useEffect(() => {
+         if (autoLoad && projectId) {
+             loadConversations();
+         }
+     }, [autoLoad, projectId, loadConversations]);
+     // Load threads when conversation changes
+     useEffect(() => {
+         if (currentConversationId) {
+             loadThreads(currentConversationId);
+         }
+     }, [currentConversationId, loadThreads]);
+     // Load messages when thread changes
+     useEffect(() => {
+         if (currentThreadId) {
+             loadMessages(currentThreadId);
+         }
+     }, [currentThreadId, loadMessages]);
+     // ============================================
+     // Computed values
+     // ============================================
+     const currentThread = threads.find(t => t.thread_id === currentThreadId) || null;
+     const allMessages = streamingMessage
+         ? [...messages, streamingMessage]
+         : messages;
+     return {
+         // Project state
+         projectId,
+         projectLoading,
+         // Conversation list state
+         conversations,
+         conversationsLoading,
+         // Thread state
+         threads,
+         threadsLoading,
+         currentThread,
+         // Message state
+         messages,
+         streamingMessage,
+         messagesLoading,
+         isStreaming,
+         isThinking,
+         // Current selection
+         currentConversationId,
+         currentThreadId,
+         // Actions - Conversations
+         loadConversations,
+         selectConversation,
+         // Actions - Threads
+         loadThreads,
+         selectThread,
+         // Actions - Messages
+         sendMessage,
+         // Actions - Utility
+         startNewChat,
+         // Combined messages
+         allMessages,
+     };
+ }
+
+ export { ChunkProcessor, ChunkType, ContentType, MessageType, Role, isEasyMessage, isFunctionCallMessage, isFunctionCallOutputMessage, isImageGenerationCallMessage, isInputMessage, isReasoningMessage, streamSSE, useConversation };
+ //# sourceMappingURL=index.js.map