@curaious/uno-converse 0.1.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.cjs ADDED
@@ -0,0 +1,870 @@
+ 'use strict';
+
+ var axios = require('axios');
+ var react = require('react');
+
+ // Message Roles
+ exports.Role = void 0;
+ (function (Role) {
+     Role["User"] = "user";
+     Role["Developer"] = "developer";
+     Role["System"] = "system";
+     Role["Assistant"] = "assistant";
+ })(exports.Role || (exports.Role = {}));
+ // Message Types
+ exports.MessageType = void 0;
+ (function (MessageType) {
+     MessageType["Message"] = "message";
+     MessageType["FunctionCall"] = "function_call";
+     MessageType["FunctionCallOutput"] = "function_call_output";
+     MessageType["Reasoning"] = "reasoning";
+     MessageType["ImageGenerationCall"] = "image_generation_call";
+     MessageType["FunctionCallApprovalResponse"] = "function_call_approval_response";
+ })(exports.MessageType || (exports.MessageType = {}));
+ // Content Types
+ exports.ContentType = void 0;
+ (function (ContentType) {
+     ContentType["InputText"] = "input_text";
+     ContentType["OutputText"] = "output_text";
+     ContentType["SummaryText"] = "summary_text";
+     ContentType["InputImage"] = "input_image";
+ })(exports.ContentType || (exports.ContentType = {}));
+ // Chunk Types
+ exports.ChunkType = void 0;
+ (function (ChunkType) {
+     ChunkType["ChunkTypeRunCreated"] = "run.created";
+     ChunkType["ChunkTypeRunInProgress"] = "run.in_progress";
+     ChunkType["ChunkTypeRunPaused"] = "run.paused";
+     ChunkType["ChunkTypeRunCompleted"] = "run.completed";
+     ChunkType["ChunkTypeResponseCreated"] = "response.created";
+     ChunkType["ChunkTypeResponseInProgress"] = "response.in_progress";
+     ChunkType["ChunkTypeResponseCompleted"] = "response.completed";
+     ChunkType["ChunkTypeOutputItemAdded"] = "response.output_item.added";
+     ChunkType["ChunkTypeOutputItemDone"] = "response.output_item.done";
+     ChunkType["ChunkTypeContentPartAdded"] = "response.content_part.added";
+     ChunkType["ChunkTypeContentPartDone"] = "response.content_part.done";
+     ChunkType["ChunkTypeOutputTextDelta"] = "response.output_text.delta";
+     ChunkType["ChunkTypeOutputTextDone"] = "response.output_text.done";
+     ChunkType["ChunkTypeFunctionCallArgumentsDelta"] = "response.function_call_arguments.delta";
+     ChunkType["ChunkTypeFunctionCallArgumentsDone"] = "response.function_call_arguments.done";
+     ChunkType["ChunkTypeReasoningSummaryPartAdded"] = "response.reasoning_summary_part.added";
+     ChunkType["ChunkTypeReasoningSummaryPartDone"] = "response.reasoning_summary_part.done";
+     ChunkType["ChunkTypeReasoningSummaryTextDelta"] = "response.reasoning_summary_text.delta";
+     ChunkType["ChunkTypeReasoningSummaryTextDone"] = "response.reasoning_summary_text.done";
+     // Image generation
+     ChunkType["ChunkTypeImageGenerationCallInProgress"] = "response.image_generation_call.in_progress";
+     ChunkType["ChunkTypeImageGenerationCallGenerating"] = "response.image_generation_call.generating";
+     ChunkType["ChunkTypeImageGenerationCallPartialImage"] = "response.image_generation_call.partial_image";
+     // Extra
+     ChunkType["ChunkTypeFunctionCallOutput"] = "function_call_output";
+ })(exports.ChunkType || (exports.ChunkType = {}));
+ // Type guards
+ function isEasyMessage(msg) {
+     return msg.type === exports.MessageType.Message && 'content' in msg && (typeof msg.content === 'string' || Array.isArray(msg.content));
+ }
+ function isInputMessage(msg) {
+     return msg.type === exports.MessageType.Message && 'content' in msg && Array.isArray(msg.content);
+ }
+ function isFunctionCallMessage(msg) {
+     return msg.type === exports.MessageType.FunctionCall;
+ }
+ function isFunctionCallOutputMessage(msg) {
+     return msg.type === exports.MessageType.FunctionCallOutput;
+ }
+ function isReasoningMessage(msg) {
+     return msg.type === exports.MessageType.Reasoning;
+ }
+ function isImageGenerationCallMessage(msg) {
+     return msg.type === exports.MessageType.ImageGenerationCall;
+ }
+
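For orientation, a minimal sketch of how a consumer might combine these enums and type guards to summarize a streamed message list; the item shapes follow the objects built by `ChunkProcessor` below, and the rendering choices are illustrative only.

```js
const { Role, isEasyMessage, isFunctionCallMessage, isReasoningMessage } = require('@curaious/uno-converse');

// Turn a list of message items (as produced by ChunkProcessor below)
// into rough one-line descriptions.
function describeMessages(messages) {
  return messages.map((msg) => {
    if (isEasyMessage(msg)) {
      const text = Array.isArray(msg.content)
        ? msg.content.map((part) => part.text || '').join('')
        : msg.content;
      return `${msg.role === Role.Assistant ? 'Assistant' : 'User'}: ${text}`;
    }
    if (isFunctionCallMessage(msg)) {
      return `Tool call ${msg.name}(${msg.arguments})`;
    }
    if (isReasoningMessage(msg)) {
      return `Reasoning (${(msg.summary || []).length} summary parts)`;
    }
    return `Unhandled item of type ${msg.type}`;
  });
}
```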
+ /**
+  * Processes streaming chunks from the LLM response.
+  * Builds up messages incrementally as chunks arrive.
+  *
+  * @example
+  * ```ts
+  * const processor = new ChunkProcessor(
+  *   'conv-123',
+  *   'thread-456',
+  *   (conversation) => {
+  *     // Update UI with new conversation state
+  *     setConversation(conversation);
+  *   }
+  * );
+  *
+  * // Process incoming chunks
+  * processor.processChunk(jsonData);
+  *
+  * // Get final conversation when done
+  * const finalConversation = processor.getConversation();
+  * ```
+  */
+ class ChunkProcessor {
+     constructor(conversationId, threadId, onChange) {
+         this.messages = [];
+         this.currentOutputItem = null;
+         this.conversation = {
+             conversation_id: conversationId,
+             thread_id: threadId,
+             message_id: '',
+             messages: [],
+             meta: {},
+         };
+         this._onChange = onChange;
+     }
+     /**
+      * Get all processed messages
+      */
+     getMessages() {
+         return this.messages;
+     }
+     /**
+      * Get the current conversation state
+      */
+     getConversation() {
+         return this.conversation;
+     }
+     emitChange() {
+         this.conversation.messages = [...this.messages];
+         this._onChange({ ...this.conversation });
+     }
+     /**
+      * Process a raw JSON chunk from the SSE stream
+      */
+     processChunk(data) {
+         try {
+             const chunk = JSON.parse(data);
+             this.handleChunk(chunk);
+         }
+         catch (e) {
+             console.error('Failed to parse chunk:', e, data);
+         }
+     }
+     handleChunk(chunk) {
+         switch (chunk.type) {
+             // Run lifecycle
+             case exports.ChunkType.ChunkTypeRunCreated:
+             case exports.ChunkType.ChunkTypeRunInProgress:
+             case exports.ChunkType.ChunkTypeRunCompleted:
+             case exports.ChunkType.ChunkTypeRunPaused:
+                 this.conversation.meta.run_state = chunk.run_state;
+                 this.conversation.message_id = chunk.run_state.id;
+                 if (chunk.type !== exports.ChunkType.ChunkTypeRunCreated && chunk.type !== exports.ChunkType.ChunkTypeRunInProgress) {
+                     this.emitChange();
+                 }
+                 break;
+             // Response lifecycle
+             case exports.ChunkType.ChunkTypeResponseCreated:
+             case exports.ChunkType.ChunkTypeResponseInProgress:
+                 break;
+             case exports.ChunkType.ChunkTypeResponseCompleted:
+                 if (chunk.response?.usage) {
+                     this.conversation.meta.usage = chunk.response.usage;
+                     this.emitChange();
+                 }
+                 break;
+             // Output item lifecycle
+             case exports.ChunkType.ChunkTypeOutputItemAdded:
+                 this.handleOutputItemAdded(chunk);
+                 break;
+             case exports.ChunkType.ChunkTypeOutputItemDone:
+                 break;
+             // Content parts
+             case exports.ChunkType.ChunkTypeContentPartAdded:
+                 this.handleContentPartAdded(chunk);
+                 break;
+             case exports.ChunkType.ChunkTypeContentPartDone:
+                 break;
+             // Text deltas
+             case exports.ChunkType.ChunkTypeOutputTextDelta:
+                 this.handleOutputTextDelta(chunk);
+                 break;
+             case exports.ChunkType.ChunkTypeOutputTextDone:
+                 break;
+             // Reasoning summary
+             case exports.ChunkType.ChunkTypeReasoningSummaryPartAdded:
+                 this.handleReasoningSummaryPartAdded(chunk);
+                 break;
+             case exports.ChunkType.ChunkTypeReasoningSummaryPartDone:
+                 break;
+             case exports.ChunkType.ChunkTypeReasoningSummaryTextDelta:
+                 this.handleReasoningSummaryTextDelta(chunk);
+                 break;
+             case exports.ChunkType.ChunkTypeReasoningSummaryTextDone:
+                 break;
+             // Function calls
+             case exports.ChunkType.ChunkTypeFunctionCallArgumentsDelta:
+                 this.handleFunctionCallArgumentsDelta(chunk);
+                 break;
+             case exports.ChunkType.ChunkTypeFunctionCallArgumentsDone:
+                 break;
+             case exports.ChunkType.ChunkTypeFunctionCallOutput:
+                 this.handleFunctionCallOutput(chunk);
+                 break;
+             // Image Generation Calls
+             case exports.ChunkType.ChunkTypeImageGenerationCallInProgress:
+                 break;
+             case exports.ChunkType.ChunkTypeImageGenerationCallGenerating:
+                 break;
+             case exports.ChunkType.ChunkTypeImageGenerationCallPartialImage:
+                 this.handleImageGenerationCallPartialImage(chunk);
+                 break;
+         }
+     }
+     handleOutputItemAdded(chunk) {
+         if (!chunk.item)
+             return;
+         switch (chunk.item.type) {
+             case "message":
+                 this.currentOutputItem = {
+                     id: chunk.item.id,
+                     type: "message",
+                     role: chunk.item.role || "assistant",
+                     content: [],
+                 };
+                 break;
+             case "function_call":
+                 this.currentOutputItem = {
+                     id: chunk.item.id,
+                     type: "function_call",
+                     name: chunk.item.name || "",
+                     call_id: chunk.item.call_id || "",
+                     arguments: "",
+                 };
+                 break;
+             case "reasoning":
+                 this.currentOutputItem = {
+                     id: chunk.item.id,
+                     type: "reasoning",
+                     summary: [],
+                 };
+                 break;
+             case "image_generation_call":
+                 this.currentOutputItem = {
+                     id: chunk.item.id,
+                     type: "image_generation_call",
+                     status: chunk.item.status,
+                 };
+                 break;
+         }
+         if (this.currentOutputItem) {
+             this.messages.push(this.currentOutputItem);
+             this.emitChange();
+         }
+     }
+     handleContentPartAdded(chunk) {
+         if (!this.currentOutputItem || this.currentOutputItem.type !== "message")
+             return;
+         const message = this.currentOutputItem;
+         if (chunk.part?.type === exports.ContentType.OutputText) {
+             message.content = message.content || [];
+             message.content.push({
+                 type: exports.ContentType.OutputText,
+                 text: "",
+             });
+             this.emitChange();
+         }
+     }
+     handleOutputTextDelta(chunk) {
+         if (!this.currentOutputItem || this.currentOutputItem.type !== "message")
+             return;
+         const message = this.currentOutputItem;
+         const contents = message.content;
+         if (!contents?.length || !chunk.delta)
+             return;
+         const lastContent = contents[contents.length - 1];
+         if (lastContent && 'text' in lastContent) {
+             lastContent.text += chunk.delta;
+             this.emitChange();
+         }
+     }
+     handleReasoningSummaryPartAdded(chunk) {
+         if (!this.currentOutputItem || this.currentOutputItem.type !== "reasoning")
+             return;
+         const reasoning = this.currentOutputItem;
+         if (chunk.part?.type === exports.ContentType.SummaryText) {
+             reasoning.summary = reasoning.summary || [];
+             reasoning.summary.push({
+                 type: exports.ContentType.SummaryText,
+                 text: "",
+             });
+             this.emitChange();
+         }
+     }
+     handleReasoningSummaryTextDelta(chunk) {
+         if (!this.currentOutputItem || this.currentOutputItem.type !== "reasoning")
+             return;
+         const reasoning = this.currentOutputItem;
+         const summaries = reasoning.summary;
+         if (!summaries?.length || !chunk.delta)
+             return;
+         const lastSummary = summaries[summaries.length - 1];
+         if (lastSummary) {
+             lastSummary.text += chunk.delta;
+             this.emitChange();
+         }
+     }
+     handleFunctionCallArgumentsDelta(chunk) {
+         if (!this.currentOutputItem || this.currentOutputItem.type !== "function_call")
+             return;
+         const functionCall = this.currentOutputItem;
+         functionCall.arguments += chunk.delta || "";
+         this.emitChange();
+     }
+     handleFunctionCallOutput(chunk) {
+         this.currentOutputItem = chunk;
+         this.messages.push(this.currentOutputItem);
+         this.emitChange();
+     }
+     handleImageGenerationCallPartialImage(chunk) {
+         if (!this.currentOutputItem || this.currentOutputItem.type !== "image_generation_call")
+             return;
+         const image = this.currentOutputItem;
+         image.result = chunk.partial_image_b64;
+         image.quality = chunk.quality;
+         image.size = chunk.size;
+         image.output_format = chunk.output_format;
+         image.background = chunk.background;
+         this.emitChange();
+     }
+ }
+
+ /**
+  * Streams Server-Sent Events (SSE) from a URL.
+  * Parses SSE frames and calls onChunk for each data payload.
+  *
+  * @param url - The URL to stream from
+  * @param requestOptions - Fetch request options
+  * @param callbacks - SSE event callbacks
+  * @param abortSignal - Optional signal to abort the stream
+  *
+  * @example
+  * ```ts
+  * await streamSSE(
+  *   'https://api.example.com/stream',
+  *   { method: 'POST', body: JSON.stringify({ message: 'Hello' }) },
+  *   {
+  *     onChunk: (data) => console.log('Received:', data),
+  *     onComplete: () => console.log('Done'),
+  *     onError: (err) => console.error('Error:', err),
+  *   }
+  * );
+  * ```
+  */
+ async function streamSSE(url, requestOptions, callbacks, abortSignal) {
+     try {
+         const response = await fetch(url, {
+             ...requestOptions,
+             signal: abortSignal,
+         });
+         if (!response.ok) {
+             throw new Error(`HTTP error! status: ${response.status}`);
+         }
+         if (!response.body) {
+             throw new Error('Response body is null');
+         }
+         const reader = response.body.getReader();
+         const decoder = new TextDecoder();
+         let buffer = '';
+         while (true) {
+             const { value, done } = await reader.read();
+             if (done)
+                 break;
+             buffer += decoder.decode(value, { stream: true });
+             // Parse SSE frames: split on double newline
+             let idx;
+             while ((idx = buffer.indexOf('\n\n')) !== -1) {
+                 const frame = buffer.slice(0, idx);
+                 buffer = buffer.slice(idx + 2);
+                 // Join all data: lines in the frame
+                 const data = frame
+                     .split('\n')
+                     .filter(line => line.startsWith('data:'))
+                     .map(line => line.slice(5).trim())
+                     .join('\n');
+                 if (data) {
+                     callbacks.onChunk(data);
+                 }
+             }
+         }
+         // Final flush of decoder state
+         decoder.decode();
+         callbacks.onComplete?.();
+     }
+     catch (error) {
+         if (error.name === 'AbortError') {
+             return;
+         }
+         callbacks.onError?.(error);
+     }
+ }
+
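As a usage note, `ChunkProcessor` and `streamSSE` are exported separately and can be wired together outside the React hook. A minimal sketch, assuming Node 18+ (global `fetch`) and an endpoint and body shape mirroring what `sendMessage` below sends; the URL, namespace, and agent name are placeholders, not values defined by this bundle.

```js
const { ChunkProcessor, streamSSE } = require('@curaious/uno-converse');

// Placeholder endpoint, mirroring the `${baseUrl}/converse?agent_name=...` URL
// that the useConversation hook builds below.
const url = 'https://my-uno-server.com/api/agent-server/converse?agent_name=my-agent';

async function converseOnce(text) {
  const processor = new ChunkProcessor('', '', (conversation) => {
    // Called on every emitted change while the stream is in flight.
    console.log('items so far:', conversation.messages.length);
  });

  await streamSSE(
    url,
    {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({
        namespace: 'my-app',
        previous_message_id: '',
        message: { type: 'message', id: '1', content: text },
        context: {},
      }),
    },
    {
      onChunk: (data) => processor.processChunk(data),
      onComplete: () => console.log('final:', processor.getConversation()),
      onError: (err) => console.error('stream failed:', err),
    }
  );
}
```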
+ /**
+  * Simple ID generator for message IDs
+  */
+ function generateId() {
+     return `${Date.now()}-${Math.random().toString(36).substring(2, 11)}`;
+ }
+ /**
+  * A comprehensive hook for managing conversations, threads, messages, and streaming
+  * with Uno Agent Server.
+  *
+  * @example
+  * ```tsx
+  * import { useConversation } from '@curaious/uno-converse';
+  *
+  * function ChatComponent() {
+  *   const {
+  *     allMessages,
+  *     isStreaming,
+  *     sendMessage,
+  *     startNewChat,
+  *   } = useConversation({
+  *     namespace: 'my-app',
+  *     projectName: 'my-project',
+  *     baseUrl: 'https://my-uno-server.com/api/agent-server',
+  *     getHeaders: () => ({
+  *       'Authorization': `Bearer ${getToken()}`,
+  *     }),
+  *   });
+  *
+  *   const handleSend = async (text: string) => {
+  *     await sendMessage(
+  *       [{ type: 'message', id: '1', content: text }],
+  *       {
+  *         namespace: 'my-app',
+  *         agentName: 'my-agent',
+  *       }
+  *     );
+  *   };
+  *
+  *   return (
+  *     <div>
+  *       {allMessages.map(msg => (
+  *         <MessageComponent key={msg.message_id} message={msg} />
+  *       ))}
+  *       {isStreaming && <LoadingIndicator />}
+  *     </div>
+  *   );
+  * }
+  * ```
+  */
+ function useConversation(options) {
+     const { namespace, projectName, baseUrl, getHeaders, autoLoad = true } = options;
+     // Create axios instance with request interceptor for custom headers
+     const axiosInstance = react.useMemo(() => {
+         const instance = axios.create({
+             baseURL: baseUrl,
+             headers: {
+                 'Content-Type': 'application/json',
+             },
+         });
+         // Add request interceptor to inject custom headers
+         if (getHeaders) {
+             instance.interceptors.request.use(async (config) => {
+                 const customHeaders = await getHeaders();
+                 Object.assign(config.headers, customHeaders);
+                 return config;
+             });
+         }
+         return instance;
+     }, [baseUrl, getHeaders]);
+     // Project state
+     const [projectId, setProjectId] = react.useState('');
+     const [projectLoading, setProjectLoading] = react.useState(false);
+     // Conversation list state
+     const [conversations, setConversations] = react.useState([]);
+     const [conversationsLoading, setConversationsLoading] = react.useState(false);
+     // Thread state
+     const [threads, setThreads] = react.useState([]);
+     const [threadsLoading, setThreadsLoading] = react.useState(false);
+     // Message state
+     const [messages, setMessages] = react.useState([]);
+     const [streamingMessage, setStreamingMessage] = react.useState(null);
+     const [messagesLoading, setMessagesLoading] = react.useState(false);
+     const [isStreaming, setIsStreaming] = react.useState(false);
+     const [isThinking, setIsThinking] = react.useState(false);
+     // Current selection
+     const [currentConversationId, setCurrentConversationId] = react.useState(null);
+     const [currentThreadId, setCurrentThreadId] = react.useState(null);
+     const [previousMessageId, setPreviousMessageId] = react.useState('');
+     // Refs
+     const processorRef = react.useRef(null);
+     const abortControllerRef = react.useRef(null);
+     // ============================================
+     // API Helper Functions
+     // ============================================
+     /**
+      * Build query params with project_id if available
+      */
+     const buildParams = react.useCallback((params) => {
+         const result = {};
+         if (projectId) {
+             result.project_id = projectId;
+         }
+         if (params) {
+             Object.assign(result, params);
+         }
+         return result;
+     }, [projectId]);
+     /**
+      * Get headers for streaming requests (combines default + custom headers)
+      */
+     const getRequestHeaders = react.useCallback(async () => {
+         const headers = {
+             'Content-Type': 'application/json',
+         };
+         // Add custom headers if getHeaders function is provided
+         if (getHeaders) {
+             const customHeaders = await getHeaders();
+             Object.assign(headers, customHeaders);
+         }
+         return headers;
+     }, [getHeaders]);
+     // ============================================
+     // Project Management
+     // ============================================
+     /**
+      * Fetch the project ID using the project name
+      */
+     const fetchProjectId = react.useCallback(async () => {
+         if (!projectName) {
+             return;
+         }
+         setProjectLoading(true);
+         try {
+             const response = await axiosInstance.get('/project/id', {
+                 params: { name: projectName },
+             });
+             const id = typeof response.data === 'string' ? response.data : response.data.data;
+             setProjectId(id || '');
+         }
+         catch (error) {
+             console.error('Failed to fetch project ID:', error);
+             throw error;
+         }
+         finally {
+             setProjectLoading(false);
+         }
+     }, [axiosInstance, projectName]);
+     // ============================================
+     // Conversation Management
+     // ============================================
+     /**
+      * Load all conversations for the namespace
+      */
+     const loadConversations = react.useCallback(async () => {
+         setConversationsLoading(true);
+         try {
+             const response = await axiosInstance.get('/conversations', {
+                 params: buildParams({ namespace }),
+             });
+             const data = 'data' in response.data ? response.data.data : response.data;
+             setConversations(data || []);
+         }
+         catch (error) {
+             console.error('Failed to load conversations:', error);
+             throw error;
+         }
+         finally {
+             setConversationsLoading(false);
+         }
+     }, [axiosInstance, buildParams, namespace]);
+     /**
+      * Select a conversation and load its threads
+      */
+     const selectConversation = react.useCallback((conversationId) => {
+         setCurrentConversationId(conversationId);
+         // Threads will be loaded via useEffect
+     }, []);
+     // ============================================
+     // Thread Management
+     // ============================================
+     /**
+      * Load threads for a conversation
+      */
+     const loadThreads = react.useCallback(async (conversationId) => {
+         setThreadsLoading(true);
+         try {
+             const response = await axiosInstance.get('/threads', {
+                 params: buildParams({ conversation_id: conversationId, namespace }),
+             });
+             const loadedThreads = 'data' in response.data ? response.data.data : response.data;
+             // Sort by created_at descending
+             loadedThreads.sort((a, b) => new Date(b.created_at).getTime() - new Date(a.created_at).getTime());
+             setThreads(loadedThreads);
+             // Auto-select the latest thread
+             if (loadedThreads.length > 0) {
+                 setCurrentThreadId(loadedThreads[0].thread_id);
+             }
+         }
+         catch (error) {
+             console.error('Failed to load threads:', error);
+             throw error;
+         }
+         finally {
+             setThreadsLoading(false);
+         }
+     }, [axiosInstance, buildParams, namespace]);
+     /**
+      * Select a thread and load its messages
+      */
+     const selectThread = react.useCallback((threadId) => {
+         setCurrentThreadId(threadId);
+         // Messages will be loaded via useEffect
+     }, []);
+     // ============================================
+     // Message Management
+     // ============================================
+     /**
+      * Load messages for a thread
+      */
+     const loadMessages = react.useCallback(async (threadId) => {
+         setMessagesLoading(true);
+         try {
+             const response = await axiosInstance.get('/messages', {
+                 params: buildParams({ thread_id: threadId, namespace }),
+             });
+             const loadedMessages = 'data' in response.data ? response.data.data : response.data;
+             // Extract last message ID for continuation
+             if (loadedMessages.length > 0) {
+                 const lastMsgId = loadedMessages[loadedMessages.length - 1].message_id;
+                 setPreviousMessageId(lastMsgId);
+             }
+             else {
+                 setPreviousMessageId('');
+             }
+             setMessages(loadedMessages);
+         }
+         catch (error) {
+             console.error('Failed to load messages:', error);
+             throw error;
+         }
+         finally {
+             setMessagesLoading(false);
+         }
+     }, [axiosInstance, buildParams, namespace]);
+     /**
+      * Send a user message and stream the response
+      */
+     const sendMessage = react.useCallback(async (userMessages, config) => {
+         const messageId = generateId();
+         // Check if this is a tool approval response (resuming a run)
+         const isToolApproval = userMessages.length === 1 &&
+             userMessages[0].type === exports.MessageType.FunctionCallApprovalResponse;
+         // Only add user message for regular messages, not for tool approvals
+         if (!isToolApproval) {
+             const userConversation = {
+                 conversation_id: currentConversationId || '',
+                 thread_id: currentThreadId || '',
+                 message_id: messageId + '-user',
+                 messages: userMessages,
+                 meta: {},
+             };
+             setMessages(prev => [...prev, userConversation]);
+         }
+         // Initialize the chunk processor for the assistant response
+         processorRef.current = new ChunkProcessor(currentConversationId || '', currentThreadId || '', (conversation) => {
+             // Clear the "thinking" indicator once output starts arriving
+             setIsThinking(false);
+             setStreamingMessage({ ...conversation, isStreaming: true });
+         });
+         setIsStreaming(true);
+         setIsThinking(true);
+         // Build URL with query parameters
+         const params = new URLSearchParams();
+         if (projectId) {
+             params.append('project_id', projectId);
+         }
+         params.append('agent_name', config.agentName);
+         let url = `${baseUrl}/converse?${params.toString()}`;
+         if (!!config.baseUrl) {
+             url = config.baseUrl;
+         }
+         // Prepare request body
+         const body = JSON.stringify({
+             namespace: config.namespace,
+             previous_message_id: previousMessageId,
+             message: userMessages[0],
+             context: config.context || {},
+         });
+         // Abort any existing stream
+         if (abortControllerRef.current) {
+             abortControllerRef.current.abort();
+         }
+         abortControllerRef.current = new AbortController();
+         try {
+             // Get headers (supports async getHeaders function)
+             const requestHeaders = await getRequestHeaders();
+             await streamSSE(url, {
+                 method: 'POST',
+                 body,
+                 headers: {
+                     ...requestHeaders,
+                     ...(config.headers || {}),
+                 },
+             }, {
+                 onChunk: (data) => {
+                     processorRef.current?.processChunk(data);
+                 },
+                 onComplete: () => {
+                     // Move streaming message to messages list
+                     if (processorRef.current) {
+                         const finalConversation = processorRef.current.getConversation();
+                         if (isToolApproval) {
+                             // For tool approvals, update the last message instead of appending
+                             setMessages(prev => {
+                                 const newMessages = [...prev];
+                                 if (newMessages.length > 0) {
+                                     const lastMsg = newMessages[newMessages.length - 1];
+                                     newMessages[newMessages.length - 1] = {
+                                         ...lastMsg,
+                                         messages: [...lastMsg.messages, ...finalConversation.messages],
+                                         meta: finalConversation.meta,
+                                         isStreaming: false,
+                                     };
+                                 }
+                                 return newMessages;
+                             });
+                         }
+                         else {
+                             setMessages(prev => [...prev, { ...finalConversation, isStreaming: false }]);
+                         }
+                         setStreamingMessage(null);
+                         setPreviousMessageId(finalConversation.message_id);
+                     }
+                 },
+                 onError: (error) => {
+                     console.error('Streaming error:', error);
+                     setStreamingMessage(null);
+                 },
+             }, abortControllerRef.current.signal);
+             // If this was a new conversation, fetch the conversation info
+             if (previousMessageId === '' && processorRef.current) {
+                 try {
+                     const response = await axiosInstance.get(`/messages/${processorRef.current.getConversation().message_id}`, { params: buildParams({ namespace }) });
+                     const messageData = 'data' in response.data ? response.data.data : response.data;
+                     const newConversationId = messageData?.conversation_id;
+                     if (newConversationId) {
+                         // Add new conversation to list
+                         setConversations(prev => [{
+                                 conversation_id: newConversationId,
+                                 name: "New Conversation",
+                                 namespace_id: namespace,
+                                 created_at: new Date().toISOString(),
+                                 last_updated: new Date().toISOString(),
+                             }, ...prev.filter(c => c.conversation_id !== newConversationId)]);
+                         setCurrentConversationId(newConversationId);
+                     }
+                 }
+                 catch (e) {
+                     console.error('Failed to get conversation ID:', e);
+                 }
+             }
+         }
+         finally {
+             setIsStreaming(false);
+             abortControllerRef.current = null;
+         }
+     }, [currentConversationId, currentThreadId, previousMessageId, namespace, baseUrl, projectId, axiosInstance, buildParams, getRequestHeaders]);
+     // ============================================
+     // Utility Actions
+     // ============================================
+     /**
+      * Start a new chat (reset all state)
+      */
+     const startNewChat = react.useCallback(() => {
+         // Abort any ongoing stream
+         if (abortControllerRef.current) {
+             abortControllerRef.current.abort();
+             abortControllerRef.current = null;
+         }
+         setCurrentConversationId(null);
+         setCurrentThreadId(null);
+         setThreads([]);
+         setMessages([]);
+         setStreamingMessage(null);
+         setPreviousMessageId('');
+         setIsStreaming(false);
+         setIsThinking(false);
+         processorRef.current = null;
+     }, []);
+     // ============================================
+     // Effects for auto-loading
+     // ============================================
+     // Fetch project ID on mount
+     react.useEffect(() => {
+         if (autoLoad && projectName) {
+             fetchProjectId();
+         }
+     }, [autoLoad, projectName, fetchProjectId]);
+     // Load conversations after project ID is fetched
+     react.useEffect(() => {
+         if (autoLoad && projectId) {
+             loadConversations();
+         }
+     }, [autoLoad, projectId, loadConversations]);
+     // Load threads when conversation changes
+     react.useEffect(() => {
+         if (currentConversationId) {
+             loadThreads(currentConversationId);
+         }
+     }, [currentConversationId, loadThreads]);
+     // Load messages when thread changes
+     react.useEffect(() => {
+         if (currentThreadId) {
+             loadMessages(currentThreadId);
+         }
+     }, [currentThreadId, loadMessages]);
+     // ============================================
+     // Computed values
+     // ============================================
+     const currentThread = threads.find(t => t.thread_id === currentThreadId) || null;
+     const allMessages = streamingMessage
+         ? [...messages, streamingMessage]
+         : messages;
+     return {
+         // Project state
+         projectId,
+         projectLoading,
+         // Conversation list state
+         conversations,
+         conversationsLoading,
+         // Thread state
+         threads,
+         threadsLoading,
+         currentThread,
+         // Message state
+         messages,
+         streamingMessage,
+         messagesLoading,
+         isStreaming,
+         isThinking,
+         // Current selection
+         currentConversationId,
+         currentThreadId,
+         // Actions - Conversations
+         loadConversations,
+         selectConversation,
+         // Actions - Threads
+         loadThreads,
+         selectThread,
+         // Actions - Messages
+         sendMessage,
+         // Actions - Utility
+         startNewChat,
+         // Combined messages
+         allMessages,
+     };
+ }
+
+ exports.ChunkProcessor = ChunkProcessor;
+ exports.isEasyMessage = isEasyMessage;
+ exports.isFunctionCallMessage = isFunctionCallMessage;
+ exports.isFunctionCallOutputMessage = isFunctionCallOutputMessage;
+ exports.isImageGenerationCallMessage = isImageGenerationCallMessage;
+ exports.isInputMessage = isInputMessage;
+ exports.isReasoningMessage = isReasoningMessage;
+ exports.streamSSE = streamSSE;
+ exports.useConversation = useConversation;
+ //# sourceMappingURL=index.cjs.map
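One path the hook supports but the JSDoc example above does not show is resuming a paused run after a tool approval: `sendMessage` skips appending a user message when the single input item has type `function_call_approval_response`, and on completion it merges the continuation into the previous message instead of appending a new one. A minimal sketch of that call; the `call_id` and `approve` fields are assumptions about the server's expected approval payload, which this bundle does not define.

```js
// Inside a component that has already called useConversation(...):
// const { sendMessage } = useConversation({ ... });

async function approveToolCall(sendMessage, callId) {
  await sendMessage(
    [
      {
        // MessageType.FunctionCallApprovalResponse
        type: 'function_call_approval_response',
        id: 'approval-1',
        // Hypothetical fields: the exact approval payload is defined by the
        // Uno Agent Server, not by this bundle.
        call_id: callId,
        approve: true,
      },
    ],
    {
      namespace: 'my-app',
      agentName: 'my-agent',
    }
  );
}
```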