@lssm/module.ai-chat 0.0.0-canary-20251219202229 → 0.0.0-canary-20251220002821

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1 +1 @@
- {"version":3,"file":"chat-service.d.ts","names":[],"sources":["../../src/core/chat-service.ts"],"sourcesContent":[],"mappings":";;;;;;;;;AAoBA;AAEY,UAFK,iBAAA,CAEL;EAEA;EAEF,QAAA,EAJE,QAIF;EAAiB;EA8Bd,OAAA,CAAA,EAhCD,gBAgCY;EAWF;EAYA,KAAA,CAAA,EArDZ,iBAqDY;EAA6B;EAAR,YAAA,CAAA,EAAA,MAAA;EA6EnB;EAA6B,kBAAA,CAAA,EAAA,MAAA;EAAR;EAkGhC,OAAA,CAAA,EAAA,CAAA,KAAA,EAAA;IAAR,WAAA,EAAA,MAAA;IAUS,YAAA,EAAA,MAAA;EAAR,CAAA,EAAA,GAAA,IAAA;;;AA2DN;;cA3Qa,WAAA;;;;;;;sBAWS;;;;gBAYA,qBAAqB,QAAQ;;;;kBA6E3B,qBAAqB,QAAQ;;;;2CAkGhD,QAAQ;;;;;;;MAUP,QAAQ;;;;8CAUsC;;;;;;;;;iBAiDpC,iBAAA,SAA0B,oBAAoB"}
+ {"version":3,"file":"chat-service.d.ts","names":[],"sources":["../../src/core/chat-service.ts"],"sourcesContent":[],"mappings":";;;;;;;;;AAmBA;AAEY,UAFK,iBAAA,CAEL;EAEA;EAEF,QAAA,EAJE,QAIF;EAAiB;EA8Bd,OAAA,CAAA,EAhCD,gBAgCY;EAWF;EAYA,KAAA,CAAA,EArDZ,iBAqDY;EAA6B;EAAR,YAAA,CAAA,EAAA,MAAA;EA6EnB;EAA6B,kBAAA,CAAA,EAAA,MAAA;EAAR;EAqGhC,OAAA,CAAA,EAAA,CAAA,KAAA,EAAA;IAAR,WAAA,EAAA,MAAA;IAUS,YAAA,EAAA,MAAA;EAAR,CAAA,EAAA,GAAA,IAAA;;;AA2DN;;cA9Qa,WAAA;;;;;;;sBAWS;;;;gBAYA,qBAAqB,QAAQ;;;;kBA6E3B,qBAAqB,QAAQ;;;;2CAqGhD,QAAQ;;;;;;;MAUP,QAAQ;;;;8CAUsC;;;;;;;;;iBAiDpC,iBAAA,SAA0B,oBAAoB"}
@@ -123,7 +123,10 @@ var ChatService = class {
  });
  const prompt = this.buildPrompt(conversation, options);
  const model = this.provider.getModel();
- const self = this;
+ const self = {
+ systemPrompt: this.systemPrompt,
+ store: this.store
+ };
  async function* streamGenerator() {
  let fullContent = "";
  try {
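Note on the hunk above: in the previous build the async generator aliased the whole ChatService instance (`const self = this;`) just to reach `this.systemPrompt` and `this.store`; the new build captures only those two fields. A minimal sketch of the same narrowed-capture pattern, with simplified types that are assumptions and not this package's real interfaces:

  interface MessageStore {
    updateMessage(
      conversationId: string,
      messageId: string,
      patch: { content: string; status: string }
    ): Promise<void>;
  }

  function makeStream(
    systemPrompt: string,
    store: MessageStore,
    conversationId: string,
    messageId: string,
    textStream: AsyncIterable<string>
  ) {
    // Capture only what the generator actually reads, instead of aliasing `this`.
    const self = { systemPrompt, store };
    async function* streamGenerator() {
      // In the real service, self.systemPrompt is forwarded to streamText(); omitted here.
      let fullContent = "";
      for await (const chunk of textStream) {
        fullContent += chunk;
        yield chunk;
      }
      await self.store.updateMessage(conversationId, messageId, {
        content: fullContent,
        status: "completed",
      });
    }
    return streamGenerator();
  }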
@@ -1 +1 @@
- {"version":3,"file":"chat-service.js","names":["conversation: ChatConversation"],"sources":["../../src/core/chat-service.ts"],"sourcesContent":["/**\n * Main chat orchestration service\n */\nimport { generateText, streamText } from 'ai';\nimport type { Provider as ChatProvider } from '@lssm/lib.ai-providers';\nimport type { WorkspaceContext } from '../context/workspace-context';\nimport type { ConversationStore } from './conversation-store';\nimport { InMemoryConversationStore } from './conversation-store';\nimport type {\n ChatConversation,\n ChatMessage,\n ChatStreamChunk,\n SendMessageOptions,\n SendMessageResult,\n StreamMessageResult,\n} from './message-types';\n\n/**\n * Configuration for ChatService\n */\nexport interface ChatServiceConfig {\n /** LLM provider to use */\n provider: ChatProvider;\n /** Optional workspace context for code-aware chat */\n context?: WorkspaceContext;\n /** Optional conversation store (defaults to in-memory) */\n store?: ConversationStore;\n /** Default system prompt */\n systemPrompt?: string;\n /** Maximum conversation history to include */\n maxHistoryMessages?: number;\n /** Callback for usage tracking */\n onUsage?: (usage: { inputTokens: number; outputTokens: number }) => void;\n}\n\n/**\n * Default system prompt for ContractSpec vibe coding\n */\nconst DEFAULT_SYSTEM_PROMPT = `You are ContractSpec AI, an expert coding assistant specialized in ContractSpec development.\n\nYour capabilities:\n- Help users create, modify, and understand ContractSpec specifications\n- Generate code that follows ContractSpec patterns and best practices\n- Explain concepts from the ContractSpec documentation\n- Suggest improvements and identify issues in specs and implementations\n\nGuidelines:\n- Be concise but thorough\n- Provide code examples when helpful\n- Reference relevant ContractSpec concepts and patterns\n- Ask clarifying questions when the user's intent is unclear\n- When suggesting code changes, explain the rationale`;\n\n/**\n * Main chat service for AI-powered conversations\n */\nexport class ChatService {\n private readonly provider: ChatProvider;\n private readonly context?: WorkspaceContext;\n private readonly store: ConversationStore;\n private readonly systemPrompt: string;\n private readonly maxHistoryMessages: number;\n private readonly onUsage?: (usage: {\n inputTokens: number;\n outputTokens: number;\n }) => void;\n\n constructor(config: ChatServiceConfig) {\n this.provider = config.provider;\n this.context = config.context;\n this.store = config.store ?? new InMemoryConversationStore();\n this.systemPrompt = config.systemPrompt ?? DEFAULT_SYSTEM_PROMPT;\n this.maxHistoryMessages = config.maxHistoryMessages ?? 
20;\n this.onUsage = config.onUsage;\n }\n\n /**\n * Send a message and get a complete response\n */\n async send(options: SendMessageOptions): Promise<SendMessageResult> {\n // Get or create conversation\n let conversation: ChatConversation;\n if (options.conversationId) {\n const existing = await this.store.get(options.conversationId);\n if (!existing) {\n throw new Error(`Conversation ${options.conversationId} not found`);\n }\n conversation = existing;\n } else {\n conversation = await this.store.create({\n status: 'active',\n provider: this.provider.name,\n model: this.provider.model,\n messages: [],\n workspacePath: this.context?.workspacePath,\n });\n }\n\n // Add user message\n const userMessage = await this.store.appendMessage(conversation.id, {\n role: 'user',\n content: options.content,\n status: 'completed',\n attachments: options.attachments,\n });\n\n // Build prompt from messages\n const prompt = this.buildPrompt(conversation, options);\n\n // Get the language model\n const model = this.provider.getModel();\n\n try {\n // Generate response\n const result = await generateText({\n model,\n prompt,\n system: this.systemPrompt,\n });\n\n // Save assistant message\n const assistantMessage = await this.store.appendMessage(conversation.id, {\n role: 'assistant',\n content: result.text,\n status: 'completed',\n });\n\n // Refresh conversation\n const updatedConversation = await this.store.get(conversation.id);\n if (!updatedConversation) {\n throw new Error('Conversation lost after update');\n }\n\n return {\n message: assistantMessage,\n conversation: updatedConversation,\n };\n } catch (error) {\n // Save error message\n await this.store.appendMessage(conversation.id, {\n role: 'assistant',\n content: '',\n status: 'error',\n error: {\n code: 'generation_failed',\n message: error instanceof Error ? 
error.message : String(error),\n },\n });\n\n throw error;\n }\n }\n\n /**\n * Send a message and get a streaming response\n */\n async stream(options: SendMessageOptions): Promise<StreamMessageResult> {\n // Get or create conversation\n let conversation: ChatConversation;\n if (options.conversationId) {\n const existing = await this.store.get(options.conversationId);\n if (!existing) {\n throw new Error(`Conversation ${options.conversationId} not found`);\n }\n conversation = existing;\n } else {\n conversation = await this.store.create({\n status: 'active',\n provider: this.provider.name,\n model: this.provider.model,\n messages: [],\n workspacePath: this.context?.workspacePath,\n });\n }\n\n // Add user message\n await this.store.appendMessage(conversation.id, {\n role: 'user',\n content: options.content,\n status: 'completed',\n attachments: options.attachments,\n });\n\n // Create placeholder for assistant message\n const assistantMessage = await this.store.appendMessage(conversation.id, {\n role: 'assistant',\n content: '',\n status: 'streaming',\n });\n\n // Build prompt\n const prompt = this.buildPrompt(conversation, options);\n\n // Get the language model\n const model = this.provider.getModel();\n\n // Create async generator for streaming\n const self = this;\n async function* streamGenerator(): AsyncIterable<ChatStreamChunk> {\n let fullContent = '';\n\n try {\n const result = streamText({\n model,\n prompt,\n system: self.systemPrompt,\n });\n\n for await (const chunk of result.textStream) {\n fullContent += chunk;\n yield { type: 'text', content: chunk };\n }\n\n // Update message with final content\n await self.store.updateMessage(conversation.id, assistantMessage.id, {\n content: fullContent,\n status: 'completed',\n });\n\n yield {\n type: 'done',\n };\n } catch (error) {\n await self.store.updateMessage(conversation.id, assistantMessage.id, {\n content: fullContent,\n status: 'error',\n error: {\n code: 'stream_failed',\n message: error instanceof Error ? error.message : String(error),\n },\n });\n\n yield {\n type: 'error',\n error: {\n code: 'stream_failed',\n message: error instanceof Error ? error.message : String(error),\n },\n };\n }\n }\n\n return {\n conversationId: conversation.id,\n messageId: assistantMessage.id,\n stream: streamGenerator(),\n };\n }\n\n /**\n * Get a conversation by ID\n */\n async getConversation(\n conversationId: string\n ): Promise<ChatConversation | null> {\n return this.store.get(conversationId);\n }\n\n /**\n * List conversations\n */\n async listConversations(options?: {\n limit?: number;\n offset?: number;\n }): Promise<ChatConversation[]> {\n return this.store.list({\n status: 'active',\n ...options,\n });\n }\n\n /**\n * Delete a conversation\n */\n async deleteConversation(conversationId: string): Promise<boolean> {\n return this.store.delete(conversationId);\n }\n\n /**\n * Build prompt string for LLM\n */\n private buildPrompt(\n conversation: ChatConversation,\n options: SendMessageOptions\n ): string {\n let prompt = '';\n\n // Add conversation history (limited)\n const historyStart = Math.max(\n 0,\n conversation.messages.length - this.maxHistoryMessages\n );\n for (let i = historyStart; i < conversation.messages.length; i++) {\n const msg = conversation.messages[i];\n if (!msg) continue;\n if (msg.role === 'user' || msg.role === 'assistant') {\n prompt += `${msg.role === 'user' ? 
'User' : 'Assistant'}: ${msg.content}\\n\\n`;\n }\n }\n\n // Add current message with attachments\n let content = options.content;\n if (options.attachments?.length) {\n const attachmentInfo = options.attachments\n .map((a) => {\n if (a.type === 'file' || a.type === 'code') {\n return `\\n\\n### ${a.name}\\n\\`\\`\\`\\n${a.content}\\n\\`\\`\\``;\n }\n return `\\n\\n[Attachment: ${a.name}]`;\n })\n .join('');\n content += attachmentInfo;\n }\n\n prompt += `User: ${content}\\n\\nAssistant:`;\n\n return prompt;\n }\n}\n\n/**\n * Create a chat service with the given configuration\n */\nexport function createChatService(config: ChatServiceConfig): ChatService {\n return new ChatService(config);\n}\n"],"mappings":";;;;;;;;;;AAsCA,MAAM,wBAAwB;;;;;;;;;;;;;;;;;AAkB9B,IAAa,cAAb,MAAyB;CACvB,AAAiB;CACjB,AAAiB;CACjB,AAAiB;CACjB,AAAiB;CACjB,AAAiB;CACjB,AAAiB;CAKjB,YAAY,QAA2B;AACrC,OAAK,WAAW,OAAO;AACvB,OAAK,UAAU,OAAO;AACtB,OAAK,QAAQ,OAAO,SAAS,IAAI,2BAA2B;AAC5D,OAAK,eAAe,OAAO,gBAAgB;AAC3C,OAAK,qBAAqB,OAAO,sBAAsB;AACvD,OAAK,UAAU,OAAO;;;;;CAMxB,MAAM,KAAK,SAAyD;EAElE,IAAIA;AACJ,MAAI,QAAQ,gBAAgB;GAC1B,MAAM,WAAW,MAAM,KAAK,MAAM,IAAI,QAAQ,eAAe;AAC7D,OAAI,CAAC,SACH,OAAM,IAAI,MAAM,gBAAgB,QAAQ,eAAe,YAAY;AAErE,kBAAe;QAEf,gBAAe,MAAM,KAAK,MAAM,OAAO;GACrC,QAAQ;GACR,UAAU,KAAK,SAAS;GACxB,OAAO,KAAK,SAAS;GACrB,UAAU,EAAE;GACZ,eAAe,KAAK,SAAS;GAC9B,CAAC;AAIgB,QAAM,KAAK,MAAM,cAAc,aAAa,IAAI;GAClE,MAAM;GACN,SAAS,QAAQ;GACjB,QAAQ;GACR,aAAa,QAAQ;GACtB,CAAC;EAGF,MAAM,SAAS,KAAK,YAAY,cAAc,QAAQ;EAGtD,MAAM,QAAQ,KAAK,SAAS,UAAU;AAEtC,MAAI;GAEF,MAAM,SAAS,MAAM,aAAa;IAChC;IACA;IACA,QAAQ,KAAK;IACd,CAAC;GAGF,MAAM,mBAAmB,MAAM,KAAK,MAAM,cAAc,aAAa,IAAI;IACvE,MAAM;IACN,SAAS,OAAO;IAChB,QAAQ;IACT,CAAC;GAGF,MAAM,sBAAsB,MAAM,KAAK,MAAM,IAAI,aAAa,GAAG;AACjE,OAAI,CAAC,oBACH,OAAM,IAAI,MAAM,iCAAiC;AAGnD,UAAO;IACL,SAAS;IACT,cAAc;IACf;WACM,OAAO;AAEd,SAAM,KAAK,MAAM,cAAc,aAAa,IAAI;IAC9C,MAAM;IACN,SAAS;IACT,QAAQ;IACR,OAAO;KACL,MAAM;KACN,SAAS,iBAAiB,QAAQ,MAAM,UAAU,OAAO,MAAM;KAChE;IACF,CAAC;AAEF,SAAM;;;;;;CAOV,MAAM,OAAO,SAA2D;EAEtE,IAAIA;AACJ,MAAI,QAAQ,gBAAgB;GAC1B,MAAM,WAAW,MAAM,KAAK,MAAM,IAAI,QAAQ,eAAe;AAC7D,OAAI,CAAC,SACH,OAAM,IAAI,MAAM,gBAAgB,QAAQ,eAAe,YAAY;AAErE,kBAAe;QAEf,gBAAe,MAAM,KAAK,MAAM,OAAO;GACrC,QAAQ;GACR,UAAU,KAAK,SAAS;GACxB,OAAO,KAAK,SAAS;GACrB,UAAU,EAAE;GACZ,eAAe,KAAK,SAAS;GAC9B,CAAC;AAIJ,QAAM,KAAK,MAAM,cAAc,aAAa,IAAI;GAC9C,MAAM;GACN,SAAS,QAAQ;GACjB,QAAQ;GACR,aAAa,QAAQ;GACtB,CAAC;EAGF,MAAM,mBAAmB,MAAM,KAAK,MAAM,cAAc,aAAa,IAAI;GACvE,MAAM;GACN,SAAS;GACT,QAAQ;GACT,CAAC;EAGF,MAAM,SAAS,KAAK,YAAY,cAAc,QAAQ;EAGtD,MAAM,QAAQ,KAAK,SAAS,UAAU;EAGtC,MAAM,OAAO;EACb,gBAAgB,kBAAkD;GAChE,IAAI,cAAc;AAElB,OAAI;IACF,MAAM,SAAS,WAAW;KACxB;KACA;KACA,QAAQ,KAAK;KACd,CAAC;AAEF,eAAW,MAAM,SAAS,OAAO,YAAY;AAC3C,oBAAe;AACf,WAAM;MAAE,MAAM;MAAQ,SAAS;MAAO;;AAIxC,UAAM,KAAK,MAAM,cAAc,aAAa,IAAI,iBAAiB,IAAI;KACnE,SAAS;KACT,QAAQ;KACT,CAAC;AAEF,UAAM,EACJ,MAAM,QACP;YACM,OAAO;AACd,UAAM,KAAK,MAAM,cAAc,aAAa,IAAI,iBAAiB,IAAI;KACnE,SAAS;KACT,QAAQ;KACR,OAAO;MACL,MAAM;MACN,SAAS,iBAAiB,QAAQ,MAAM,UAAU,OAAO,MAAM;MAChE;KACF,CAAC;AAEF,UAAM;KACJ,MAAM;KACN,OAAO;MACL,MAAM;MACN,SAAS,iBAAiB,QAAQ,MAAM,UAAU,OAAO,MAAM;MAChE;KACF;;;AAIL,SAAO;GACL,gBAAgB,aAAa;GAC7B,WAAW,iBAAiB;GAC5B,QAAQ,iBAAiB;GAC1B;;;;;CAMH,MAAM,gBACJ,gBACkC;AAClC,SAAO,KAAK,MAAM,IAAI,eAAe;;;;;CAMvC,MAAM,kBAAkB,SAGQ;AAC9B,SAAO,KAAK,MAAM,KAAK;GACrB,QAAQ;GACR,GAAG;GACJ,CAAC;;;;;CAMJ,MAAM,mBAAmB,gBAA0C;AACjE,SAAO,KAAK,MAAM,OAAO,eAAe;;;;;CAM1C,AAAQ,YACN,cACA,SACQ;EACR,IAAI,SAAS;EAGb,MAAM,eAAe,KAAK,IACxB,GACA,aAAa,SAAS,SAAS,KAAK,mBACrC;AACD,OAAK,IAAI,IAAI,cAAc,IAAI,aAAa,SAAS,QAAQ,KAAK;GAChE,MAAM,MAAM,aAAa,SAAS;AAClC,OAAI,CAAC,IAAK;AACV,OAAI,IAAI,
SAAS,UAAU,IAAI,SAAS,YACtC,WAAU,GAAG,IAAI,SAAS,SAAS,SAAS,YAAY,IAAI,IAAI,QAAQ;;EAK5E,IAAI,UAAU,QAAQ;AACtB,MAAI,QAAQ,aAAa,QAAQ;GAC/B,MAAM,iBAAiB,QAAQ,YAC5B,KAAK,MAAM;AACV,QAAI,EAAE,SAAS,UAAU,EAAE,SAAS,OAClC,QAAO,WAAW,EAAE,KAAK,YAAY,EAAE,QAAQ;AAEjD,WAAO,oBAAoB,EAAE,KAAK;KAClC,CACD,KAAK,GAAG;AACX,cAAW;;AAGb,YAAU,SAAS,QAAQ;AAE3B,SAAO;;;;;;AAOX,SAAgB,kBAAkB,QAAwC;AACxE,QAAO,IAAI,YAAY,OAAO"}
+ {"version":3,"file":"chat-service.js","names":["conversation: ChatConversation"],"sources":["../../src/core/chat-service.ts"],"sourcesContent":["/**\n * Main chat orchestration service\n */\nimport { generateText, streamText } from 'ai';\nimport type { Provider as ChatProvider } from '@lssm/lib.ai-providers';\nimport type { WorkspaceContext } from '../context/workspace-context';\nimport type { ConversationStore } from './conversation-store';\nimport { InMemoryConversationStore } from './conversation-store';\nimport type {\n ChatConversation,\n ChatStreamChunk,\n SendMessageOptions,\n SendMessageResult,\n StreamMessageResult,\n} from './message-types';\n\n/**\n * Configuration for ChatService\n */\nexport interface ChatServiceConfig {\n /** LLM provider to use */\n provider: ChatProvider;\n /** Optional workspace context for code-aware chat */\n context?: WorkspaceContext;\n /** Optional conversation store (defaults to in-memory) */\n store?: ConversationStore;\n /** Default system prompt */\n systemPrompt?: string;\n /** Maximum conversation history to include */\n maxHistoryMessages?: number;\n /** Callback for usage tracking */\n onUsage?: (usage: { inputTokens: number; outputTokens: number }) => void;\n}\n\n/**\n * Default system prompt for ContractSpec vibe coding\n */\nconst DEFAULT_SYSTEM_PROMPT = `You are ContractSpec AI, an expert coding assistant specialized in ContractSpec development.\n\nYour capabilities:\n- Help users create, modify, and understand ContractSpec specifications\n- Generate code that follows ContractSpec patterns and best practices\n- Explain concepts from the ContractSpec documentation\n- Suggest improvements and identify issues in specs and implementations\n\nGuidelines:\n- Be concise but thorough\n- Provide code examples when helpful\n- Reference relevant ContractSpec concepts and patterns\n- Ask clarifying questions when the user's intent is unclear\n- When suggesting code changes, explain the rationale`;\n\n/**\n * Main chat service for AI-powered conversations\n */\nexport class ChatService {\n private readonly provider: ChatProvider;\n private readonly context?: WorkspaceContext;\n private readonly store: ConversationStore;\n private readonly systemPrompt: string;\n private readonly maxHistoryMessages: number;\n private readonly onUsage?: (usage: {\n inputTokens: number;\n outputTokens: number;\n }) => void;\n\n constructor(config: ChatServiceConfig) {\n this.provider = config.provider;\n this.context = config.context;\n this.store = config.store ?? new InMemoryConversationStore();\n this.systemPrompt = config.systemPrompt ?? DEFAULT_SYSTEM_PROMPT;\n this.maxHistoryMessages = config.maxHistoryMessages ?? 
20;\n this.onUsage = config.onUsage;\n }\n\n /**\n * Send a message and get a complete response\n */\n async send(options: SendMessageOptions): Promise<SendMessageResult> {\n // Get or create conversation\n let conversation: ChatConversation;\n if (options.conversationId) {\n const existing = await this.store.get(options.conversationId);\n if (!existing) {\n throw new Error(`Conversation ${options.conversationId} not found`);\n }\n conversation = existing;\n } else {\n conversation = await this.store.create({\n status: 'active',\n provider: this.provider.name,\n model: this.provider.model,\n messages: [],\n workspacePath: this.context?.workspacePath,\n });\n }\n\n // Add user message\n await this.store.appendMessage(conversation.id, {\n role: 'user',\n content: options.content,\n status: 'completed',\n attachments: options.attachments,\n });\n\n // Build prompt from messages\n const prompt = this.buildPrompt(conversation, options);\n\n // Get the language model\n const model = this.provider.getModel();\n\n try {\n // Generate response\n const result = await generateText({\n model,\n prompt,\n system: this.systemPrompt,\n });\n\n // Save assistant message\n const assistantMessage = await this.store.appendMessage(conversation.id, {\n role: 'assistant',\n content: result.text,\n status: 'completed',\n });\n\n // Refresh conversation\n const updatedConversation = await this.store.get(conversation.id);\n if (!updatedConversation) {\n throw new Error('Conversation lost after update');\n }\n\n return {\n message: assistantMessage,\n conversation: updatedConversation,\n };\n } catch (error) {\n // Save error message\n await this.store.appendMessage(conversation.id, {\n role: 'assistant',\n content: '',\n status: 'error',\n error: {\n code: 'generation_failed',\n message: error instanceof Error ? 
error.message : String(error),\n },\n });\n\n throw error;\n }\n }\n\n /**\n * Send a message and get a streaming response\n */\n async stream(options: SendMessageOptions): Promise<StreamMessageResult> {\n // Get or create conversation\n let conversation: ChatConversation;\n if (options.conversationId) {\n const existing = await this.store.get(options.conversationId);\n if (!existing) {\n throw new Error(`Conversation ${options.conversationId} not found`);\n }\n conversation = existing;\n } else {\n conversation = await this.store.create({\n status: 'active',\n provider: this.provider.name,\n model: this.provider.model,\n messages: [],\n workspacePath: this.context?.workspacePath,\n });\n }\n\n // Add user message\n await this.store.appendMessage(conversation.id, {\n role: 'user',\n content: options.content,\n status: 'completed',\n attachments: options.attachments,\n });\n\n // Create placeholder for assistant message\n const assistantMessage = await this.store.appendMessage(conversation.id, {\n role: 'assistant',\n content: '',\n status: 'streaming',\n });\n\n // Build prompt\n const prompt = this.buildPrompt(conversation, options);\n\n // Get the language model\n const model = this.provider.getModel();\n\n // Create async generator for streaming\n const self = {\n systemPrompt: this.systemPrompt,\n store: this.store,\n };\n async function* streamGenerator(): AsyncIterable<ChatStreamChunk> {\n let fullContent = '';\n\n try {\n const result = streamText({\n model,\n prompt,\n system: self.systemPrompt,\n });\n\n for await (const chunk of result.textStream) {\n fullContent += chunk;\n yield { type: 'text', content: chunk };\n }\n\n // Update message with final content\n await self.store.updateMessage(conversation.id, assistantMessage.id, {\n content: fullContent,\n status: 'completed',\n });\n\n yield {\n type: 'done',\n };\n } catch (error) {\n await self.store.updateMessage(conversation.id, assistantMessage.id, {\n content: fullContent,\n status: 'error',\n error: {\n code: 'stream_failed',\n message: error instanceof Error ? error.message : String(error),\n },\n });\n\n yield {\n type: 'error',\n error: {\n code: 'stream_failed',\n message: error instanceof Error ? error.message : String(error),\n },\n };\n }\n }\n\n return {\n conversationId: conversation.id,\n messageId: assistantMessage.id,\n stream: streamGenerator(),\n };\n }\n\n /**\n * Get a conversation by ID\n */\n async getConversation(\n conversationId: string\n ): Promise<ChatConversation | null> {\n return this.store.get(conversationId);\n }\n\n /**\n * List conversations\n */\n async listConversations(options?: {\n limit?: number;\n offset?: number;\n }): Promise<ChatConversation[]> {\n return this.store.list({\n status: 'active',\n ...options,\n });\n }\n\n /**\n * Delete a conversation\n */\n async deleteConversation(conversationId: string): Promise<boolean> {\n return this.store.delete(conversationId);\n }\n\n /**\n * Build prompt string for LLM\n */\n private buildPrompt(\n conversation: ChatConversation,\n options: SendMessageOptions\n ): string {\n let prompt = '';\n\n // Add conversation history (limited)\n const historyStart = Math.max(\n 0,\n conversation.messages.length - this.maxHistoryMessages\n );\n for (let i = historyStart; i < conversation.messages.length; i++) {\n const msg = conversation.messages[i];\n if (!msg) continue;\n if (msg.role === 'user' || msg.role === 'assistant') {\n prompt += `${msg.role === 'user' ? 
'User' : 'Assistant'}: ${msg.content}\\n\\n`;\n }\n }\n\n // Add current message with attachments\n let content = options.content;\n if (options.attachments?.length) {\n const attachmentInfo = options.attachments\n .map((a) => {\n if (a.type === 'file' || a.type === 'code') {\n return `\\n\\n### ${a.name}\\n\\`\\`\\`\\n${a.content}\\n\\`\\`\\``;\n }\n return `\\n\\n[Attachment: ${a.name}]`;\n })\n .join('');\n content += attachmentInfo;\n }\n\n prompt += `User: ${content}\\n\\nAssistant:`;\n\n return prompt;\n }\n}\n\n/**\n * Create a chat service with the given configuration\n */\nexport function createChatService(config: ChatServiceConfig): ChatService {\n return new ChatService(config);\n}\n"],"mappings":";;;;;;;;;;AAqCA,MAAM,wBAAwB;;;;;;;;;;;;;;;;;AAkB9B,IAAa,cAAb,MAAyB;CACvB,AAAiB;CACjB,AAAiB;CACjB,AAAiB;CACjB,AAAiB;CACjB,AAAiB;CACjB,AAAiB;CAKjB,YAAY,QAA2B;AACrC,OAAK,WAAW,OAAO;AACvB,OAAK,UAAU,OAAO;AACtB,OAAK,QAAQ,OAAO,SAAS,IAAI,2BAA2B;AAC5D,OAAK,eAAe,OAAO,gBAAgB;AAC3C,OAAK,qBAAqB,OAAO,sBAAsB;AACvD,OAAK,UAAU,OAAO;;;;;CAMxB,MAAM,KAAK,SAAyD;EAElE,IAAIA;AACJ,MAAI,QAAQ,gBAAgB;GAC1B,MAAM,WAAW,MAAM,KAAK,MAAM,IAAI,QAAQ,eAAe;AAC7D,OAAI,CAAC,SACH,OAAM,IAAI,MAAM,gBAAgB,QAAQ,eAAe,YAAY;AAErE,kBAAe;QAEf,gBAAe,MAAM,KAAK,MAAM,OAAO;GACrC,QAAQ;GACR,UAAU,KAAK,SAAS;GACxB,OAAO,KAAK,SAAS;GACrB,UAAU,EAAE;GACZ,eAAe,KAAK,SAAS;GAC9B,CAAC;AAIJ,QAAM,KAAK,MAAM,cAAc,aAAa,IAAI;GAC9C,MAAM;GACN,SAAS,QAAQ;GACjB,QAAQ;GACR,aAAa,QAAQ;GACtB,CAAC;EAGF,MAAM,SAAS,KAAK,YAAY,cAAc,QAAQ;EAGtD,MAAM,QAAQ,KAAK,SAAS,UAAU;AAEtC,MAAI;GAEF,MAAM,SAAS,MAAM,aAAa;IAChC;IACA;IACA,QAAQ,KAAK;IACd,CAAC;GAGF,MAAM,mBAAmB,MAAM,KAAK,MAAM,cAAc,aAAa,IAAI;IACvE,MAAM;IACN,SAAS,OAAO;IAChB,QAAQ;IACT,CAAC;GAGF,MAAM,sBAAsB,MAAM,KAAK,MAAM,IAAI,aAAa,GAAG;AACjE,OAAI,CAAC,oBACH,OAAM,IAAI,MAAM,iCAAiC;AAGnD,UAAO;IACL,SAAS;IACT,cAAc;IACf;WACM,OAAO;AAEd,SAAM,KAAK,MAAM,cAAc,aAAa,IAAI;IAC9C,MAAM;IACN,SAAS;IACT,QAAQ;IACR,OAAO;KACL,MAAM;KACN,SAAS,iBAAiB,QAAQ,MAAM,UAAU,OAAO,MAAM;KAChE;IACF,CAAC;AAEF,SAAM;;;;;;CAOV,MAAM,OAAO,SAA2D;EAEtE,IAAIA;AACJ,MAAI,QAAQ,gBAAgB;GAC1B,MAAM,WAAW,MAAM,KAAK,MAAM,IAAI,QAAQ,eAAe;AAC7D,OAAI,CAAC,SACH,OAAM,IAAI,MAAM,gBAAgB,QAAQ,eAAe,YAAY;AAErE,kBAAe;QAEf,gBAAe,MAAM,KAAK,MAAM,OAAO;GACrC,QAAQ;GACR,UAAU,KAAK,SAAS;GACxB,OAAO,KAAK,SAAS;GACrB,UAAU,EAAE;GACZ,eAAe,KAAK,SAAS;GAC9B,CAAC;AAIJ,QAAM,KAAK,MAAM,cAAc,aAAa,IAAI;GAC9C,MAAM;GACN,SAAS,QAAQ;GACjB,QAAQ;GACR,aAAa,QAAQ;GACtB,CAAC;EAGF,MAAM,mBAAmB,MAAM,KAAK,MAAM,cAAc,aAAa,IAAI;GACvE,MAAM;GACN,SAAS;GACT,QAAQ;GACT,CAAC;EAGF,MAAM,SAAS,KAAK,YAAY,cAAc,QAAQ;EAGtD,MAAM,QAAQ,KAAK,SAAS,UAAU;EAGtC,MAAM,OAAO;GACX,cAAc,KAAK;GACnB,OAAO,KAAK;GACb;EACD,gBAAgB,kBAAkD;GAChE,IAAI,cAAc;AAElB,OAAI;IACF,MAAM,SAAS,WAAW;KACxB;KACA;KACA,QAAQ,KAAK;KACd,CAAC;AAEF,eAAW,MAAM,SAAS,OAAO,YAAY;AAC3C,oBAAe;AACf,WAAM;MAAE,MAAM;MAAQ,SAAS;MAAO;;AAIxC,UAAM,KAAK,MAAM,cAAc,aAAa,IAAI,iBAAiB,IAAI;KACnE,SAAS;KACT,QAAQ;KACT,CAAC;AAEF,UAAM,EACJ,MAAM,QACP;YACM,OAAO;AACd,UAAM,KAAK,MAAM,cAAc,aAAa,IAAI,iBAAiB,IAAI;KACnE,SAAS;KACT,QAAQ;KACR,OAAO;MACL,MAAM;MACN,SAAS,iBAAiB,QAAQ,MAAM,UAAU,OAAO,MAAM;MAChE;KACF,CAAC;AAEF,UAAM;KACJ,MAAM;KACN,OAAO;MACL,MAAM;MACN,SAAS,iBAAiB,QAAQ,MAAM,UAAU,OAAO,MAAM;MAChE;KACF;;;AAIL,SAAO;GACL,gBAAgB,aAAa;GAC7B,WAAW,iBAAiB;GAC5B,QAAQ,iBAAiB;GAC1B;;;;;CAMH,MAAM,gBACJ,gBACkC;AAClC,SAAO,KAAK,MAAM,IAAI,eAAe;;;;;CAMvC,MAAM,kBAAkB,SAGQ;AAC9B,SAAO,KAAK,MAAM,KAAK;GACrB,QAAQ;GACR,GAAG;GACJ,CAAC;;;;;CAMJ,MAAM,mBAAmB,gBAA0C;AACjE,SAAO,KAAK,MAAM,OAAO,eAAe;;;;;CAM1C,AAAQ,YACN,cACA,SACQ;EACR,IAAI,SAAS;EAGb,MAAM,eAAe,KAAK,IACxB,GACA,aAAa,SAAS,SAAS,KAAK,mBACrC;AACD,OAAK,IAAI,IAAI,cAAc,IAAI,aAAa,SAAS,QAAQ,KAAK;GAChE,MAAM,MAAM,aAAa,SAAS;A
AClC,OAAI,CAAC,IAAK;AACV,OAAI,IAAI,SAAS,UAAU,IAAI,SAAS,YACtC,WAAU,GAAG,IAAI,SAAS,SAAS,SAAS,YAAY,IAAI,IAAI,QAAQ;;EAK5E,IAAI,UAAU,QAAQ;AACtB,MAAI,QAAQ,aAAa,QAAQ;GAC/B,MAAM,iBAAiB,QAAQ,YAC5B,KAAK,MAAM;AACV,QAAI,EAAE,SAAS,UAAU,EAAE,SAAS,OAClC,QAAO,WAAW,EAAE,KAAK,YAAY,EAAE,QAAQ;AAEjD,WAAO,oBAAoB,EAAE,KAAK;KAClC,CACD,KAAK,GAAG;AACX,cAAW;;AAGb,YAAU,SAAS,QAAQ;AAE3B,SAAO;;;;;;AAOX,SAAgB,kBAAkB,QAAwC;AACxE,QAAO,IAAI,YAAY,OAAO"}
@@ -0,0 +1,19 @@
+ 'use client';
+
+ import { cn } from "../ui-kit-core/dist/utils.js";
+ import "react";
+ import { jsx } from "react/jsx-runtime";
+ import * as LabelPrimitive from "@radix-ui/react-label";
+
+ //#region ../../libs/ui-kit-web/dist/ui/label.js
+ function Label({ className, ...props }) {
+ return /* @__PURE__ */ jsx(LabelPrimitive.Root, {
+ "data-slot": "label",
+ className: cn("flex items-center gap-2 text-sm leading-none font-medium select-none group-data-[disabled=true]:pointer-events-none group-data-[disabled=true]:opacity-50 peer-disabled:cursor-not-allowed peer-disabled:opacity-50", className),
+ ...props
+ });
+ }
+
+ //#endregion
+ export { Label };
+ //# sourceMappingURL=label.js.map
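The file above is a newly bundled copy of the ui-kit-web Label, a thin wrapper around @radix-ui/react-label that forwards className and the remaining props to LabelPrimitive.Root (which renders a native label element). A hypothetical usage sketch, not taken from this package; the field name and id below are illustrative:

  import { Label } from "../../libs/ui-kit-web/dist/ui/label.js";

  export function ApiKeyField() {
    return (
      <div className="flex flex-col gap-1.5">
        <Label htmlFor="api-key">API key</Label>
        <input id="api-key" type="password" />
      </div>
    );
  }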
@@ -0,0 +1 @@
+ {"version":3,"file":"label.js","names":[],"sources":["../../../../../../../libs/ui-kit-web/dist/ui/label.js"],"sourcesContent":["'use client';\n\nimport { cn } from \"../ui-kit-core/dist/utils.js\";\nimport \"react\";\nimport { jsx } from \"react/jsx-runtime\";\nimport * as LabelPrimitive from \"@radix-ui/react-label\";\n\n//#region ui/label.tsx\nfunction Label({ className, ...props }) {\n\treturn /* @__PURE__ */ jsx(LabelPrimitive.Root, {\n\t\t\"data-slot\": \"label\",\n\t\tclassName: cn(\"flex items-center gap-2 text-sm leading-none font-medium select-none group-data-[disabled=true]:pointer-events-none group-data-[disabled=true]:opacity-50 peer-disabled:cursor-not-allowed peer-disabled:opacity-50\", className),\n\t\t...props\n\t});\n}\n\n//#endregion\nexport { Label };\n//# sourceMappingURL=label.js.map"],"mappings":";;;;;;;;AAQA,SAAS,MAAM,EAAE,WAAW,GAAG,SAAS;AACvC,QAAuB,oBAAI,eAAe,MAAM;EAC/C,aAAa;EACb,WAAW,GAAG,uNAAuN,UAAU;EAC/O,GAAG;EACH,CAAC"}
@@ -1 +1 @@
- {"version":3,"file":"ModelPicker.d.ts","names":[],"sources":["../../../src/presentation/components/ModelPicker.tsx"],"sourcesContent":[],"mappings":";;;;UAqBiB,cAAA;YACL;;EADK,IAAA,EAGT,YAHuB;AAM/B;AAES,UAFQ,gBAAA,CAER;EAEW;EAGN,KAAA,EALL,cAKK;EAEJ;EAAY,QAAA,EAAA,CAAA,KAAA,EALF,cAKE,EAAA,GAAA,IAAA;EAqCN;EACd,kBAAA,CAAA,EAAA;IACA,QAAA,EAzCY,YAyCZ;IACA,SAAA,EAAA,OAAA;IACA,IAAA,EAzCQ,YAyCR;IACA,MAAA,CAAA,EAAA,MAAA;EACC,CAAA,EAAA;EAAgB;EAAA,SAAA,CAAA,EAAA,MAAA;;;;;;;iBANH,WAAA;;;;;;GAMb,mBAAgB,kBAAA,CAAA,GAAA,CAAA"}
+ {"version":3,"file":"ModelPicker.d.ts","names":[],"sources":["../../../src/presentation/components/ModelPicker.tsx"],"sourcesContent":[],"mappings":";;;;UAsBiB,cAAA;YACL;;EADK,IAAA,EAGT,YAHuB;AAM/B;AAES,UAFQ,gBAAA,CAER;EAEW;EAGN,KAAA,EALL,cAKK;EAEJ;EAAY,QAAA,EAAA,CAAA,KAAA,EALF,cAKE,EAAA,GAAA,IAAA;EAqCN;EACd,kBAAA,CAAA,EAAA;IACA,QAAA,EAzCY,YAyCZ;IACA,SAAA,EAAA,OAAA;IACA,IAAA,EAzCQ,YAyCR;IACA,MAAA,CAAA,EAAA,MAAA;EACC,CAAA,EAAA;EAAgB;EAAA,SAAA,CAAA,EAAA,MAAA;;;;;;;iBANH,WAAA;;;;;;GAMb,mBAAgB,kBAAA,CAAA,GAAA,CAAA"}
@@ -6,6 +6,7 @@ import { cn } from "../../libs/ui-kit-web/dist/ui/utils.js";
  import { Button } from "../../libs/design-system/dist/components/atoms/Button.js";
  import { Select, SelectContent, SelectItem, SelectTrigger, SelectValue } from "../../libs/ui-kit-web/dist/ui/select.js";
  import { Badge } from "../../libs/ui-kit-web/dist/ui/badge.js";
+ import { Label } from "../../libs/ui-kit-web/dist/ui/label.js";
  import * as React from "react";
  import { jsx, jsxs } from "react/jsx-runtime";
  import { Bot, Cloud, Cpu, Sparkles } from "lucide-react";
@@ -120,11 +121,13 @@ function ModelPicker({ value, onChange, availableProviders, className, compact =
  children: [
  /* @__PURE__ */ jsxs("div", {
  className: "flex flex-col gap-1.5",
- children: [/* @__PURE__ */ jsx("label", {
+ children: [/* @__PURE__ */ jsx(Label, {
+ htmlFor: "provider-selection",
  className: "text-sm font-medium",
  children: "Provider"
  }), /* @__PURE__ */ jsx("div", {
  className: "flex flex-wrap gap-2",
+ id: "provider-selection",
  children: providers.map((p) => /* @__PURE__ */ jsxs(Button, {
  variant: value.provider === p.provider ? "default" : "outline",
  size: "sm",
@@ -145,10 +148,12 @@ function ModelPicker({ value, onChange, availableProviders, className, compact =
  }),
  /* @__PURE__ */ jsxs("div", {
  className: "flex flex-col gap-1.5",
- children: [/* @__PURE__ */ jsx("label", {
+ children: [/* @__PURE__ */ jsx(Label, {
+ htmlFor: "model-picker",
  className: "text-sm font-medium",
  children: "Model"
  }), /* @__PURE__ */ jsxs(Select, {
+ name: "model-picker",
  value: value.model,
  onValueChange: handleModelChange,
  children: [/* @__PURE__ */ jsx(SelectTrigger, { children: /* @__PURE__ */ jsx(SelectValue, { placeholder: "Select a model" }) }), /* @__PURE__ */ jsx(SelectContent, { children: models.map((m) => /* @__PURE__ */ jsx(SelectItem, {
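The model picker section follows the same pattern in the embedded source: the Label is wired to the Select through htmlFor="model-picker" plus a matching name prop on the Select:

  <Label htmlFor="model-picker" className="text-sm font-medium">
    Model
  </Label>
  <Select name="model-picker" value={value.model} onValueChange={handleModelChange}>
    {/* trigger and model items unchanged */}
  </Select>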
@@ -1 +1 @@
- {"version":3,"file":"ModelPicker.js","names":["PROVIDER_ICONS: Record<ProviderName, React.ReactNode>","PROVIDER_NAMES: Record<ProviderName, string>","MODE_BADGES: Record<\n ProviderMode,\n { label: string; variant: 'default' | 'secondary' | 'outline' }\n>","models: ModelInfo[]"],"sources":["../../../src/presentation/components/ModelPicker.tsx"],"sourcesContent":["'use client';\n\nimport * as React from 'react';\nimport { cn } from '@lssm/lib.ui-kit-web/ui/utils';\nimport { Button } from '@lssm/lib.design-system';\nimport {\n Select,\n SelectContent,\n SelectItem,\n SelectTrigger,\n SelectValue,\n} from '@lssm/lib.ui-kit-web/ui/select';\nimport { Badge } from '@lssm/lib.ui-kit-web/ui/badge';\nimport { Bot, Cpu, Cloud, Sparkles } from 'lucide-react';\nimport {\n type ProviderName,\n type ProviderMode,\n type ModelInfo,\n getModelsForProvider,\n} from '@lssm/lib.ai-providers';\n\nexport interface ModelSelection {\n provider: ProviderName;\n model: string;\n mode: ProviderMode;\n}\n\nexport interface ModelPickerProps {\n /** Currently selected provider/model */\n value: ModelSelection;\n /** Called when selection changes */\n onChange: (value: ModelSelection) => void;\n /** Available providers (with availability info) */\n availableProviders?: {\n provider: ProviderName;\n available: boolean;\n mode: ProviderMode;\n reason?: string;\n }[];\n /** Additional class name */\n className?: string;\n /** Compact mode (smaller) */\n compact?: boolean;\n}\n\nconst PROVIDER_ICONS: Record<ProviderName, React.ReactNode> = {\n ollama: <Cpu className=\"h-4 w-4\" />,\n openai: <Bot className=\"h-4 w-4\" />,\n anthropic: <Sparkles className=\"h-4 w-4\" />,\n mistral: <Cloud className=\"h-4 w-4\" />,\n gemini: <Sparkles className=\"h-4 w-4\" />,\n};\n\nconst PROVIDER_NAMES: Record<ProviderName, string> = {\n ollama: 'Ollama (Local)',\n openai: 'OpenAI',\n anthropic: 'Anthropic',\n mistral: 'Mistral',\n gemini: 'Google Gemini',\n};\n\nconst MODE_BADGES: Record<\n ProviderMode,\n { label: string; variant: 'default' | 'secondary' | 'outline' }\n> = {\n local: { label: 'Local', variant: 'secondary' },\n byok: { label: 'BYOK', variant: 'outline' },\n managed: { label: 'Managed', variant: 'default' },\n};\n\n/**\n * Model picker component for selecting AI provider and model\n */\nexport function ModelPicker({\n value,\n onChange,\n availableProviders,\n className,\n compact = false,\n}: ModelPickerProps) {\n const providers = availableProviders ?? [\n { provider: 'ollama' as const, available: true, mode: 'local' as const },\n { provider: 'openai' as const, available: true, mode: 'byok' as const },\n { provider: 'anthropic' as const, available: true, mode: 'byok' as const },\n { provider: 'mistral' as const, available: true, mode: 'byok' as const },\n { provider: 'gemini' as const, available: true, mode: 'byok' as const },\n ];\n\n const models: ModelInfo[] = getModelsForProvider(value.provider);\n const selectedModel = models.find((m) => m.id === value.model);\n\n const handleProviderChange = React.useCallback(\n (providerName: string) => {\n const provider = providerName as ProviderName;\n const providerInfo = providers.find((p) => p.provider === provider);\n const providerModels = getModelsForProvider(provider);\n const defaultModel = providerModels[0]?.id ?? '';\n\n onChange({\n provider,\n model: defaultModel,\n mode: providerInfo?.mode ?? 
'byok',\n });\n },\n [onChange, providers]\n );\n\n const handleModelChange = React.useCallback(\n (modelId: string) => {\n onChange({\n ...value,\n model: modelId,\n });\n },\n [onChange, value]\n );\n\n if (compact) {\n return (\n <div className={cn('flex items-center gap-2', className)}>\n <Select value={value.provider} onValueChange={handleProviderChange}>\n <SelectTrigger className=\"w-[140px]\">\n <SelectValue />\n </SelectTrigger>\n <SelectContent>\n {providers.map((p) => (\n <SelectItem\n key={p.provider}\n value={p.provider}\n disabled={!p.available}\n >\n <div className=\"flex items-center gap-2\">\n {PROVIDER_ICONS[p.provider]}\n <span>{PROVIDER_NAMES[p.provider]}</span>\n </div>\n </SelectItem>\n ))}\n </SelectContent>\n </Select>\n\n <Select value={value.model} onValueChange={handleModelChange}>\n <SelectTrigger className=\"w-[160px]\">\n <SelectValue />\n </SelectTrigger>\n <SelectContent>\n {models.map((m) => (\n <SelectItem key={m.id} value={m.id}>\n {m.name}\n </SelectItem>\n ))}\n </SelectContent>\n </Select>\n </div>\n );\n }\n\n return (\n <div className={cn('flex flex-col gap-3', className)}>\n {/* Provider selection */}\n <div className=\"flex flex-col gap-1.5\">\n <label className=\"text-sm font-medium\">Provider</label>\n <div className=\"flex flex-wrap gap-2\">\n {providers.map((p) => (\n <Button\n key={p.provider}\n variant={value.provider === p.provider ? 'default' : 'outline'}\n size=\"sm\"\n onPress={() => p.available && handleProviderChange(p.provider)}\n disabled={!p.available}\n className={cn(!p.available && 'opacity-50')}\n >\n {PROVIDER_ICONS[p.provider]}\n <span>{PROVIDER_NAMES[p.provider]}</span>\n <Badge variant={MODE_BADGES[p.mode].variant} className=\"ml-1\">\n {MODE_BADGES[p.mode].label}\n </Badge>\n </Button>\n ))}\n </div>\n </div>\n\n {/* Model selection */}\n <div className=\"flex flex-col gap-1.5\">\n <label className=\"text-sm font-medium\">Model</label>\n <Select value={value.model} onValueChange={handleModelChange}>\n <SelectTrigger>\n <SelectValue placeholder=\"Select a model\" />\n </SelectTrigger>\n <SelectContent>\n {models.map((m) => (\n <SelectItem key={m.id} value={m.id}>\n <div className=\"flex items-center gap-2\">\n <span>{m.name}</span>\n <span className=\"text-muted-foreground text-xs\">\n {Math.round(m.contextWindow / 1000)}K\n </span>\n {m.capabilities.vision && (\n <Badge variant=\"outline\" className=\"text-xs\">\n Vision\n </Badge>\n )}\n {m.capabilities.reasoning && (\n <Badge variant=\"outline\" className=\"text-xs\">\n Reasoning\n </Badge>\n )}\n </div>\n </SelectItem>\n ))}\n </SelectContent>\n </Select>\n </div>\n\n {/* Model info */}\n {selectedModel && (\n <div className=\"text-muted-foreground flex flex-wrap gap-2 text-xs\">\n <span>\n Context: {Math.round(selectedModel.contextWindow / 1000)}K tokens\n </span>\n {selectedModel.capabilities.vision && <span>• Vision</span>}\n {selectedModel.capabilities.tools && <span>• Tools</span>}\n {selectedModel.capabilities.reasoning && <span>• Reasoning</span>}\n </div>\n )}\n </div>\n 
);\n}\n"],"mappings":";;;;;;;;;;;;;AA6CA,MAAMA,iBAAwD;CAC5D,QAAQ,oBAAC,OAAI,WAAU,YAAY;CACnC,QAAQ,oBAAC,OAAI,WAAU,YAAY;CACnC,WAAW,oBAAC,YAAS,WAAU,YAAY;CAC3C,SAAS,oBAAC,SAAM,WAAU,YAAY;CACtC,QAAQ,oBAAC,YAAS,WAAU,YAAY;CACzC;AAED,MAAMC,iBAA+C;CACnD,QAAQ;CACR,QAAQ;CACR,WAAW;CACX,SAAS;CACT,QAAQ;CACT;AAED,MAAMC,cAGF;CACF,OAAO;EAAE,OAAO;EAAS,SAAS;EAAa;CAC/C,MAAM;EAAE,OAAO;EAAQ,SAAS;EAAW;CAC3C,SAAS;EAAE,OAAO;EAAW,SAAS;EAAW;CAClD;;;;AAKD,SAAgB,YAAY,EAC1B,OACA,UACA,oBACA,WACA,UAAU,SACS;CACnB,MAAM,YAAY,sBAAsB;EACtC;GAAE,UAAU;GAAmB,WAAW;GAAM,MAAM;GAAkB;EACxE;GAAE,UAAU;GAAmB,WAAW;GAAM,MAAM;GAAiB;EACvE;GAAE,UAAU;GAAsB,WAAW;GAAM,MAAM;GAAiB;EAC1E;GAAE,UAAU;GAAoB,WAAW;GAAM,MAAM;GAAiB;EACxE;GAAE,UAAU;GAAmB,WAAW;GAAM,MAAM;GAAiB;EACxE;CAED,MAAMC,SAAsB,qBAAqB,MAAM,SAAS;CAChE,MAAM,gBAAgB,OAAO,MAAM,MAAM,EAAE,OAAO,MAAM,MAAM;CAE9D,MAAM,uBAAuB,MAAM,aAChC,iBAAyB;EACxB,MAAM,WAAW;EACjB,MAAM,eAAe,UAAU,MAAM,MAAM,EAAE,aAAa,SAAS;AAInE,WAAS;GACP;GACA,OALqB,qBAAqB,SAAS,CACjB,IAAI,MAAM;GAK5C,MAAM,cAAc,QAAQ;GAC7B,CAAC;IAEJ,CAAC,UAAU,UAAU,CACtB;CAED,MAAM,oBAAoB,MAAM,aAC7B,YAAoB;AACnB,WAAS;GACP,GAAG;GACH,OAAO;GACR,CAAC;IAEJ,CAAC,UAAU,MAAM,CAClB;AAED,KAAI,QACF,QACE,qBAAC;EAAI,WAAW,GAAG,2BAA2B,UAAU;aACtD,qBAAC;GAAO,OAAO,MAAM;GAAU,eAAe;cAC5C,oBAAC;IAAc,WAAU;cACvB,oBAAC,gBAAc;KACD,EAChB,oBAAC,2BACE,UAAU,KAAK,MACd,oBAAC;IAEC,OAAO,EAAE;IACT,UAAU,CAAC,EAAE;cAEb,qBAAC;KAAI,WAAU;gBACZ,eAAe,EAAE,WAClB,oBAAC,oBAAM,eAAe,EAAE,YAAiB;MACrC;MAPD,EAAE,SAQI,CACb,GACY;IACT,EAET,qBAAC;GAAO,OAAO,MAAM;GAAO,eAAe;cACzC,oBAAC;IAAc,WAAU;cACvB,oBAAC,gBAAc;KACD,EAChB,oBAAC,2BACE,OAAO,KAAK,MACX,oBAAC;IAAsB,OAAO,EAAE;cAC7B,EAAE;MADY,EAAE,GAEN,CACb,GACY;IACT;GACL;AAIV,QACE,qBAAC;EAAI,WAAW,GAAG,uBAAuB,UAAU;;GAElD,qBAAC;IAAI,WAAU;eACb,oBAAC;KAAM,WAAU;eAAsB;MAAgB,EACvD,oBAAC;KAAI,WAAU;eACZ,UAAU,KAAK,MACd,qBAAC;MAEC,SAAS,MAAM,aAAa,EAAE,WAAW,YAAY;MACrD,MAAK;MACL,eAAe,EAAE,aAAa,qBAAqB,EAAE,SAAS;MAC9D,UAAU,CAAC,EAAE;MACb,WAAW,GAAG,CAAC,EAAE,aAAa,aAAa;;OAE1C,eAAe,EAAE;OAClB,oBAAC,oBAAM,eAAe,EAAE,YAAiB;OACzC,oBAAC;QAAM,SAAS,YAAY,EAAE,MAAM;QAAS,WAAU;kBACpD,YAAY,EAAE,MAAM;SACf;;QAXH,EAAE,SAYA,CACT;MACE;KACF;GAGN,qBAAC;IAAI,WAAU;eACb,oBAAC;KAAM,WAAU;eAAsB;MAAa,EACpD,qBAAC;KAAO,OAAO,MAAM;KAAO,eAAe;gBACzC,oBAAC,2BACC,oBAAC,eAAY,aAAY,mBAAmB,GAC9B,EAChB,oBAAC,2BACE,OAAO,KAAK,MACX,oBAAC;MAAsB,OAAO,EAAE;gBAC9B,qBAAC;OAAI,WAAU;;QACb,oBAAC,oBAAM,EAAE,OAAY;QACrB,qBAAC;SAAK,WAAU;oBACb,KAAK,MAAM,EAAE,gBAAgB,IAAK,EAAC;UAC/B;QACN,EAAE,aAAa,UACd,oBAAC;SAAM,SAAQ;SAAU,WAAU;mBAAU;UAErC;QAET,EAAE,aAAa,aACd,oBAAC;SAAM,SAAQ;SAAU,WAAU;mBAAU;UAErC;;QAEN;QAhBS,EAAE,GAiBN,CACb,GACY;MACT;KACL;GAGL,iBACC,qBAAC;IAAI,WAAU;;KACb,qBAAC;MAAK;MACM,KAAK,MAAM,cAAc,gBAAgB,IAAK;MAAC;SACpD;KACN,cAAc,aAAa,UAAU,oBAAC,oBAAK,aAAe;KAC1D,cAAc,aAAa,SAAS,oBAAC,oBAAK,YAAc;KACxD,cAAc,aAAa,aAAa,oBAAC,oBAAK,gBAAkB;;KAC7D;;GAEJ"}
+ {"version":3,"file":"ModelPicker.js","names":["PROVIDER_ICONS: Record<ProviderName, React.ReactNode>","PROVIDER_NAMES: Record<ProviderName, string>","MODE_BADGES: Record<\n ProviderMode,\n { label: string; variant: 'default' | 'secondary' | 'outline' }\n>","models: ModelInfo[]"],"sources":["../../../src/presentation/components/ModelPicker.tsx"],"sourcesContent":["'use client';\n\nimport * as React from 'react';\nimport { cn } from '@lssm/lib.ui-kit-web/ui/utils';\nimport { Button } from '@lssm/lib.design-system';\nimport {\n Select,\n SelectContent,\n SelectItem,\n SelectTrigger,\n SelectValue,\n} from '@lssm/lib.ui-kit-web/ui/select';\nimport { Badge } from '@lssm/lib.ui-kit-web/ui/badge';\nimport { Label } from '@lssm/lib.ui-kit-web/ui/label';\nimport { Bot, Cloud, Cpu, Sparkles } from 'lucide-react';\nimport {\n getModelsForProvider,\n type ModelInfo,\n type ProviderMode,\n type ProviderName,\n} from '@lssm/lib.ai-providers';\n\nexport interface ModelSelection {\n provider: ProviderName;\n model: string;\n mode: ProviderMode;\n}\n\nexport interface ModelPickerProps {\n /** Currently selected provider/model */\n value: ModelSelection;\n /** Called when selection changes */\n onChange: (value: ModelSelection) => void;\n /** Available providers (with availability info) */\n availableProviders?: {\n provider: ProviderName;\n available: boolean;\n mode: ProviderMode;\n reason?: string;\n }[];\n /** Additional class name */\n className?: string;\n /** Compact mode (smaller) */\n compact?: boolean;\n}\n\nconst PROVIDER_ICONS: Record<ProviderName, React.ReactNode> = {\n ollama: <Cpu className=\"h-4 w-4\" />,\n openai: <Bot className=\"h-4 w-4\" />,\n anthropic: <Sparkles className=\"h-4 w-4\" />,\n mistral: <Cloud className=\"h-4 w-4\" />,\n gemini: <Sparkles className=\"h-4 w-4\" />,\n};\n\nconst PROVIDER_NAMES: Record<ProviderName, string> = {\n ollama: 'Ollama (Local)',\n openai: 'OpenAI',\n anthropic: 'Anthropic',\n mistral: 'Mistral',\n gemini: 'Google Gemini',\n};\n\nconst MODE_BADGES: Record<\n ProviderMode,\n { label: string; variant: 'default' | 'secondary' | 'outline' }\n> = {\n local: { label: 'Local', variant: 'secondary' },\n byok: { label: 'BYOK', variant: 'outline' },\n managed: { label: 'Managed', variant: 'default' },\n};\n\n/**\n * Model picker component for selecting AI provider and model\n */\nexport function ModelPicker({\n value,\n onChange,\n availableProviders,\n className,\n compact = false,\n}: ModelPickerProps) {\n const providers = availableProviders ?? [\n { provider: 'ollama' as const, available: true, mode: 'local' as const },\n { provider: 'openai' as const, available: true, mode: 'byok' as const },\n { provider: 'anthropic' as const, available: true, mode: 'byok' as const },\n { provider: 'mistral' as const, available: true, mode: 'byok' as const },\n { provider: 'gemini' as const, available: true, mode: 'byok' as const },\n ];\n\n const models: ModelInfo[] = getModelsForProvider(value.provider);\n const selectedModel = models.find((m) => m.id === value.model);\n\n const handleProviderChange = React.useCallback(\n (providerName: string) => {\n const provider = providerName as ProviderName;\n const providerInfo = providers.find((p) => p.provider === provider);\n const providerModels = getModelsForProvider(provider);\n const defaultModel = providerModels[0]?.id ?? '';\n\n onChange({\n provider,\n model: defaultModel,\n mode: providerInfo?.mode ?? 
'byok',\n });\n },\n [onChange, providers]\n );\n\n const handleModelChange = React.useCallback(\n (modelId: string) => {\n onChange({\n ...value,\n model: modelId,\n });\n },\n [onChange, value]\n );\n\n if (compact) {\n return (\n <div className={cn('flex items-center gap-2', className)}>\n <Select value={value.provider} onValueChange={handleProviderChange}>\n <SelectTrigger className=\"w-[140px]\">\n <SelectValue />\n </SelectTrigger>\n <SelectContent>\n {providers.map((p) => (\n <SelectItem\n key={p.provider}\n value={p.provider}\n disabled={!p.available}\n >\n <div className=\"flex items-center gap-2\">\n {PROVIDER_ICONS[p.provider]}\n <span>{PROVIDER_NAMES[p.provider]}</span>\n </div>\n </SelectItem>\n ))}\n </SelectContent>\n </Select>\n\n <Select value={value.model} onValueChange={handleModelChange}>\n <SelectTrigger className=\"w-[160px]\">\n <SelectValue />\n </SelectTrigger>\n <SelectContent>\n {models.map((m) => (\n <SelectItem key={m.id} value={m.id}>\n {m.name}\n </SelectItem>\n ))}\n </SelectContent>\n </Select>\n </div>\n );\n }\n\n return (\n <div className={cn('flex flex-col gap-3', className)}>\n {/* Provider selection */}\n <div className=\"flex flex-col gap-1.5\">\n <Label htmlFor=\"provider-selection\" className=\"text-sm font-medium\">\n Provider\n </Label>\n <div className=\"flex flex-wrap gap-2\" id=\"provider-selection\">\n {providers.map((p) => (\n <Button\n key={p.provider}\n variant={value.provider === p.provider ? 'default' : 'outline'}\n size=\"sm\"\n onPress={() => p.available && handleProviderChange(p.provider)}\n disabled={!p.available}\n className={cn(!p.available && 'opacity-50')}\n >\n {PROVIDER_ICONS[p.provider]}\n <span>{PROVIDER_NAMES[p.provider]}</span>\n <Badge variant={MODE_BADGES[p.mode].variant} className=\"ml-1\">\n {MODE_BADGES[p.mode].label}\n </Badge>\n </Button>\n ))}\n </div>\n </div>\n\n {/* Model selection */}\n <div className=\"flex flex-col gap-1.5\">\n <Label htmlFor=\"model-picker\" className=\"text-sm font-medium\">\n Model\n </Label>\n <Select\n name=\"model-picker\"\n value={value.model}\n onValueChange={handleModelChange}\n >\n <SelectTrigger>\n <SelectValue placeholder=\"Select a model\" />\n </SelectTrigger>\n <SelectContent>\n {models.map((m) => (\n <SelectItem key={m.id} value={m.id}>\n <div className=\"flex items-center gap-2\">\n <span>{m.name}</span>\n <span className=\"text-muted-foreground text-xs\">\n {Math.round(m.contextWindow / 1000)}K\n </span>\n {m.capabilities.vision && (\n <Badge variant=\"outline\" className=\"text-xs\">\n Vision\n </Badge>\n )}\n {m.capabilities.reasoning && (\n <Badge variant=\"outline\" className=\"text-xs\">\n Reasoning\n </Badge>\n )}\n </div>\n </SelectItem>\n ))}\n </SelectContent>\n </Select>\n </div>\n\n {/* Model info */}\n {selectedModel && (\n <div className=\"text-muted-foreground flex flex-wrap gap-2 text-xs\">\n <span>\n Context: {Math.round(selectedModel.contextWindow / 1000)}K tokens\n </span>\n {selectedModel.capabilities.vision && <span>• Vision</span>}\n {selectedModel.capabilities.tools && <span>• Tools</span>}\n {selectedModel.capabilities.reasoning && <span>• Reasoning</span>}\n </div>\n )}\n </div>\n 
);\n}\n"],"mappings":";;;;;;;;;;;;;;AA8CA,MAAMA,iBAAwD;CAC5D,QAAQ,oBAAC,OAAI,WAAU,YAAY;CACnC,QAAQ,oBAAC,OAAI,WAAU,YAAY;CACnC,WAAW,oBAAC,YAAS,WAAU,YAAY;CAC3C,SAAS,oBAAC,SAAM,WAAU,YAAY;CACtC,QAAQ,oBAAC,YAAS,WAAU,YAAY;CACzC;AAED,MAAMC,iBAA+C;CACnD,QAAQ;CACR,QAAQ;CACR,WAAW;CACX,SAAS;CACT,QAAQ;CACT;AAED,MAAMC,cAGF;CACF,OAAO;EAAE,OAAO;EAAS,SAAS;EAAa;CAC/C,MAAM;EAAE,OAAO;EAAQ,SAAS;EAAW;CAC3C,SAAS;EAAE,OAAO;EAAW,SAAS;EAAW;CAClD;;;;AAKD,SAAgB,YAAY,EAC1B,OACA,UACA,oBACA,WACA,UAAU,SACS;CACnB,MAAM,YAAY,sBAAsB;EACtC;GAAE,UAAU;GAAmB,WAAW;GAAM,MAAM;GAAkB;EACxE;GAAE,UAAU;GAAmB,WAAW;GAAM,MAAM;GAAiB;EACvE;GAAE,UAAU;GAAsB,WAAW;GAAM,MAAM;GAAiB;EAC1E;GAAE,UAAU;GAAoB,WAAW;GAAM,MAAM;GAAiB;EACxE;GAAE,UAAU;GAAmB,WAAW;GAAM,MAAM;GAAiB;EACxE;CAED,MAAMC,SAAsB,qBAAqB,MAAM,SAAS;CAChE,MAAM,gBAAgB,OAAO,MAAM,MAAM,EAAE,OAAO,MAAM,MAAM;CAE9D,MAAM,uBAAuB,MAAM,aAChC,iBAAyB;EACxB,MAAM,WAAW;EACjB,MAAM,eAAe,UAAU,MAAM,MAAM,EAAE,aAAa,SAAS;AAInE,WAAS;GACP;GACA,OALqB,qBAAqB,SAAS,CACjB,IAAI,MAAM;GAK5C,MAAM,cAAc,QAAQ;GAC7B,CAAC;IAEJ,CAAC,UAAU,UAAU,CACtB;CAED,MAAM,oBAAoB,MAAM,aAC7B,YAAoB;AACnB,WAAS;GACP,GAAG;GACH,OAAO;GACR,CAAC;IAEJ,CAAC,UAAU,MAAM,CAClB;AAED,KAAI,QACF,QACE,qBAAC;EAAI,WAAW,GAAG,2BAA2B,UAAU;aACtD,qBAAC;GAAO,OAAO,MAAM;GAAU,eAAe;cAC5C,oBAAC;IAAc,WAAU;cACvB,oBAAC,gBAAc;KACD,EAChB,oBAAC,2BACE,UAAU,KAAK,MACd,oBAAC;IAEC,OAAO,EAAE;IACT,UAAU,CAAC,EAAE;cAEb,qBAAC;KAAI,WAAU;gBACZ,eAAe,EAAE,WAClB,oBAAC,oBAAM,eAAe,EAAE,YAAiB;MACrC;MAPD,EAAE,SAQI,CACb,GACY;IACT,EAET,qBAAC;GAAO,OAAO,MAAM;GAAO,eAAe;cACzC,oBAAC;IAAc,WAAU;cACvB,oBAAC,gBAAc;KACD,EAChB,oBAAC,2BACE,OAAO,KAAK,MACX,oBAAC;IAAsB,OAAO,EAAE;cAC7B,EAAE;MADY,EAAE,GAEN,CACb,GACY;IACT;GACL;AAIV,QACE,qBAAC;EAAI,WAAW,GAAG,uBAAuB,UAAU;;GAElD,qBAAC;IAAI,WAAU;eACb,oBAAC;KAAM,SAAQ;KAAqB,WAAU;eAAsB;MAE5D,EACR,oBAAC;KAAI,WAAU;KAAuB,IAAG;eACtC,UAAU,KAAK,MACd,qBAAC;MAEC,SAAS,MAAM,aAAa,EAAE,WAAW,YAAY;MACrD,MAAK;MACL,eAAe,EAAE,aAAa,qBAAqB,EAAE,SAAS;MAC9D,UAAU,CAAC,EAAE;MACb,WAAW,GAAG,CAAC,EAAE,aAAa,aAAa;;OAE1C,eAAe,EAAE;OAClB,oBAAC,oBAAM,eAAe,EAAE,YAAiB;OACzC,oBAAC;QAAM,SAAS,YAAY,EAAE,MAAM;QAAS,WAAU;kBACpD,YAAY,EAAE,MAAM;SACf;;QAXH,EAAE,SAYA,CACT;MACE;KACF;GAGN,qBAAC;IAAI,WAAU;eACb,oBAAC;KAAM,SAAQ;KAAe,WAAU;eAAsB;MAEtD,EACR,qBAAC;KACC,MAAK;KACL,OAAO,MAAM;KACb,eAAe;gBAEf,oBAAC,2BACC,oBAAC,eAAY,aAAY,mBAAmB,GAC9B,EAChB,oBAAC,2BACE,OAAO,KAAK,MACX,oBAAC;MAAsB,OAAO,EAAE;gBAC9B,qBAAC;OAAI,WAAU;;QACb,oBAAC,oBAAM,EAAE,OAAY;QACrB,qBAAC;SAAK,WAAU;oBACb,KAAK,MAAM,EAAE,gBAAgB,IAAK,EAAC;UAC/B;QACN,EAAE,aAAa,UACd,oBAAC;SAAM,SAAQ;SAAU,WAAU;mBAAU;UAErC;QAET,EAAE,aAAa,aACd,oBAAC;SAAM,SAAQ;SAAU,WAAU;mBAAU;UAErC;;QAEN;QAhBS,EAAE,GAiBN,CACb,GACY;MACT;KACL;GAGL,iBACC,qBAAC;IAAI,WAAU;;KACb,qBAAC;MAAK;MACM,KAAK,MAAM,cAAc,gBAAgB,IAAK;MAAC;SACpD;KACN,cAAc,aAAa,UAAU,oBAAC,oBAAK,aAAe;KAC1D,cAAc,aAAa,SAAS,oBAAC,oBAAK,YAAc;KACxD,cAAc,aAAa,aAAa,oBAAC,oBAAK,gBAAkB;;KAC7D;;GAEJ"}
@@ -41,6 +41,7 @@ function useChat(options = {}) {
  React.useEffect(() => {
  if (!conversationId || !chatServiceRef.current) return;
  const loadConversation = async () => {
+ if (!chatServiceRef.current) return;
  const conv = await chatServiceRef.current.getConversation(conversationId);
  if (conv) {
  setConversation(conv);
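The hunk above replaces a non-null assertion from the original source (await chatServiceRef.current!.getConversation(...)) with a re-check inside the async closure: the ref is populated by a separate effect and could have been cleared by the time the closure actually runs. A minimal sketch of the guard pattern, with a hypothetical ref shape that is not this package's real type:

  type ConversationLoader = {
    getConversation(id: string): Promise<{ messages: unknown[] } | null>;
  };

  const serviceRef: { current: ConversationLoader | null } = { current: null };

  async function loadConversation(
    id: string,
    onLoaded: (conv: { messages: unknown[] }) => void
  ) {
    if (!serviceRef.current) return; // re-check at call time instead of using `!`
    const conv = await serviceRef.current.getConversation(id);
    if (conv) onLoaded(conv);
  }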
@@ -1 +1 @@
- {"version":3,"file":"useChat.js","names":["userMessage: ChatMessage","assistantMessage: ChatMessage","error"],"sources":["../../../src/presentation/hooks/useChat.tsx"],"sourcesContent":["'use client';\n\nimport * as React from 'react';\nimport type {\n ChatMessage,\n ChatConversation,\n ChatAttachment,\n} from '../../core/message-types';\nimport { ChatService } from '../../core/chat-service';\nimport {\n type ProviderName,\n type ProviderMode,\n createProvider,\n} from '@lssm/lib.ai-providers';\n\n/**\n * Options for useChat hook\n */\nexport interface UseChatOptions {\n /** Provider to use */\n provider?: ProviderName;\n /** Provider mode */\n mode?: ProviderMode;\n /** Model to use */\n model?: string;\n /** API key for BYOK mode */\n apiKey?: string;\n /** API proxy URL for managed mode */\n proxyUrl?: string;\n /** Initial conversation ID to resume */\n conversationId?: string;\n /** System prompt override */\n systemPrompt?: string;\n /** Enable streaming */\n streaming?: boolean;\n /** Called when a message is sent */\n onSend?: (message: ChatMessage) => void;\n /** Called when a response is received */\n onResponse?: (message: ChatMessage) => void;\n /** Called on error */\n onError?: (error: Error) => void;\n /** Called when usage is recorded */\n onUsage?: (usage: { inputTokens: number; outputTokens: number }) => void;\n}\n\n/**\n * Return type for useChat hook\n */\nexport interface UseChatReturn {\n /** Current messages */\n messages: ChatMessage[];\n /** Current conversation */\n conversation: ChatConversation | null;\n /** Whether currently loading/streaming */\n isLoading: boolean;\n /** Current error */\n error: Error | null;\n /** Send a message */\n sendMessage: (\n content: string,\n attachments?: ChatAttachment[]\n ) => Promise<void>;\n /** Clear conversation and start fresh */\n clearConversation: () => void;\n /** Set conversation ID to resume */\n setConversationId: (id: string | null) => void;\n /** Regenerate last response */\n regenerate: () => Promise<void>;\n /** Stop current generation */\n stop: () => void;\n}\n\n/**\n * Hook for managing AI chat state\n */\nexport function useChat(options: UseChatOptions = {}): UseChatReturn {\n const {\n provider = 'openai',\n mode = 'byok',\n model,\n apiKey,\n proxyUrl,\n conversationId: initialConversationId,\n systemPrompt,\n streaming = true,\n onSend,\n onResponse,\n onError,\n onUsage,\n } = options;\n\n const [messages, setMessages] = React.useState<ChatMessage[]>([]);\n const [conversation, setConversation] =\n React.useState<ChatConversation | null>(null);\n const [isLoading, setIsLoading] = React.useState(false);\n const [error, setError] = React.useState<Error | null>(null);\n const [conversationId, setConversationId] = React.useState<string | null>(\n initialConversationId ?? 
null\n );\n\n const abortControllerRef = React.useRef<AbortController | null>(null);\n const chatServiceRef = React.useRef<ChatService | null>(null);\n\n // Initialize chat service\n React.useEffect(() => {\n const chatProvider = createProvider({\n provider,\n model,\n apiKey,\n proxyUrl,\n });\n\n chatServiceRef.current = new ChatService({\n provider: chatProvider,\n systemPrompt,\n onUsage,\n });\n }, [provider, mode, model, apiKey, proxyUrl, systemPrompt, onUsage]);\n\n // Load existing conversation\n React.useEffect(() => {\n if (!conversationId || !chatServiceRef.current) return;\n\n const loadConversation = async () => {\n const conv =\n await chatServiceRef.current!.getConversation(conversationId);\n if (conv) {\n setConversation(conv);\n setMessages(conv.messages);\n }\n };\n\n loadConversation().catch(console.error);\n }, [conversationId]);\n\n const sendMessage = React.useCallback(\n async (content: string, attachments?: ChatAttachment[]) => {\n if (!chatServiceRef.current) {\n throw new Error('Chat service not initialized');\n }\n\n setIsLoading(true);\n setError(null);\n\n // Create abort controller\n abortControllerRef.current = new AbortController();\n\n try {\n // Add user message immediately\n const userMessage: ChatMessage = {\n id: `msg_${Date.now()}`,\n conversationId: conversationId ?? '',\n role: 'user',\n content,\n status: 'completed',\n createdAt: new Date(),\n updatedAt: new Date(),\n attachments,\n };\n setMessages((prev) => [...prev, userMessage]);\n onSend?.(userMessage);\n\n if (streaming) {\n // Streaming mode\n const result = await chatServiceRef.current.stream({\n conversationId: conversationId ?? undefined,\n content,\n attachments,\n });\n\n // Update conversation ID if new\n if (!conversationId) {\n setConversationId(result.conversationId);\n }\n\n // Add placeholder for assistant message\n const assistantMessage: ChatMessage = {\n id: result.messageId,\n conversationId: result.conversationId,\n role: 'assistant',\n content: '',\n status: 'streaming',\n createdAt: new Date(),\n updatedAt: new Date(),\n };\n setMessages((prev) => [...prev, assistantMessage]);\n\n // Process stream\n let fullContent = '';\n for await (const chunk of result.stream) {\n if (chunk.type === 'text' && chunk.content) {\n fullContent += chunk.content;\n setMessages((prev) =>\n prev.map((m) =>\n m.id === result.messageId ? { ...m, content: fullContent } : m\n )\n );\n } else if (chunk.type === 'done') {\n setMessages((prev) =>\n prev.map((m) =>\n m.id === result.messageId\n ? {\n ...m,\n status: 'completed',\n usage: chunk.usage,\n updatedAt: new Date(),\n }\n : m\n )\n );\n onResponse?.(\n messages.find((m) => m.id === result.messageId) ??\n assistantMessage\n );\n } else if (chunk.type === 'error') {\n setMessages((prev) =>\n prev.map((m) =>\n m.id === result.messageId\n ? {\n ...m,\n status: 'error',\n error: chunk.error,\n updatedAt: new Date(),\n }\n : m\n )\n );\n if (chunk.error) {\n const err = new Error(chunk.error.message);\n setError(err);\n onError?.(err);\n }\n }\n }\n } else {\n // Non-streaming mode\n const result = await chatServiceRef.current.send({\n conversationId: conversationId ?? undefined,\n content,\n attachments,\n });\n\n setConversation(result.conversation);\n setMessages(result.conversation.messages);\n\n if (!conversationId) {\n setConversationId(result.conversation.id);\n }\n\n onResponse?.(result.message);\n }\n } catch (err) {\n const error = err instanceof Error ? 
err : new Error(String(err));\n setError(error);\n onError?.(error);\n } finally {\n setIsLoading(false);\n abortControllerRef.current = null;\n }\n },\n [conversationId, streaming, onSend, onResponse, onError, messages]\n );\n\n const clearConversation = React.useCallback(() => {\n setMessages([]);\n setConversation(null);\n setConversationId(null);\n setError(null);\n }, []);\n\n const regenerate = React.useCallback(async () => {\n // Find the last user message\n const lastUserMessageIndex = messages.findLastIndex(\n (m) => m.role === 'user'\n );\n if (lastUserMessageIndex === -1) return;\n\n const lastUserMessage = messages[lastUserMessageIndex];\n if (!lastUserMessage) return;\n\n // Remove the last assistant message\n setMessages((prev) => prev.slice(0, lastUserMessageIndex + 1));\n\n // Resend\n await sendMessage(lastUserMessage.content, lastUserMessage.attachments);\n }, [messages, sendMessage]);\n\n const stop = React.useCallback(() => {\n abortControllerRef.current?.abort();\n setIsLoading(false);\n }, []);\n\n return {\n messages,\n conversation,\n isLoading,\n error,\n sendMessage,\n clearConversation,\n setConversationId,\n regenerate,\n stop,\n };\n}\n"],"mappings":";;;;;;;;;;;AA2EA,SAAgB,QAAQ,UAA0B,EAAE,EAAiB;CACnE,MAAM,EACJ,WAAW,UACX,OAAO,QACP,OACA,QACA,UACA,gBAAgB,uBAChB,cACA,YAAY,MACZ,QACA,YACA,SACA,YACE;CAEJ,MAAM,CAAC,UAAU,eAAe,MAAM,SAAwB,EAAE,CAAC;CACjE,MAAM,CAAC,cAAc,mBACnB,MAAM,SAAkC,KAAK;CAC/C,MAAM,CAAC,WAAW,gBAAgB,MAAM,SAAS,MAAM;CACvD,MAAM,CAAC,OAAO,YAAY,MAAM,SAAuB,KAAK;CAC5D,MAAM,CAAC,gBAAgB,qBAAqB,MAAM,SAChD,yBAAyB,KAC1B;CAED,MAAM,qBAAqB,MAAM,OAA+B,KAAK;CACrE,MAAM,iBAAiB,MAAM,OAA2B,KAAK;AAG7D,OAAM,gBAAgB;AAQpB,iBAAe,UAAU,IAAI,YAAY;GACvC,UARmB,eAAe;IAClC;IACA;IACA;IACA;IACD,CAAC;GAIA;GACA;GACD,CAAC;IACD;EAAC;EAAU;EAAM;EAAO;EAAQ;EAAU;EAAc;EAAQ,CAAC;AAGpE,OAAM,gBAAgB;AACpB,MAAI,CAAC,kBAAkB,CAAC,eAAe,QAAS;EAEhD,MAAM,mBAAmB,YAAY;GACnC,MAAM,OACJ,MAAM,eAAe,QAAS,gBAAgB,eAAe;AAC/D,OAAI,MAAM;AACR,oBAAgB,KAAK;AACrB,gBAAY,KAAK,SAAS;;;AAI9B,oBAAkB,CAAC,MAAM,QAAQ,MAAM;IACtC,CAAC,eAAe,CAAC;CAEpB,MAAM,cAAc,MAAM,YACxB,OAAO,SAAiB,gBAAmC;AACzD,MAAI,CAAC,eAAe,QAClB,OAAM,IAAI,MAAM,+BAA+B;AAGjD,eAAa,KAAK;AAClB,WAAS,KAAK;AAGd,qBAAmB,UAAU,IAAI,iBAAiB;AAElD,MAAI;GAEF,MAAMA,cAA2B;IAC/B,IAAI,OAAO,KAAK,KAAK;IACrB,gBAAgB,kBAAkB;IAClC,MAAM;IACN;IACA,QAAQ;IACR,2BAAW,IAAI,MAAM;IACrB,2BAAW,IAAI,MAAM;IACrB;IACD;AACD,gBAAa,SAAS,CAAC,GAAG,MAAM,YAAY,CAAC;AAC7C,YAAS,YAAY;AAErB,OAAI,WAAW;IAEb,MAAM,SAAS,MAAM,eAAe,QAAQ,OAAO;KACjD,gBAAgB,kBAAkB;KAClC;KACA;KACD,CAAC;AAGF,QAAI,CAAC,eACH,mBAAkB,OAAO,eAAe;IAI1C,MAAMC,mBAAgC;KACpC,IAAI,OAAO;KACX,gBAAgB,OAAO;KACvB,MAAM;KACN,SAAS;KACT,QAAQ;KACR,2BAAW,IAAI,MAAM;KACrB,2BAAW,IAAI,MAAM;KACtB;AACD,iBAAa,SAAS,CAAC,GAAG,MAAM,iBAAiB,CAAC;IAGlD,IAAI,cAAc;AAClB,eAAW,MAAM,SAAS,OAAO,OAC/B,KAAI,MAAM,SAAS,UAAU,MAAM,SAAS;AAC1C,oBAAe,MAAM;AACrB,kBAAa,SACX,KAAK,KAAK,MACR,EAAE,OAAO,OAAO,YAAY;MAAE,GAAG;MAAG,SAAS;MAAa,GAAG,EAC9D,CACF;eACQ,MAAM,SAAS,QAAQ;AAChC,kBAAa,SACX,KAAK,KAAK,MACR,EAAE,OAAO,OAAO,YACZ;MACE,GAAG;MACH,QAAQ;MACR,OAAO,MAAM;MACb,2BAAW,IAAI,MAAM;MACtB,GACD,EACL,CACF;AACD,kBACE,SAAS,MAAM,MAAM,EAAE,OAAO,OAAO,UAAU,IAC7C,iBACH;eACQ,MAAM,SAAS,SAAS;AACjC,kBAAa,SACX,KAAK,KAAK,MACR,EAAE,OAAO,OAAO,YACZ;MACE,GAAG;MACH,QAAQ;MACR,OAAO,MAAM;MACb,2BAAW,IAAI,MAAM;MACtB,GACD,EACL,CACF;AACD,SAAI,MAAM,OAAO;MACf,MAAM,MAAM,IAAI,MAAM,MAAM,MAAM,QAAQ;AAC1C,eAAS,IAAI;AACb,gBAAU,IAAI;;;UAIf;IAEL,MAAM,SAAS,MAAM,eAAe,QAAQ,KAAK;KAC/C,gBAAgB,kBAAkB;KAClC;KACA;KACD,CAAC;AAEF,oBAAgB,OAAO,aAAa;AACpC,gBAAY,OAAO,aAAa,SAAS;AAEzC,QAAI,CAAC,eACH,mBAAkB,OAAO,aAAa,GAAG;AAG3C,iBAAa,OAAO,QAAQ;;WAEvB,KAAK;GACZ,
MAAMC,UAAQ,eAAe,QAAQ,MAAM,IAAI,MAAM,OAAO,IAAI,CAAC;AACjE,YAASA,QAAM;AACf,aAAUA,QAAM;YACR;AACR,gBAAa,MAAM;AACnB,sBAAmB,UAAU;;IAGjC;EAAC;EAAgB;EAAW;EAAQ;EAAY;EAAS;EAAS,CACnE;AA+BD,QAAO;EACL;EACA;EACA;EACA;EACA;EACA,mBAnCwB,MAAM,kBAAkB;AAChD,eAAY,EAAE,CAAC;AACf,mBAAgB,KAAK;AACrB,qBAAkB,KAAK;AACvB,YAAS,KAAK;KACb,EAAE,CAAC;EA+BJ;EACA,YA9BiB,MAAM,YAAY,YAAY;GAE/C,MAAM,uBAAuB,SAAS,eACnC,MAAM,EAAE,SAAS,OACnB;AACD,OAAI,yBAAyB,GAAI;GAEjC,MAAM,kBAAkB,SAAS;AACjC,OAAI,CAAC,gBAAiB;AAGtB,gBAAa,SAAS,KAAK,MAAM,GAAG,uBAAuB,EAAE,CAAC;AAG9D,SAAM,YAAY,gBAAgB,SAAS,gBAAgB,YAAY;KACtE,CAAC,UAAU,YAAY,CAAC;EAgBzB,MAdW,MAAM,kBAAkB;AACnC,sBAAmB,SAAS,OAAO;AACnC,gBAAa,MAAM;KAClB,EAAE,CAAC;EAYL"}
1
+ {"version":3,"file":"useChat.js","names":["userMessage: ChatMessage","assistantMessage: ChatMessage","error"],"sources":["../../../src/presentation/hooks/useChat.tsx"],"sourcesContent":["'use client';\n\nimport * as React from 'react';\nimport type {\n ChatAttachment,\n ChatConversation,\n ChatMessage,\n} from '../../core/message-types';\nimport { ChatService } from '../../core/chat-service';\nimport {\n createProvider,\n type ProviderMode,\n type ProviderName,\n} from '@lssm/lib.ai-providers';\n\n/**\n * Options for useChat hook\n */\nexport interface UseChatOptions {\n /** Provider to use */\n provider?: ProviderName;\n /** Provider mode */\n mode?: ProviderMode;\n /** Model to use */\n model?: string;\n /** API key for BYOK mode */\n apiKey?: string;\n /** API proxy URL for managed mode */\n proxyUrl?: string;\n /** Initial conversation ID to resume */\n conversationId?: string;\n /** System prompt override */\n systemPrompt?: string;\n /** Enable streaming */\n streaming?: boolean;\n /** Called when a message is sent */\n onSend?: (message: ChatMessage) => void;\n /** Called when a response is received */\n onResponse?: (message: ChatMessage) => void;\n /** Called on error */\n onError?: (error: Error) => void;\n /** Called when usage is recorded */\n onUsage?: (usage: { inputTokens: number; outputTokens: number }) => void;\n}\n\n/**\n * Return type for useChat hook\n */\nexport interface UseChatReturn {\n /** Current messages */\n messages: ChatMessage[];\n /** Current conversation */\n conversation: ChatConversation | null;\n /** Whether currently loading/streaming */\n isLoading: boolean;\n /** Current error */\n error: Error | null;\n /** Send a message */\n sendMessage: (\n content: string,\n attachments?: ChatAttachment[]\n ) => Promise<void>;\n /** Clear conversation and start fresh */\n clearConversation: () => void;\n /** Set conversation ID to resume */\n setConversationId: (id: string | null) => void;\n /** Regenerate last response */\n regenerate: () => Promise<void>;\n /** Stop current generation */\n stop: () => void;\n}\n\n/**\n * Hook for managing AI chat state\n */\nexport function useChat(options: UseChatOptions = {}): UseChatReturn {\n const {\n provider = 'openai',\n mode = 'byok',\n model,\n apiKey,\n proxyUrl,\n conversationId: initialConversationId,\n systemPrompt,\n streaming = true,\n onSend,\n onResponse,\n onError,\n onUsage,\n } = options;\n\n const [messages, setMessages] = React.useState<ChatMessage[]>([]);\n const [conversation, setConversation] =\n React.useState<ChatConversation | null>(null);\n const [isLoading, setIsLoading] = React.useState(false);\n const [error, setError] = React.useState<Error | null>(null);\n const [conversationId, setConversationId] = React.useState<string | null>(\n initialConversationId ?? 
null\n );\n\n const abortControllerRef = React.useRef<AbortController | null>(null);\n const chatServiceRef = React.useRef<ChatService | null>(null);\n\n // Initialize chat service\n React.useEffect(() => {\n const chatProvider = createProvider({\n provider,\n model,\n apiKey,\n proxyUrl,\n });\n\n chatServiceRef.current = new ChatService({\n provider: chatProvider,\n systemPrompt,\n onUsage,\n });\n }, [provider, mode, model, apiKey, proxyUrl, systemPrompt, onUsage]);\n\n // Load existing conversation\n React.useEffect(() => {\n if (!conversationId || !chatServiceRef.current) return;\n\n const loadConversation = async () => {\n if (!chatServiceRef.current) return;\n\n const conv = await chatServiceRef.current.getConversation(conversationId);\n if (conv) {\n setConversation(conv);\n setMessages(conv.messages);\n }\n };\n\n loadConversation().catch(console.error);\n }, [conversationId]);\n\n const sendMessage = React.useCallback(\n async (content: string, attachments?: ChatAttachment[]) => {\n if (!chatServiceRef.current) {\n throw new Error('Chat service not initialized');\n }\n\n setIsLoading(true);\n setError(null);\n\n // Create abort controller\n abortControllerRef.current = new AbortController();\n\n try {\n // Add user message immediately\n const userMessage: ChatMessage = {\n id: `msg_${Date.now()}`,\n conversationId: conversationId ?? '',\n role: 'user',\n content,\n status: 'completed',\n createdAt: new Date(),\n updatedAt: new Date(),\n attachments,\n };\n setMessages((prev) => [...prev, userMessage]);\n onSend?.(userMessage);\n\n if (streaming) {\n // Streaming mode\n const result = await chatServiceRef.current.stream({\n conversationId: conversationId ?? undefined,\n content,\n attachments,\n });\n\n // Update conversation ID if new\n if (!conversationId) {\n setConversationId(result.conversationId);\n }\n\n // Add placeholder for assistant message\n const assistantMessage: ChatMessage = {\n id: result.messageId,\n conversationId: result.conversationId,\n role: 'assistant',\n content: '',\n status: 'streaming',\n createdAt: new Date(),\n updatedAt: new Date(),\n };\n setMessages((prev) => [...prev, assistantMessage]);\n\n // Process stream\n let fullContent = '';\n for await (const chunk of result.stream) {\n if (chunk.type === 'text' && chunk.content) {\n fullContent += chunk.content;\n setMessages((prev) =>\n prev.map((m) =>\n m.id === result.messageId ? { ...m, content: fullContent } : m\n )\n );\n } else if (chunk.type === 'done') {\n setMessages((prev) =>\n prev.map((m) =>\n m.id === result.messageId\n ? {\n ...m,\n status: 'completed',\n usage: chunk.usage,\n updatedAt: new Date(),\n }\n : m\n )\n );\n onResponse?.(\n messages.find((m) => m.id === result.messageId) ??\n assistantMessage\n );\n } else if (chunk.type === 'error') {\n setMessages((prev) =>\n prev.map((m) =>\n m.id === result.messageId\n ? {\n ...m,\n status: 'error',\n error: chunk.error,\n updatedAt: new Date(),\n }\n : m\n )\n );\n if (chunk.error) {\n const err = new Error(chunk.error.message);\n setError(err);\n onError?.(err);\n }\n }\n }\n } else {\n // Non-streaming mode\n const result = await chatServiceRef.current.send({\n conversationId: conversationId ?? undefined,\n content,\n attachments,\n });\n\n setConversation(result.conversation);\n setMessages(result.conversation.messages);\n\n if (!conversationId) {\n setConversationId(result.conversation.id);\n }\n\n onResponse?.(result.message);\n }\n } catch (err) {\n const error = err instanceof Error ? 
err : new Error(String(err));\n setError(error);\n onError?.(error);\n } finally {\n setIsLoading(false);\n abortControllerRef.current = null;\n }\n },\n [conversationId, streaming, onSend, onResponse, onError, messages]\n );\n\n const clearConversation = React.useCallback(() => {\n setMessages([]);\n setConversation(null);\n setConversationId(null);\n setError(null);\n }, []);\n\n const regenerate = React.useCallback(async () => {\n // Find the last user message\n const lastUserMessageIndex = messages.findLastIndex(\n (m) => m.role === 'user'\n );\n if (lastUserMessageIndex === -1) return;\n\n const lastUserMessage = messages[lastUserMessageIndex];\n if (!lastUserMessage) return;\n\n // Remove the last assistant message\n setMessages((prev) => prev.slice(0, lastUserMessageIndex + 1));\n\n // Resend\n await sendMessage(lastUserMessage.content, lastUserMessage.attachments);\n }, [messages, sendMessage]);\n\n const stop = React.useCallback(() => {\n abortControllerRef.current?.abort();\n setIsLoading(false);\n }, []);\n\n return {\n messages,\n conversation,\n isLoading,\n error,\n sendMessage,\n clearConversation,\n setConversationId,\n regenerate,\n stop,\n };\n}\n"],"mappings":";;;;;;;;;;;AA2EA,SAAgB,QAAQ,UAA0B,EAAE,EAAiB;CACnE,MAAM,EACJ,WAAW,UACX,OAAO,QACP,OACA,QACA,UACA,gBAAgB,uBAChB,cACA,YAAY,MACZ,QACA,YACA,SACA,YACE;CAEJ,MAAM,CAAC,UAAU,eAAe,MAAM,SAAwB,EAAE,CAAC;CACjE,MAAM,CAAC,cAAc,mBACnB,MAAM,SAAkC,KAAK;CAC/C,MAAM,CAAC,WAAW,gBAAgB,MAAM,SAAS,MAAM;CACvD,MAAM,CAAC,OAAO,YAAY,MAAM,SAAuB,KAAK;CAC5D,MAAM,CAAC,gBAAgB,qBAAqB,MAAM,SAChD,yBAAyB,KAC1B;CAED,MAAM,qBAAqB,MAAM,OAA+B,KAAK;CACrE,MAAM,iBAAiB,MAAM,OAA2B,KAAK;AAG7D,OAAM,gBAAgB;AAQpB,iBAAe,UAAU,IAAI,YAAY;GACvC,UARmB,eAAe;IAClC;IACA;IACA;IACA;IACD,CAAC;GAIA;GACA;GACD,CAAC;IACD;EAAC;EAAU;EAAM;EAAO;EAAQ;EAAU;EAAc;EAAQ,CAAC;AAGpE,OAAM,gBAAgB;AACpB,MAAI,CAAC,kBAAkB,CAAC,eAAe,QAAS;EAEhD,MAAM,mBAAmB,YAAY;AACnC,OAAI,CAAC,eAAe,QAAS;GAE7B,MAAM,OAAO,MAAM,eAAe,QAAQ,gBAAgB,eAAe;AACzE,OAAI,MAAM;AACR,oBAAgB,KAAK;AACrB,gBAAY,KAAK,SAAS;;;AAI9B,oBAAkB,CAAC,MAAM,QAAQ,MAAM;IACtC,CAAC,eAAe,CAAC;CAEpB,MAAM,cAAc,MAAM,YACxB,OAAO,SAAiB,gBAAmC;AACzD,MAAI,CAAC,eAAe,QAClB,OAAM,IAAI,MAAM,+BAA+B;AAGjD,eAAa,KAAK;AAClB,WAAS,KAAK;AAGd,qBAAmB,UAAU,IAAI,iBAAiB;AAElD,MAAI;GAEF,MAAMA,cAA2B;IAC/B,IAAI,OAAO,KAAK,KAAK;IACrB,gBAAgB,kBAAkB;IAClC,MAAM;IACN;IACA,QAAQ;IACR,2BAAW,IAAI,MAAM;IACrB,2BAAW,IAAI,MAAM;IACrB;IACD;AACD,gBAAa,SAAS,CAAC,GAAG,MAAM,YAAY,CAAC;AAC7C,YAAS,YAAY;AAErB,OAAI,WAAW;IAEb,MAAM,SAAS,MAAM,eAAe,QAAQ,OAAO;KACjD,gBAAgB,kBAAkB;KAClC;KACA;KACD,CAAC;AAGF,QAAI,CAAC,eACH,mBAAkB,OAAO,eAAe;IAI1C,MAAMC,mBAAgC;KACpC,IAAI,OAAO;KACX,gBAAgB,OAAO;KACvB,MAAM;KACN,SAAS;KACT,QAAQ;KACR,2BAAW,IAAI,MAAM;KACrB,2BAAW,IAAI,MAAM;KACtB;AACD,iBAAa,SAAS,CAAC,GAAG,MAAM,iBAAiB,CAAC;IAGlD,IAAI,cAAc;AAClB,eAAW,MAAM,SAAS,OAAO,OAC/B,KAAI,MAAM,SAAS,UAAU,MAAM,SAAS;AAC1C,oBAAe,MAAM;AACrB,kBAAa,SACX,KAAK,KAAK,MACR,EAAE,OAAO,OAAO,YAAY;MAAE,GAAG;MAAG,SAAS;MAAa,GAAG,EAC9D,CACF;eACQ,MAAM,SAAS,QAAQ;AAChC,kBAAa,SACX,KAAK,KAAK,MACR,EAAE,OAAO,OAAO,YACZ;MACE,GAAG;MACH,QAAQ;MACR,OAAO,MAAM;MACb,2BAAW,IAAI,MAAM;MACtB,GACD,EACL,CACF;AACD,kBACE,SAAS,MAAM,MAAM,EAAE,OAAO,OAAO,UAAU,IAC7C,iBACH;eACQ,MAAM,SAAS,SAAS;AACjC,kBAAa,SACX,KAAK,KAAK,MACR,EAAE,OAAO,OAAO,YACZ;MACE,GAAG;MACH,QAAQ;MACR,OAAO,MAAM;MACb,2BAAW,IAAI,MAAM;MACtB,GACD,EACL,CACF;AACD,SAAI,MAAM,OAAO;MACf,MAAM,MAAM,IAAI,MAAM,MAAM,MAAM,QAAQ;AAC1C,eAAS,IAAI;AACb,gBAAU,IAAI;;;UAIf;IAEL,MAAM,SAAS,MAAM,eAAe,QAAQ,KAAK;KAC/C,gBAAgB,kBAAkB;KAClC;KACA;KACD,CAAC;AAEF,oBAAgB,OAAO,aAAa;AACpC,gBAAY,OAAO,aAAa,SAAS;AAEzC,QAAI,CAAC,eACH,mBAAkB,OAAO,aAAa,GAAG;AAG3C,iBAAa,O
AAO,QAAQ;;WAEvB,KAAK;GACZ,MAAMC,UAAQ,eAAe,QAAQ,MAAM,IAAI,MAAM,OAAO,IAAI,CAAC;AACjE,YAASA,QAAM;AACf,aAAUA,QAAM;YACR;AACR,gBAAa,MAAM;AACnB,sBAAmB,UAAU;;IAGjC;EAAC;EAAgB;EAAW;EAAQ;EAAY;EAAS;EAAS,CACnE;AA+BD,QAAO;EACL;EACA;EACA;EACA;EACA;EACA,mBAnCwB,MAAM,kBAAkB;AAChD,eAAY,EAAE,CAAC;AACf,mBAAgB,KAAK;AACrB,qBAAkB,KAAK;AACvB,YAAS,KAAK;KACb,EAAE,CAAC;EA+BJ;EACA,YA9BiB,MAAM,YAAY,YAAY;GAE/C,MAAM,uBAAuB,SAAS,eACnC,MAAM,EAAE,SAAS,OACnB;AACD,OAAI,yBAAyB,GAAI;GAEjC,MAAM,kBAAkB,SAAS;AACjC,OAAI,CAAC,gBAAiB;AAGtB,gBAAa,SAAS,KAAK,MAAM,GAAG,uBAAuB,EAAE,CAAC;AAG9D,SAAM,YAAY,gBAAgB,SAAS,gBAAgB,YAAY;KACtE,CAAC,UAAU,YAAY,CAAC;EAgBzB,MAdW,MAAM,kBAAkB;AACnC,sBAAmB,SAAS,OAAO;AACnC,gBAAa,MAAM;KAClB,EAAE,CAAC;EAYL"}
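For orientation, the sourcesContent embedded in the useChat.js map above carries the full source of the useChat hook, including its UseChatOptions and UseChatReturn shapes. Below is a minimal consumer sketch, not part of the published package: it assumes the hook is re-exported from the "./presentation/hooks" subpath declared in package.json further down, and the component name, handler wiring, and NEXT_PUBLIC_OPENAI_KEY env var are hypothetical.

'use client';

import * as React from 'react';
// Import path assumed from the "./presentation/hooks" subpath export in
// package.json below; the exact re-exported symbol is not confirmed by this diff.
import { useChat } from '@lssm/module.ai-chat/presentation/hooks';

// Illustrative component; NEXT_PUBLIC_OPENAI_KEY is a hypothetical env var.
export function ChatPanel() {
  const { messages, isLoading, error, sendMessage, regenerate, stop } = useChat({
    provider: 'openai',
    mode: 'byok',
    apiKey: process.env.NEXT_PUBLIC_OPENAI_KEY,
    streaming: true,
    onError: (err) => console.error('chat error', err),
  });
  const [draft, setDraft] = React.useState('');

  return (
    <div>
      {/* Messages re-render as streamed chunks replace the assistant placeholder */}
      {messages.map((m) => (
        <p key={m.id}>
          <strong>{m.role}:</strong> {m.content}
        </p>
      ))}
      {error ? <p role="alert">{error.message}</p> : null}
      <input value={draft} onChange={(e) => setDraft(e.target.value)} />
      <button
        disabled={isLoading}
        onClick={() => {
          void sendMessage(draft);
          setDraft('');
        }}
      >
        Send
      </button>
      <button onClick={() => void regenerate()}>Regenerate</button>
      <button onClick={stop}>Stop</button>
    </div>
  );
}

In the hook source above, streaming updates arrive by mapping over the message list and replacing the placeholder assistant message by id, so a component like this re-renders as content accumulates, and stop() aborts the in-flight request through the hook's internal AbortController.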
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@lssm/module.ai-chat",
- "version": "0.0.0-canary-20251219202229",
+ "version": "0.0.0-canary-20251220002821",
  "type": "module",
  "main": "./dist/index.js",
  "module": "./dist/index.js",
@@ -23,13 +23,13 @@
  "test": "bun test"
  },
  "dependencies": {
- "@lssm/lib.ai-agent": "0.0.0-canary-20251219202229",
- "@lssm/lib.ai-providers": "0.0.0-canary-20251219202229",
- "@lssm/lib.contracts": "0.0.0-canary-20251219202229",
- "@lssm/lib.metering": "0.0.0-canary-20251219202229",
- "@lssm/lib.cost-tracking": "0.0.0-canary-20251219202229",
- "@lssm/lib.design-system": "0.0.0-canary-20251219202229",
- "@lssm/lib.ui-kit-web": "0.0.0-canary-20251219202229",
+ "@lssm/lib.ai-agent": "0.0.0-canary-20251220002821",
+ "@lssm/lib.ai-providers": "0.0.0-canary-20251220002821",
+ "@lssm/lib.contracts": "0.0.0-canary-20251220002821",
+ "@lssm/lib.metering": "0.0.0-canary-20251220002821",
+ "@lssm/lib.cost-tracking": "0.0.0-canary-20251220002821",
+ "@lssm/lib.design-system": "0.0.0-canary-20251220002821",
+ "@lssm/lib.ui-kit-web": "0.0.0-canary-20251220002821",
  "@ai-sdk/react": "beta",
  "ai": "beta",
  "lucide-react": "^0.535.0",
@@ -37,8 +37,8 @@
  "zod": "^4.1.13"
  },
  "devDependencies": {
- "@lssm/tool.tsdown": "0.0.0-canary-20251219202229",
- "@lssm/tool.typescript": "0.0.0-canary-20251219202229",
+ "@lssm/tool.tsdown": "0.0.0-canary-20251220002821",
+ "@lssm/tool.typescript": "0.0.0-canary-20251220002821",
  "@types/react": "^19.0.14",
  "tsdown": "^0.18.1",
  "typescript": "^5.9.3"
@@ -67,7 +67,13 @@
  "./presentation/hooks": "./dist/presentation/hooks/index.js",
  "./providers": "./dist/providers/index.js",
  "./*": "./*"
- }
+ },
+ "registry": "https://registry.npmjs.org/"
  },
- "license": "MIT"
+ "license": "MIT",
+ "repository": {
+ "type": "git",
+ "url": "https://github.com/lssm-tech/contractspec.git",
+ "directory": "packages/modules/ai-chat"
+ }
  }
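For consumers tracking these canaries, the exports map above defines the root entry ("main"/"module" → ./dist/index.js) alongside the subpath entry points. A minimal sketch of depending on this release follows; it assumes ChatService is re-exported from the root index, which this diff does not confirm.

// Root entry; exported symbol names are an assumption, not confirmed by the diff.
import { ChatService } from '@lssm/module.ai-chat';

// Pinning this exact canary in a consumer's package.json:
// "dependencies": { "@lssm/module.ai-chat": "0.0.0-canary-20251220002821" }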