@lssm/module.ai-chat 0.0.0-canary-20251217060834 → 0.0.0-canary-20251217073102

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (49)
  1. package/dist/ai-chat.feature.js +93 -1
  2. package/dist/context/context-builder.js +147 -2
  3. package/dist/context/file-operations.js +174 -1
  4. package/dist/context/index.js +5 -1
  5. package/dist/context/workspace-context.js +123 -2
  6. package/dist/core/chat-service.js +211 -2
  7. package/dist/core/conversation-store.js +108 -1
  8. package/dist/core/index.js +4 -1
  9. package/dist/index.d.ts +0 -2
  10. package/dist/index.js +22 -1
  11. package/dist/libs/ai-providers/dist/factory.js +225 -0
  12. package/dist/libs/ai-providers/dist/index.js +4 -0
  13. package/dist/libs/ai-providers/dist/legacy.js +2 -0
  14. package/dist/libs/ai-providers/dist/models.js +299 -0
  15. package/dist/libs/ai-providers/dist/validation.js +60 -0
  16. package/dist/libs/design-system/dist/_virtual/rolldown_runtime.js +5 -0
  17. package/dist/libs/design-system/dist/components/atoms/Button.js +33 -0
  18. package/dist/libs/design-system/dist/components/atoms/Textarea.js +35 -0
  19. package/dist/libs/design-system/dist/lib/keyboard.js +193 -0
  20. package/dist/libs/design-system/dist/ui-kit-web/dist/ui/button.js +55 -0
  21. package/dist/libs/design-system/dist/ui-kit-web/dist/ui/textarea.js +16 -0
  22. package/dist/libs/design-system/dist/ui-kit-web/dist/ui-kit-core/dist/utils.js +13 -0
  23. package/dist/libs/ui-kit-web/dist/ui/avatar.js +25 -0
  24. package/dist/libs/ui-kit-web/dist/ui/badge.js +26 -0
  25. package/dist/libs/ui-kit-web/dist/ui/scroll-area.js +39 -0
  26. package/dist/libs/ui-kit-web/dist/ui/select.js +79 -0
  27. package/dist/libs/ui-kit-web/dist/ui/skeleton.js +14 -0
  28. package/dist/libs/ui-kit-web/dist/ui/tooltip.js +39 -0
  29. package/dist/libs/ui-kit-web/dist/ui/utils.js +10 -0
  30. package/dist/libs/ui-kit-web/dist/ui-kit-core/dist/utils.js +10 -0
  31. package/dist/presentation/components/ChatContainer.js +62 -1
  32. package/dist/presentation/components/ChatInput.d.ts +2 -2
  33. package/dist/presentation/components/ChatInput.js +149 -1
  34. package/dist/presentation/components/ChatMessage.d.ts +2 -2
  35. package/dist/presentation/components/ChatMessage.js +135 -1
  36. package/dist/presentation/components/CodePreview.d.ts +2 -2
  37. package/dist/presentation/components/CodePreview.js +126 -2
  38. package/dist/presentation/components/ContextIndicator.js +96 -1
  39. package/dist/presentation/components/ModelPicker.d.ts +2 -2
  40. package/dist/presentation/components/ModelPicker.js +197 -1
  41. package/dist/presentation/components/index.js +8 -1
  42. package/dist/presentation/hooks/index.js +4 -1
  43. package/dist/presentation/hooks/useChat.js +171 -1
  44. package/dist/presentation/hooks/useProviders.js +42 -1
  45. package/dist/presentation/index.d.ts +0 -1
  46. package/dist/presentation/index.js +12 -1
  47. package/dist/providers/chat-utilities.js +16 -1
  48. package/dist/providers/index.js +7 -1
  49. package/package.json +10 -10
package/dist/core/chat-service.js CHANGED
@@ -1,4 +1,14 @@
- import{InMemoryConversationStore as e}from"./conversation-store.js";import{generateText as t,streamText as n}from"ai";var r=class{provider;context;store;systemPrompt;maxHistoryMessages;onUsage;constructor(t){this.provider=t.provider,this.context=t.context,this.store=t.store??new e,this.systemPrompt=t.systemPrompt??`You are ContractSpec AI, an expert coding assistant specialized in ContractSpec development.
+ import { InMemoryConversationStore } from "./conversation-store.js";
+ import { generateText, streamText } from "ai";
+
+ //#region src/core/chat-service.ts
+ /**
+ * Main chat orchestration service
+ */
+ /**
+ * Default system prompt for ContractSpec vibe coding
+ */
+ const DEFAULT_SYSTEM_PROMPT = `You are ContractSpec AI, an expert coding assistant specialized in ContractSpec development.
 
  Your capabilities:
  - Help users create, modify, and understand ContractSpec specifications
@@ -11,4 +21,203 @@ Guidelines:
  - Provide code examples when helpful
  - Reference relevant ContractSpec concepts and patterns
  - Ask clarifying questions when the user's intent is unclear
- - When suggesting code changes, explain the rationale`,this.maxHistoryMessages=t.maxHistoryMessages??20,this.onUsage=t.onUsage}async send(e){let n;if(e.conversationId){let t=await this.store.get(e.conversationId);if(!t)throw Error(`Conversation ${e.conversationId} not found`);n=t}else n=await this.store.create({status:`active`,provider:this.provider.name,model:this.provider.model,messages:[],workspacePath:this.context?.workspacePath});await this.store.appendMessage(n.id,{role:`user`,content:e.content,status:`completed`,attachments:e.attachments});let r=this.buildPrompt(n,e),i=this.provider.getModel();try{let e=await t({model:i,prompt:r,system:this.systemPrompt}),a=await this.store.appendMessage(n.id,{role:`assistant`,content:e.text,status:`completed`}),o=await this.store.get(n.id);if(!o)throw Error(`Conversation lost after update`);return{message:a,conversation:o}}catch(e){throw await this.store.appendMessage(n.id,{role:`assistant`,content:``,status:`error`,error:{code:`generation_failed`,message:e instanceof Error?e.message:String(e)}}),e}}async stream(e){let t;if(e.conversationId){let n=await this.store.get(e.conversationId);if(!n)throw Error(`Conversation ${e.conversationId} not found`);t=n}else t=await this.store.create({status:`active`,provider:this.provider.name,model:this.provider.model,messages:[],workspacePath:this.context?.workspacePath});await this.store.appendMessage(t.id,{role:`user`,content:e.content,status:`completed`,attachments:e.attachments});let r=await this.store.appendMessage(t.id,{role:`assistant`,content:``,status:`streaming`}),i=this.buildPrompt(t,e),a=this.provider.getModel(),o=this;async function*s(){let e=``;try{let s=n({model:a,prompt:i,system:o.systemPrompt});for await(let t of s.textStream)e+=t,yield{type:`text`,content:t};await o.store.updateMessage(t.id,r.id,{content:e,status:`completed`}),yield{type:`done`}}catch(n){await o.store.updateMessage(t.id,r.id,{content:e,status:`error`,error:{code:`stream_failed`,message:n instanceof Error?n.message:String(n)}}),yield{type:`error`,error:{code:`stream_failed`,message:n instanceof Error?n.message:String(n)}}}}return{conversationId:t.id,messageId:r.id,stream:s()}}async getConversation(e){return this.store.get(e)}async listConversations(e){return this.store.list({status:`active`,...e})}async deleteConversation(e){return this.store.delete(e)}buildPrompt(e,t){let n=``,r=Math.max(0,e.messages.length-this.maxHistoryMessages);for(let t=r;t<e.messages.length;t++){let r=e.messages[t];r&&(r.role===`user`||r.role===`assistant`)&&(n+=`${r.role===`user`?`User`:`Assistant`}: ${r.content}\n\n`)}let i=t.content;if(t.attachments?.length){let e=t.attachments.map(e=>e.type===`file`||e.type===`code`?`\n\n### ${e.name}\n\`\`\`\n${e.content}\n\`\`\``:`\n\n[Attachment: ${e.name}]`).join(``);i+=e}return n+=`User: ${i}\n\nAssistant:`,n}};function i(e){return new r(e)}export{r as ChatService,i as createChatService};
+ - When suggesting code changes, explain the rationale`;
+ /**
+ * Main chat service for AI-powered conversations
+ */
+ var ChatService = class {
+ provider;
+ context;
+ store;
+ systemPrompt;
+ maxHistoryMessages;
+ onUsage;
+ constructor(config) {
+ this.provider = config.provider;
+ this.context = config.context;
+ this.store = config.store ?? new InMemoryConversationStore();
+ this.systemPrompt = config.systemPrompt ?? DEFAULT_SYSTEM_PROMPT;
+ this.maxHistoryMessages = config.maxHistoryMessages ?? 20;
+ this.onUsage = config.onUsage;
+ }
+ /**
+ * Send a message and get a complete response
+ */
+ async send(options) {
+ let conversation;
+ if (options.conversationId) {
+ const existing = await this.store.get(options.conversationId);
+ if (!existing) throw new Error(`Conversation ${options.conversationId} not found`);
+ conversation = existing;
+ } else conversation = await this.store.create({
+ status: "active",
+ provider: this.provider.name,
+ model: this.provider.model,
+ messages: [],
+ workspacePath: this.context?.workspacePath
+ });
+ await this.store.appendMessage(conversation.id, {
+ role: "user",
+ content: options.content,
+ status: "completed",
+ attachments: options.attachments
+ });
+ const prompt = this.buildPrompt(conversation, options);
+ const model = this.provider.getModel();
+ try {
+ const result = await generateText({
+ model,
+ prompt,
+ system: this.systemPrompt
+ });
+ const assistantMessage = await this.store.appendMessage(conversation.id, {
+ role: "assistant",
+ content: result.text,
+ status: "completed"
+ });
+ const updatedConversation = await this.store.get(conversation.id);
+ if (!updatedConversation) throw new Error("Conversation lost after update");
+ return {
+ message: assistantMessage,
+ conversation: updatedConversation
+ };
+ } catch (error) {
+ await this.store.appendMessage(conversation.id, {
+ role: "assistant",
+ content: "",
+ status: "error",
+ error: {
+ code: "generation_failed",
+ message: error instanceof Error ? error.message : String(error)
+ }
+ });
+ throw error;
+ }
+ }
+ /**
+ * Send a message and get a streaming response
+ */
+ async stream(options) {
+ let conversation;
+ if (options.conversationId) {
+ const existing = await this.store.get(options.conversationId);
+ if (!existing) throw new Error(`Conversation ${options.conversationId} not found`);
+ conversation = existing;
+ } else conversation = await this.store.create({
+ status: "active",
+ provider: this.provider.name,
+ model: this.provider.model,
+ messages: [],
+ workspacePath: this.context?.workspacePath
+ });
+ await this.store.appendMessage(conversation.id, {
+ role: "user",
+ content: options.content,
+ status: "completed",
+ attachments: options.attachments
+ });
+ const assistantMessage = await this.store.appendMessage(conversation.id, {
+ role: "assistant",
+ content: "",
+ status: "streaming"
+ });
+ const prompt = this.buildPrompt(conversation, options);
+ const model = this.provider.getModel();
+ const self = this;
+ async function* streamGenerator() {
+ let fullContent = "";
+ try {
+ const result = streamText({
+ model,
+ prompt,
+ system: self.systemPrompt
+ });
+ for await (const chunk of result.textStream) {
+ fullContent += chunk;
+ yield {
+ type: "text",
+ content: chunk
+ };
+ }
+ await self.store.updateMessage(conversation.id, assistantMessage.id, {
+ content: fullContent,
+ status: "completed"
+ });
+ yield { type: "done" };
+ } catch (error) {
+ await self.store.updateMessage(conversation.id, assistantMessage.id, {
+ content: fullContent,
+ status: "error",
+ error: {
+ code: "stream_failed",
+ message: error instanceof Error ? error.message : String(error)
+ }
+ });
+ yield {
+ type: "error",
+ error: {
+ code: "stream_failed",
+ message: error instanceof Error ? error.message : String(error)
+ }
+ };
+ }
+ }
+ return {
+ conversationId: conversation.id,
+ messageId: assistantMessage.id,
+ stream: streamGenerator()
+ };
+ }
+ /**
+ * Get a conversation by ID
+ */
+ async getConversation(conversationId) {
+ return this.store.get(conversationId);
+ }
+ /**
+ * List conversations
+ */
+ async listConversations(options) {
+ return this.store.list({
+ status: "active",
+ ...options
+ });
+ }
+ /**
+ * Delete a conversation
+ */
+ async deleteConversation(conversationId) {
+ return this.store.delete(conversationId);
+ }
+ /**
+ * Build prompt string for LLM
+ */
+ buildPrompt(conversation, options) {
+ let prompt = "";
+ const historyStart = Math.max(0, conversation.messages.length - this.maxHistoryMessages);
+ for (let i = historyStart; i < conversation.messages.length; i++) {
+ const msg = conversation.messages[i];
+ if (!msg) continue;
+ if (msg.role === "user" || msg.role === "assistant") prompt += `${msg.role === "user" ? "User" : "Assistant"}: ${msg.content}\n\n`;
+ }
+ let content = options.content;
+ if (options.attachments?.length) {
+ const attachmentInfo = options.attachments.map((a) => {
+ if (a.type === "file" || a.type === "code") return `\n\n### ${a.name}\n\`\`\`\n${a.content}\n\`\`\``;
+ return `\n\n[Attachment: ${a.name}]`;
+ }).join("");
+ content += attachmentInfo;
+ }
+ prompt += `User: ${content}\n\nAssistant:`;
+ return prompt;
+ }
+ };
+ /**
+ * Create a chat service with the given configuration
+ */
+ function createChatService(config) {
+ return new ChatService(config);
+ }
+
+ //#endregion
+ export { ChatService, createChatService };
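
With the build now shipped unminified, the `ChatService` surface is readable straight from the diff. A minimal usage sketch, assuming the root exports listed in `package/dist/index.js` below (the prompt strings are illustrative):

```ts
import {
  createChatService,
  createInMemoryConversationStore,
  createProviderFromEnv,
} from "@lssm/module.ai-chat";

// Provider is resolved from CONTRACTSPEC_AI_PROVIDER plus the matching
// *_API_KEY variable (see the factory.js hunk further down).
const service = createChatService({
  provider: createProviderFromEnv(),
  store: createInMemoryConversationStore(),
  maxHistoryMessages: 20, // same as the constructor default above
});

// send(): omitting conversationId creates a new conversation first.
const { conversation } = await service.send({
  content: "Explain ContractSpec specifications",
});

// stream(): returns an async generator yielding text/done/error chunks.
const { stream } = await service.stream({
  conversationId: conversation.id,
  content: "Show a worked example",
});
for await (const chunk of stream) {
  if (chunk.type === "text") process.stdout.write(chunk.content);
}
```

Note that `stream()` appends a placeholder assistant message with status `"streaming"` before the generator runs, then patches it to `"completed"` or `"error"` once the stream finishes.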
package/dist/core/conversation-store.js CHANGED
@@ -1 +1,108 @@
- function e(e){return`${e}_${Date.now()}_${Math.random().toString(36).slice(2,11)}`}var t=class{conversations=new Map;async get(e){return this.conversations.get(e)??null}async create(t){let n=new Date,r={...t,id:e(`conv`),createdAt:n,updatedAt:n};return this.conversations.set(r.id,r),r}async update(e,t){let n=this.conversations.get(e);if(!n)return null;let r={...n,...t,updatedAt:new Date};return this.conversations.set(e,r),r}async appendMessage(t,n){let r=this.conversations.get(t);if(!r)throw Error(`Conversation ${t} not found`);let i=new Date,a={...n,id:e(`msg`),conversationId:t,createdAt:i,updatedAt:i};return r.messages.push(a),r.updatedAt=i,a}async updateMessage(e,t,n){let r=this.conversations.get(e);if(!r)return null;let i=r.messages.findIndex(e=>e.id===t);if(i===-1)return null;let a=r.messages[i];if(!a)return null;let o={...a,...n,updatedAt:new Date};return r.messages[i]=o,r.updatedAt=new Date,o}async delete(e){return this.conversations.delete(e)}async list(e){let t=Array.from(this.conversations.values());e?.status&&(t=t.filter(t=>t.status===e.status)),t.sort((e,t)=>t.updatedAt.getTime()-e.updatedAt.getTime());let n=e?.offset??0,r=e?.limit??100;return t.slice(n,n+r)}async search(e,t=20){let n=e.toLowerCase(),r=[];for(let e of this.conversations.values()){if(e.title?.toLowerCase().includes(n)){r.push(e);continue}if(e.messages.some(e=>e.content.toLowerCase().includes(n))&&r.push(e),r.length>=t)break}return r}clear(){this.conversations.clear()}};function n(){return new t}export{t as InMemoryConversationStore,n as createInMemoryConversationStore};
+ //#region src/core/conversation-store.ts
+ /**
+ * Generate a unique ID
+ */
+ function generateId(prefix) {
+ return `${prefix}_${Date.now()}_${Math.random().toString(36).slice(2, 11)}`;
+ }
+ /**
+ * In-memory conversation store for development and testing
+ */
+ var InMemoryConversationStore = class {
+ conversations = /* @__PURE__ */ new Map();
+ async get(conversationId) {
+ return this.conversations.get(conversationId) ?? null;
+ }
+ async create(conversation) {
+ const now = /* @__PURE__ */ new Date();
+ const fullConversation = {
+ ...conversation,
+ id: generateId("conv"),
+ createdAt: now,
+ updatedAt: now
+ };
+ this.conversations.set(fullConversation.id, fullConversation);
+ return fullConversation;
+ }
+ async update(conversationId, updates) {
+ const conversation = this.conversations.get(conversationId);
+ if (!conversation) return null;
+ const updated = {
+ ...conversation,
+ ...updates,
+ updatedAt: /* @__PURE__ */ new Date()
+ };
+ this.conversations.set(conversationId, updated);
+ return updated;
+ }
+ async appendMessage(conversationId, message) {
+ const conversation = this.conversations.get(conversationId);
+ if (!conversation) throw new Error(`Conversation ${conversationId} not found`);
+ const now = /* @__PURE__ */ new Date();
+ const fullMessage = {
+ ...message,
+ id: generateId("msg"),
+ conversationId,
+ createdAt: now,
+ updatedAt: now
+ };
+ conversation.messages.push(fullMessage);
+ conversation.updatedAt = now;
+ return fullMessage;
+ }
+ async updateMessage(conversationId, messageId, updates) {
+ const conversation = this.conversations.get(conversationId);
+ if (!conversation) return null;
+ const messageIndex = conversation.messages.findIndex((m) => m.id === messageId);
+ if (messageIndex === -1) return null;
+ const message = conversation.messages[messageIndex];
+ if (!message) return null;
+ const updated = {
+ ...message,
+ ...updates,
+ updatedAt: /* @__PURE__ */ new Date()
+ };
+ conversation.messages[messageIndex] = updated;
+ conversation.updatedAt = /* @__PURE__ */ new Date();
+ return updated;
+ }
+ async delete(conversationId) {
+ return this.conversations.delete(conversationId);
+ }
+ async list(options) {
+ let results = Array.from(this.conversations.values());
+ if (options?.status) results = results.filter((c) => c.status === options.status);
+ results.sort((a, b) => b.updatedAt.getTime() - a.updatedAt.getTime());
+ const offset = options?.offset ?? 0;
+ const limit = options?.limit ?? 100;
+ return results.slice(offset, offset + limit);
+ }
+ async search(query, limit = 20) {
+ const lowerQuery = query.toLowerCase();
+ const results = [];
+ for (const conversation of this.conversations.values()) {
+ if (conversation.title?.toLowerCase().includes(lowerQuery)) {
+ results.push(conversation);
+ continue;
+ }
+ if (conversation.messages.some((m) => m.content.toLowerCase().includes(lowerQuery))) results.push(conversation);
+ if (results.length >= limit) break;
+ }
+ return results;
+ }
+ /**
+ * Clear all conversations (for testing)
+ */
+ clear() {
+ this.conversations.clear();
+ }
+ };
+ /**
+ * Create an in-memory conversation store
+ */
+ function createInMemoryConversationStore() {
+ return new InMemoryConversationStore();
+ }
+
+ //#endregion
+ export { InMemoryConversationStore, createInMemoryConversationStore };
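
The store can also be driven standalone, which is what `ChatService` falls back to when no `store` is configured. A short sketch against the methods above (field values are illustrative; `create()` takes the same partial shape `ChatService` passes it):

```ts
import { createInMemoryConversationStore } from "@lssm/module.ai-chat";

const store = createInMemoryConversationStore();

// create() stamps a generated id plus createdAt/updatedAt onto the input.
const conv = await store.create({
  status: "active",
  provider: "openai",
  model: "example-model", // illustrative model id
  messages: [],
});

await store.appendMessage(conv.id, {
  role: "user",
  content: "hello world",
  status: "completed",
});

// list() sorts by updatedAt descending; limit defaults to 100.
const recent = await store.list({ status: "active", limit: 10 });

// search() matches titles first, then message bodies, capped at `limit`.
const hits = await store.search("hello", 5);
```

One quirk visible in `search()`: a title match `continue`s past the `limit` check, so consecutive title hits can overshoot the requested limit by one entry per extra match.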
package/dist/core/index.js CHANGED
@@ -1 +1,4 @@
- import{InMemoryConversationStore as e,createInMemoryConversationStore as t}from"./conversation-store.js";import{ChatService as n,createChatService as r}from"./chat-service.js";export{n as ChatService,e as InMemoryConversationStore,r as createChatService,t as createInMemoryConversationStore};
+ import { InMemoryConversationStore, createInMemoryConversationStore } from "./conversation-store.js";
+ import { ChatService, createChatService } from "./chat-service.js";
+
+ export { ChatService, InMemoryConversationStore, createChatService, createInMemoryConversationStore };
package/dist/index.d.ts CHANGED
@@ -4,7 +4,6 @@ import { FileOperation, FileOperationResult, FileOperations, FileReadResult, Fil
  import { ChatAttachment, ChatCodeBlock, ChatConversation, ChatMessage, ChatRole, ChatSource, ChatStreamChunk, ChatToolCall, ConversationStatus, MessageStatus, SendMessageOptions, SendMessageResult, StreamMessageResult } from "./core/message-types.js";
  import { ConversationStore, InMemoryConversationStore, createInMemoryConversationStore } from "./core/conversation-store.js";
  import { ChatService, ChatServiceConfig, createChatService } from "./core/chat-service.js";
- import "./core/index.js";
  import { AiChatFeature } from "./ai-chat.feature.js";
  import { isStudioAvailable, supportsLocalMode } from "./providers/chat-utilities.js";
  import { ChatModelInfo, ChatProvider, ChatProviderConfig, ChatProviderMode, ChatProviderName, DEFAULT_MODELS, MODELS, ModelCapabilities, ProviderAvailability, createProvider, createProviderFromEnv, getAvailableProviders, getDefaultModel, getEnvVarName, getModelInfo, getModelsForProvider, getRecommendedModels, hasCredentials, isOllamaRunning, listOllamaModels, validateProvider } from "./providers/index.js";
@@ -14,5 +13,4 @@ import { ChatInput } from "./presentation/components/ChatInput.js";
  import "./presentation/components/index.js";
  import { useChat } from "./presentation/hooks/useChat.js";
  import { useProviders } from "./presentation/hooks/useProviders.js";
- import "./presentation/hooks/index.js";
  export { AiChatFeature, BuiltContext, ChatAttachment, ChatCodeBlock, ChatContainer, ChatConversation, ChatInput, ChatMessage, ChatMessage$1 as ChatMessageComponent, ChatModelInfo, ChatProvider, ChatProviderConfig, ChatProviderMode, ChatProviderName, ChatRole, ChatService, ChatServiceConfig, ChatSource, ChatStreamChunk, ChatToolCall, ContextBuilder, ContextBuilderOptions, ContextEntry, ConversationStatus, ConversationStore, DEFAULT_MODELS, FileInfo, FileOperation, FileOperationResult, FileOperations, FileReadResult, FileSystem, FileWriteResult, InMemoryConversationStore, MODELS, MessageStatus, ModelCapabilities, ProviderAvailability, SendMessageOptions, SendMessageResult, SpecInfo, StreamMessageResult, WorkspaceContext, WorkspaceContextConfig, WorkspaceSummary, createChatService, createContextBuilder, createInMemoryConversationStore, createNodeFileOperations, createProvider, createProviderFromEnv, createWorkspaceContext, getAvailableProviders, getDefaultModel, getEnvVarName, getModelInfo, getModelsForProvider, getRecommendedModels, hasCredentials, isOllamaRunning, isStudioAvailable, listOllamaModels, supportsLocalMode, useChat, useProviders, validateProvider };
package/dist/index.js CHANGED
@@ -1 +1,22 @@
- import{AiChatFeature as e}from"./ai-chat.feature.js";import{InMemoryConversationStore as t,createInMemoryConversationStore as n}from"./core/conversation-store.js";import{ChatService as r,createChatService as i}from"./core/chat-service.js";import"./core/index.js";import{isStudioAvailable as a,supportsLocalMode as o}from"./providers/chat-utilities.js";import{DEFAULT_MODELS as s,MODELS as c,createProvider as l,createProviderFromEnv as u,getAvailableProviders as d,getDefaultModel as f,getEnvVarName as p,getModelInfo as m,getModelsForProvider as h,getRecommendedModels as g,hasCredentials as _,isOllamaRunning as v,listOllamaModels as y,validateProvider as b}from"./providers/index.js";import{WorkspaceContext as x,createWorkspaceContext as S}from"./context/workspace-context.js";import{ContextBuilder as C,createContextBuilder as w}from"./context/context-builder.js";import{FileOperations as T,createNodeFileOperations as E}from"./context/file-operations.js";import"./context/index.js";import{ChatContainer as D}from"./presentation/components/ChatContainer.js";import{ChatMessage as O}from"./presentation/components/ChatMessage.js";import{ChatInput as k}from"./presentation/components/ChatInput.js";import"./presentation/components/index.js";import{useChat as A}from"./presentation/hooks/useChat.js";import{useProviders as j}from"./presentation/hooks/useProviders.js";import"./presentation/hooks/index.js";export{e as AiChatFeature,D as ChatContainer,k as ChatInput,O as ChatMessageComponent,r as ChatService,C as ContextBuilder,s as DEFAULT_MODELS,T as FileOperations,t as InMemoryConversationStore,c as MODELS,x as WorkspaceContext,i as createChatService,w as createContextBuilder,n as createInMemoryConversationStore,E as createNodeFileOperations,l as createProvider,u as createProviderFromEnv,S as createWorkspaceContext,d as getAvailableProviders,f as getDefaultModel,p as getEnvVarName,m as getModelInfo,h as getModelsForProvider,g as getRecommendedModels,_ as hasCredentials,v as isOllamaRunning,a as isStudioAvailable,y as listOllamaModels,o as supportsLocalMode,A as useChat,j as useProviders,b as validateProvider};
+ import { AiChatFeature } from "./ai-chat.feature.js";
+ import { InMemoryConversationStore, createInMemoryConversationStore } from "./core/conversation-store.js";
+ import { ChatService, createChatService } from "./core/chat-service.js";
+ import "./core/index.js";
+ import { DEFAULT_MODELS, MODELS, getDefaultModel, getModelInfo, getModelsForProvider, getRecommendedModels } from "./libs/ai-providers/dist/models.js";
+ import { createProvider, createProviderFromEnv, getAvailableProviders } from "./libs/ai-providers/dist/factory.js";
+ import { getEnvVarName, hasCredentials, isOllamaRunning, listOllamaModels, validateProvider } from "./libs/ai-providers/dist/validation.js";
+ import { isStudioAvailable, supportsLocalMode } from "./providers/chat-utilities.js";
+ import "./providers/index.js";
+ import { WorkspaceContext, createWorkspaceContext } from "./context/workspace-context.js";
+ import { ContextBuilder, createContextBuilder } from "./context/context-builder.js";
+ import { FileOperations, createNodeFileOperations } from "./context/file-operations.js";
+ import "./context/index.js";
+ import { ChatContainer } from "./presentation/components/ChatContainer.js";
+ import { ChatMessage } from "./presentation/components/ChatMessage.js";
+ import { ChatInput } from "./presentation/components/ChatInput.js";
+ import "./presentation/components/index.js";
+ import { useChat } from "./presentation/hooks/useChat.js";
+ import { useProviders } from "./presentation/hooks/useProviders.js";
+ import "./presentation/hooks/index.js";
+
+ export { AiChatFeature, ChatContainer, ChatInput, ChatMessage as ChatMessageComponent, ChatService, ContextBuilder, DEFAULT_MODELS, FileOperations, InMemoryConversationStore, MODELS, WorkspaceContext, createChatService, createContextBuilder, createInMemoryConversationStore, createNodeFileOperations, createProvider, createProviderFromEnv, createWorkspaceContext, getAvailableProviders, getDefaultModel, getEnvVarName, getModelInfo, getModelsForProvider, getRecommendedModels, hasCredentials, isOllamaRunning, isStudioAvailable, listOllamaModels, supportsLocalMode, useChat, useProviders, validateProvider };
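
The root barrel keeps every previous export name while rewiring the provider helpers to the bundled `libs/ai-providers` build, so existing call sites keep working. For example (the `getDefaultModel` argument shape is assumed from the `DEFAULT_MODELS[config.provider]` lookup in factory.js, not confirmed by this diff):

```ts
// Same import site as before this release; only the internal path moved.
import { DEFAULT_MODELS, getDefaultModel } from "@lssm/module.ai-chat";

// Assumed signature: resolve the default model id for a provider name.
const model = getDefaultModel("anthropic") ?? DEFAULT_MODELS["anthropic"];
```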
package/dist/libs/ai-providers/dist/factory.js ADDED
@@ -0,0 +1,225 @@
+ import { DEFAULT_MODELS, getModelsForProvider } from "./models.js";
+ import { anthropic } from "@ai-sdk/anthropic";
+ import { google } from "@ai-sdk/google";
+ import { mistral } from "@ai-sdk/mistral";
+ import { openai } from "@ai-sdk/openai";
+ import { ollama } from "ollama-ai-provider";
+
+ //#region ../../libs/ai-providers/dist/factory.js
+ /**
+ * Base provider implementation
+ */
+ var BaseProvider = class {
+ name;
+ model;
+ mode;
+ config;
+ cachedModel = null;
+ constructor(config) {
+ this.name = config.provider;
+ this.model = config.model ?? DEFAULT_MODELS[config.provider];
+ this.mode = this.determineMode(config);
+ this.config = config;
+ }
+ determineMode(config) {
+ if (config.provider === "ollama") return "local";
+ if (config.apiKey) return "byok";
+ return "managed";
+ }
+ getModel() {
+ if (!this.cachedModel) this.cachedModel = this.createModel();
+ return this.cachedModel;
+ }
+ createModel() {
+ const { apiKey, baseUrl, proxyUrl, organizationId } = this.config;
+ switch (this.name) {
+ case "ollama": {
+ const originalBaseUrl = process.env.OLLAMA_BASE_URL;
+ if (baseUrl && baseUrl !== "http://localhost:11434") process.env.OLLAMA_BASE_URL = baseUrl;
+ const ollamaModel = ollama(this.model);
+ if (originalBaseUrl !== void 0) process.env.OLLAMA_BASE_URL = originalBaseUrl;
+ else if (baseUrl && baseUrl !== "http://localhost:11434") delete process.env.OLLAMA_BASE_URL;
+ return ollamaModel;
+ }
+ case "openai":
+ if (this.mode === "managed") {
+ const originalBaseUrl = process.env.OPENAI_BASE_URL;
+ if (proxyUrl) process.env.OPENAI_BASE_URL = proxyUrl;
+ const model = openai(this.model);
+ if (originalBaseUrl !== void 0) process.env.OPENAI_BASE_URL = originalBaseUrl;
+ else if (proxyUrl) delete process.env.OPENAI_BASE_URL;
+ return model;
+ }
+ return openai(this.model);
+ case "anthropic":
+ if (this.mode === "managed") {
+ const originalBaseUrl = process.env.OPENAI_BASE_URL;
+ if (proxyUrl) process.env.OPENAI_BASE_URL = proxyUrl;
+ const model = openai(this.model);
+ if (originalBaseUrl !== void 0) process.env.OPENAI_BASE_URL = originalBaseUrl;
+ else if (proxyUrl) delete process.env.OPENAI_BASE_URL;
+ return model;
+ }
+ return anthropic(this.model);
+ case "mistral":
+ if (this.mode === "managed") {
+ const originalBaseUrl = process.env.OPENAI_BASE_URL;
+ if (proxyUrl) process.env.OPENAI_BASE_URL = proxyUrl;
+ const model = openai(this.model);
+ if (originalBaseUrl !== void 0) process.env.OPENAI_BASE_URL = originalBaseUrl;
+ else if (proxyUrl) delete process.env.OPENAI_BASE_URL;
+ return model;
+ }
+ return mistral(this.model);
+ case "gemini":
+ if (this.mode === "managed") {
+ const originalBaseUrl = process.env.OPENAI_BASE_URL;
+ if (proxyUrl) process.env.OPENAI_BASE_URL = proxyUrl;
+ const model = openai(this.model);
+ if (originalBaseUrl !== void 0) process.env.OPENAI_BASE_URL = originalBaseUrl;
+ else if (proxyUrl) delete process.env.OPENAI_BASE_URL;
+ return model;
+ }
+ return google(this.model);
+ default: throw new Error(`Unknown provider: ${this.name}`);
+ }
+ }
+ async listModels() {
+ if (this.name === "ollama") return this.listOllamaModels();
+ return getModelsForProvider(this.name);
+ }
+ async listOllamaModels() {
+ try {
+ const baseUrl = this.config.baseUrl ?? "http://localhost:11434";
+ const response = await fetch(`${baseUrl}/api/tags`);
+ if (!response.ok) return getModelsForProvider("ollama");
+ return ((await response.json()).models ?? []).map((m) => ({
+ id: m.name,
+ name: m.name,
+ provider: "ollama",
+ contextWindow: 8e3,
+ capabilities: {
+ vision: false,
+ tools: false,
+ reasoning: false,
+ streaming: true
+ }
+ }));
+ } catch {
+ return getModelsForProvider("ollama");
+ }
+ }
+ async validate() {
+ if (this.name === "ollama") return this.validateOllama();
+ if (this.mode === "byok" && !this.config.apiKey) return {
+ valid: false,
+ error: `API key required for ${this.name}`
+ };
+ if (this.mode === "managed" && !this.config.proxyUrl && !this.config.organizationId) return {
+ valid: false,
+ error: "Managed mode requires proxyUrl or organizationId"
+ };
+ return { valid: true };
+ }
+ async validateOllama() {
+ try {
+ const baseUrl = this.config.baseUrl ?? "http://localhost:11434";
+ const response = await fetch(`${baseUrl}/api/tags`);
+ if (!response.ok) return {
+ valid: false,
+ error: `Ollama server returned ${response.status}`
+ };
+ const models = (await response.json()).models ?? [];
+ if (!models.some((m) => m.name === this.model)) return {
+ valid: false,
+ error: `Model "${this.model}" not found. Available: ${models.map((m) => m.name).join(", ")}`
+ };
+ return { valid: true };
+ } catch (error) {
+ return {
+ valid: false,
+ error: `Cannot connect to Ollama at ${this.config.baseUrl ?? "http://localhost:11434"}: ${error instanceof Error ? error.message : String(error)}`
+ };
+ }
+ }
+ };
+ /**
+ * Create a provider from configuration
+ */
+ function createProvider(config) {
+ return new BaseProvider(config);
+ }
+ /**
+ * Create a provider from environment variables
+ */
+ function createProviderFromEnv() {
+ const provider = process.env.CONTRACTSPEC_AI_PROVIDER ?? "openai";
+ const model = process.env.CONTRACTSPEC_AI_MODEL;
+ let apiKey;
+ switch (provider) {
+ case "openai":
+ apiKey = process.env.OPENAI_API_KEY;
+ break;
+ case "anthropic":
+ apiKey = process.env.ANTHROPIC_API_KEY;
+ break;
+ case "mistral":
+ apiKey = process.env.MISTRAL_API_KEY;
+ break;
+ case "gemini":
+ apiKey = process.env.GOOGLE_API_KEY ?? process.env.GEMINI_API_KEY;
+ break;
+ case "ollama": break;
+ }
+ return createProvider({
+ provider,
+ model,
+ apiKey,
+ baseUrl: process.env.OLLAMA_BASE_URL,
+ proxyUrl: process.env.CONTRACTSPEC_AI_PROXY_URL,
+ organizationId: process.env.CONTRACTSPEC_ORG_ID
+ });
+ }
+ /**
+ * Get all available providers with their status
+ */
+ function getAvailableProviders() {
+ const providers = [];
+ providers.push({
+ provider: "ollama",
+ available: true,
+ mode: "local"
+ });
+ const openaiKey = process.env.OPENAI_API_KEY;
+ providers.push({
+ provider: "openai",
+ available: Boolean(openaiKey) || Boolean(process.env.CONTRACTSPEC_AI_PROXY_URL),
+ mode: openaiKey ? "byok" : "managed",
+ reason: !openaiKey ? "Set OPENAI_API_KEY for BYOK mode" : void 0
+ });
+ const anthropicKey = process.env.ANTHROPIC_API_KEY;
+ providers.push({
+ provider: "anthropic",
+ available: Boolean(anthropicKey) || Boolean(process.env.CONTRACTSPEC_AI_PROXY_URL),
+ mode: anthropicKey ? "byok" : "managed",
+ reason: !anthropicKey ? "Set ANTHROPIC_API_KEY for BYOK mode" : void 0
+ });
+ const mistralKey = process.env.MISTRAL_API_KEY;
+ providers.push({
+ provider: "mistral",
+ available: Boolean(mistralKey) || Boolean(process.env.CONTRACTSPEC_AI_PROXY_URL),
+ mode: mistralKey ? "byok" : "managed",
+ reason: !mistralKey ? "Set MISTRAL_API_KEY for BYOK mode" : void 0
+ });
+ const geminiKey = process.env.GOOGLE_API_KEY ?? process.env.GEMINI_API_KEY;
+ providers.push({
+ provider: "gemini",
+ available: Boolean(geminiKey) || Boolean(process.env.CONTRACTSPEC_AI_PROXY_URL),
+ mode: geminiKey ? "byok" : "managed",
+ reason: !geminiKey ? "Set GOOGLE_API_KEY for BYOK mode" : void 0
+ });
+ return providers;
+ }
+
+ //#endregion
+ export { createProvider, createProviderFromEnv, getAvailableProviders };
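
Since `getAvailableProviders()` reads only environment variables and `validate()` only probes local HTTP for Ollama, both are cheap to call at startup. A sketch (the `"llama3"` model id is illustrative):

```ts
import { createProvider, getAvailableProviders } from "@lssm/module.ai-chat";

// Availability is derived from the *_API_KEY vars plus CONTRACTSPEC_AI_PROXY_URL.
for (const p of getAvailableProviders()) {
  console.log(p.provider, p.available ? p.mode : `unavailable: ${p.reason}`);
}

// Ollama validation fetches GET <baseUrl>/api/tags and checks the model list.
const local = createProvider({ provider: "ollama", model: "llama3" });
const result = await local.validate();
if (!result.valid) console.error(result.error);
```

One design note visible in `createModel()` above: managed mode for anthropic, mistral, and gemini routes through the `openai()` client against `proxyUrl` by temporarily mutating `process.env.OPENAI_BASE_URL`, which is not safe if two providers are constructed concurrently.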
package/dist/libs/ai-providers/dist/index.js ADDED
@@ -0,0 +1,4 @@
+ import { DEFAULT_MODELS, MODELS, getDefaultModel, getModelInfo, getModelsForProvider, getRecommendedModels } from "./models.js";
+ import { createProvider, createProviderFromEnv, getAvailableProviders } from "./factory.js";
+ import { getEnvVarName, hasCredentials, isOllamaRunning, listOllamaModels, validateProvider } from "./validation.js";
+ import "./legacy.js";
package/dist/libs/ai-providers/dist/legacy.js ADDED
@@ -0,0 +1,2 @@
+ import { getRecommendedModels } from "./models.js";
+ import "./factory.js";