@lssm/module.ai-chat 0.0.0-canary-20251217083314 → 1.41.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (66)
  1. package/dist/ai-chat.feature.js +1 -93
  2. package/dist/context/context-builder.js +2 -147
  3. package/dist/context/file-operations.js +1 -174
  4. package/dist/context/index.js +1 -5
  5. package/dist/context/workspace-context.js +2 -123
  6. package/dist/core/chat-service.js +2 -211
  7. package/dist/core/conversation-store.js +1 -108
  8. package/dist/core/index.js +1 -4
  9. package/dist/index.js +1 -22
  10. package/dist/presentation/components/ChatContainer.js +1 -62
  11. package/dist/presentation/components/ChatInput.js +1 -149
  12. package/dist/presentation/components/ChatMessage.js +1 -135
  13. package/dist/presentation/components/CodePreview.js +2 -126
  14. package/dist/presentation/components/ContextIndicator.js +1 -96
  15. package/dist/presentation/components/ModelPicker.js +1 -197
  16. package/dist/presentation/components/index.js +1 -8
  17. package/dist/presentation/hooks/index.js +1 -4
  18. package/dist/presentation/hooks/useChat.js +1 -171
  19. package/dist/presentation/hooks/useProviders.js +1 -42
  20. package/dist/presentation/index.js +1 -12
  21. package/dist/providers/chat-utilities.js +1 -16
  22. package/dist/providers/index.js +1 -7
  23. package/package.json +17 -17
  24. package/dist/ai-chat.feature.d.ts +0 -11
  25. package/dist/context/context-builder.d.ts +0 -56
  26. package/dist/context/file-operations.d.ts +0 -99
  27. package/dist/context/index.d.ts +0 -4
  28. package/dist/context/workspace-context.d.ts +0 -116
  29. package/dist/core/chat-service.d.ts +0 -72
  30. package/dist/core/conversation-store.d.ts +0 -73
  31. package/dist/core/index.d.ts +0 -4
  32. package/dist/core/message-types.d.ts +0 -149
  33. package/dist/index.d.ts +0 -16
  34. package/dist/libs/ai-providers/dist/factory.js +0 -225
  35. package/dist/libs/ai-providers/dist/index.js +0 -4
  36. package/dist/libs/ai-providers/dist/legacy.js +0 -2
  37. package/dist/libs/ai-providers/dist/models.js +0 -299
  38. package/dist/libs/ai-providers/dist/validation.js +0 -60
  39. package/dist/libs/design-system/dist/_virtual/rolldown_runtime.js +0 -5
  40. package/dist/libs/design-system/dist/components/atoms/Button.js +0 -33
  41. package/dist/libs/design-system/dist/components/atoms/Textarea.js +0 -35
  42. package/dist/libs/design-system/dist/lib/keyboard.js +0 -193
  43. package/dist/libs/design-system/dist/ui-kit-web/dist/ui/button.js +0 -55
  44. package/dist/libs/design-system/dist/ui-kit-web/dist/ui/textarea.js +0 -16
  45. package/dist/libs/design-system/dist/ui-kit-web/dist/ui-kit-core/dist/utils.js +0 -13
  46. package/dist/libs/ui-kit-web/dist/ui/avatar.js +0 -25
  47. package/dist/libs/ui-kit-web/dist/ui/badge.js +0 -26
  48. package/dist/libs/ui-kit-web/dist/ui/scroll-area.js +0 -39
  49. package/dist/libs/ui-kit-web/dist/ui/select.js +0 -79
  50. package/dist/libs/ui-kit-web/dist/ui/skeleton.js +0 -14
  51. package/dist/libs/ui-kit-web/dist/ui/tooltip.js +0 -39
  52. package/dist/libs/ui-kit-web/dist/ui/utils.js +0 -10
  53. package/dist/libs/ui-kit-web/dist/ui-kit-core/dist/utils.js +0 -10
  54. package/dist/presentation/components/ChatContainer.d.ts +0 -20
  55. package/dist/presentation/components/ChatInput.d.ts +0 -34
  56. package/dist/presentation/components/ChatMessage.d.ts +0 -23
  57. package/dist/presentation/components/CodePreview.d.ts +0 -39
  58. package/dist/presentation/components/ContextIndicator.d.ts +0 -25
  59. package/dist/presentation/components/ModelPicker.d.ts +0 -38
  60. package/dist/presentation/components/index.d.ts +0 -7
  61. package/dist/presentation/hooks/index.d.ts +0 -3
  62. package/dist/presentation/hooks/useChat.d.ts +0 -66
  63. package/dist/presentation/hooks/useProviders.d.ts +0 -37
  64. package/dist/presentation/index.d.ts +0 -10
  65. package/dist/providers/chat-utilities.d.ts +0 -14
  66. package/dist/providers/index.d.ts +0 -3
package/dist/core/chat-service.d.ts DELETED
@@ -1,72 +0,0 @@
- import { WorkspaceContext } from "../context/workspace-context.js";
- import { ChatConversation, SendMessageOptions, SendMessageResult, StreamMessageResult } from "./message-types.js";
- import { ConversationStore } from "./conversation-store.js";
- import { Provider } from "@lssm/lib.ai-providers";
-
- //#region src/core/chat-service.d.ts
-
- /**
-  * Configuration for ChatService
-  */
- interface ChatServiceConfig {
-   /** LLM provider to use */
-   provider: Provider;
-   /** Optional workspace context for code-aware chat */
-   context?: WorkspaceContext;
-   /** Optional conversation store (defaults to in-memory) */
-   store?: ConversationStore;
-   /** Default system prompt */
-   systemPrompt?: string;
-   /** Maximum conversation history to include */
-   maxHistoryMessages?: number;
-   /** Callback for usage tracking */
-   onUsage?: (usage: {
-     inputTokens: number;
-     outputTokens: number;
-   }) => void;
- }
- /**
-  * Main chat service for AI-powered conversations
-  */
- declare class ChatService {
-   private readonly provider;
-   private readonly context?;
-   private readonly store;
-   private readonly systemPrompt;
-   private readonly maxHistoryMessages;
-   private readonly onUsage?;
-   constructor(config: ChatServiceConfig);
-   /**
-    * Send a message and get a complete response
-    */
-   send(options: SendMessageOptions): Promise<SendMessageResult>;
-   /**
-    * Send a message and get a streaming response
-    */
-   stream(options: SendMessageOptions): Promise<StreamMessageResult>;
-   /**
-    * Get a conversation by ID
-    */
-   getConversation(conversationId: string): Promise<ChatConversation | null>;
-   /**
-    * List conversations
-    */
-   listConversations(options?: {
-     limit?: number;
-     offset?: number;
-   }): Promise<ChatConversation[]>;
-   /**
-    * Delete a conversation
-    */
-   deleteConversation(conversationId: string): Promise<boolean>;
-   /**
-    * Build prompt string for LLM
-    */
-   private buildPrompt;
- }
- /**
-  * Create a chat service with the given configuration
-  */
- declare function createChatService(config: ChatServiceConfig): ChatService;
- //#endregion
- export { ChatService, ChatServiceConfig, createChatService };
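Taken together, the removed declarations describe a small facade: a provider, an optional workspace context, and a pluggable store. A minimal usage sketch reconstructed from those types (the root-level re-exports appear in package/dist/index.d.ts further down; anything beyond the typed surface, such as the prompt text and defaults here, is illustrative):

```ts
// Sketch only: built from the removed type declarations, not from runtime code.
import {
  createChatService,
  createProviderFromEnv,
  createInMemoryConversationStore,
} from "@lssm/module.ai-chat";

const service = createChatService({
  provider: createProviderFromEnv(),        // Provider from @lssm/lib.ai-providers
  store: createInMemoryConversationStore(), // optional; the doc comment says in-memory is the default
  systemPrompt: "You are a helpful coding assistant.",
  maxHistoryMessages: 20,
  onUsage: ({ inputTokens, outputTokens }) =>
    console.log(`tokens in/out: ${inputTokens}/${outputTokens}`),
});

// send() resolves with the completed assistant message plus the updated conversation.
const { message, conversation } = await service.send({
  content: "Summarize the workspace context.",
});
console.log(conversation.id, message.content);
```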
package/dist/core/conversation-store.d.ts DELETED
@@ -1,73 +0,0 @@
- import { ChatConversation, ChatMessage, ConversationStatus } from "./message-types.js";
-
- //#region src/core/conversation-store.d.ts
-
- /**
-  * Interface for conversation persistence
-  */
- interface ConversationStore {
-   /**
-    * Get a conversation by ID
-    */
-   get(conversationId: string): Promise<ChatConversation | null>;
-   /**
-    * Create a new conversation
-    */
-   create(conversation: Omit<ChatConversation, 'id' | 'createdAt' | 'updatedAt'>): Promise<ChatConversation>;
-   /**
-    * Update conversation properties
-    */
-   update(conversationId: string, updates: Partial<Pick<ChatConversation, 'title' | 'status' | 'summary' | 'metadata'>>): Promise<ChatConversation | null>;
-   /**
-    * Append a message to a conversation
-    */
-   appendMessage(conversationId: string, message: Omit<ChatMessage, 'id' | 'conversationId' | 'createdAt' | 'updatedAt'>): Promise<ChatMessage>;
-   /**
-    * Update a message in a conversation
-    */
-   updateMessage(conversationId: string, messageId: string, updates: Partial<ChatMessage>): Promise<ChatMessage | null>;
-   /**
-    * Delete a conversation
-    */
-   delete(conversationId: string): Promise<boolean>;
-   /**
-    * List conversations with optional filters
-    */
-   list(options?: {
-     status?: ConversationStatus;
-     limit?: number;
-     offset?: number;
-   }): Promise<ChatConversation[]>;
-   /**
-    * Search conversations by content
-    */
-   search(query: string, limit?: number): Promise<ChatConversation[]>;
- }
- /**
-  * In-memory conversation store for development and testing
-  */
- declare class InMemoryConversationStore implements ConversationStore {
-   private readonly conversations;
-   get(conversationId: string): Promise<ChatConversation | null>;
-   create(conversation: Omit<ChatConversation, 'id' | 'createdAt' | 'updatedAt'>): Promise<ChatConversation>;
-   update(conversationId: string, updates: Partial<Pick<ChatConversation, 'title' | 'status' | 'summary' | 'metadata'>>): Promise<ChatConversation | null>;
-   appendMessage(conversationId: string, message: Omit<ChatMessage, 'id' | 'conversationId' | 'createdAt' | 'updatedAt'>): Promise<ChatMessage>;
-   updateMessage(conversationId: string, messageId: string, updates: Partial<ChatMessage>): Promise<ChatMessage | null>;
-   delete(conversationId: string): Promise<boolean>;
-   list(options?: {
-     status?: ConversationStatus;
-     limit?: number;
-     offset?: number;
-   }): Promise<ChatConversation[]>;
-   search(query: string, limit?: number): Promise<ChatConversation[]>;
-   /**
-    * Clear all conversations (for testing)
-    */
-   clear(): void;
- }
- /**
-  * Create an in-memory conversation store
-  */
- declare function createInMemoryConversationStore(): ConversationStore;
- //#endregion
- export { ConversationStore, InMemoryConversationStore, createInMemoryConversationStore };
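The Omit<...> shapes above imply that the store, not the caller, assigns ids and timestamps. A short sketch against the in-memory implementation, assuming only the interface shown (the model id is a placeholder, not something this diff specifies):

```ts
// Sketch: exercising the ConversationStore contract via the in-memory store.
import { createInMemoryConversationStore } from "@lssm/module.ai-chat";

const store = createInMemoryConversationStore();

// create() takes the conversation minus id/createdAt/updatedAt; the store fills those in.
const conversation = await store.create({
  status: "active",
  provider: "openai",
  model: "gpt-4o", // hypothetical model id, for illustration only
  messages: [],
});

// appendMessage() likewise omits id/conversationId/createdAt/updatedAt.
await store.appendMessage(conversation.id, {
  role: "user",
  content: "Hello",
  status: "completed",
});

const recent = await store.list({ status: "active", limit: 10 });
console.log(recent.length);
```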
package/dist/core/index.d.ts DELETED
@@ -1,4 +0,0 @@
- import { ChatAttachment, ChatCodeBlock, ChatConversation, ChatMessage, ChatRole, ChatSource, ChatStreamChunk, ChatToolCall, ConversationStatus, MessageStatus, SendMessageOptions, SendMessageResult, StreamMessageResult } from "./message-types.js";
- import { ConversationStore, InMemoryConversationStore, createInMemoryConversationStore } from "./conversation-store.js";
- import { ChatService, ChatServiceConfig, createChatService } from "./chat-service.js";
- export { ChatAttachment, ChatCodeBlock, ChatConversation, ChatMessage, ChatRole, ChatService, ChatServiceConfig, ChatSource, ChatStreamChunk, ChatToolCall, ConversationStatus, ConversationStore, InMemoryConversationStore, MessageStatus, SendMessageOptions, SendMessageResult, StreamMessageResult, createChatService, createInMemoryConversationStore };
package/dist/core/message-types.d.ts DELETED
@@ -1,149 +0,0 @@
- //#region src/core/message-types.d.ts
- /**
-  * Message and conversation types for AI Chat
-  */
- /**
-  * Role of a message participant
-  */
- type ChatRole = 'user' | 'assistant' | 'system';
- /**
-  * Status of a message
-  */
- type MessageStatus = 'pending' | 'streaming' | 'completed' | 'error';
- /**
-  * Attachment type for messages
-  */
- interface ChatAttachment {
-   id: string;
-   type: 'file' | 'image' | 'code' | 'spec';
-   name: string;
-   content?: string;
-   mimeType?: string;
-   size?: number;
-   path?: string;
- }
- /**
-  * Code block within a message
-  */
- interface ChatCodeBlock {
-   id: string;
-   language: string;
-   code: string;
-   filename?: string;
-   startLine?: number;
-   endLine?: number;
- }
- /**
-  * Tool call information
-  */
- interface ChatToolCall {
-   id: string;
-   name: string;
-   args: Record<string, unknown>;
-   result?: unknown;
-   status: 'pending' | 'running' | 'completed' | 'error';
-   error?: string;
- }
- /**
-  * Source/citation in a message
-  */
- interface ChatSource {
-   id: string;
-   title: string;
-   url?: string;
-   snippet?: string;
-   type: 'file' | 'spec' | 'doc' | 'web';
- }
- /**
-  * A single chat message
-  */
- interface ChatMessage {
-   id: string;
-   conversationId: string;
-   role: ChatRole;
-   content: string;
-   status: MessageStatus;
-   createdAt: Date;
-   updatedAt: Date;
-   attachments?: ChatAttachment[];
-   codeBlocks?: ChatCodeBlock[];
-   toolCalls?: ChatToolCall[];
-   sources?: ChatSource[];
-   reasoning?: string;
-   usage?: {
-     inputTokens: number;
-     outputTokens: number;
-   };
-   error?: {
-     code: string;
-     message: string;
-   };
-   metadata?: Record<string, unknown>;
- }
- /**
-  * Conversation status
-  */
- type ConversationStatus = 'active' | 'archived' | 'deleted';
- /**
-  * A conversation containing multiple messages
-  */
- interface ChatConversation {
-   id: string;
-   title?: string;
-   status: ConversationStatus;
-   createdAt: Date;
-   updatedAt: Date;
-   provider: string;
-   model: string;
-   workspacePath?: string;
-   contextFiles?: string[];
-   messages: ChatMessage[];
-   summary?: string;
-   metadata?: Record<string, unknown>;
- }
- /**
-  * Options for sending a message
-  */
- interface SendMessageOptions {
-   conversationId?: string;
-   content: string;
-   attachments?: ChatAttachment[];
-   systemPrompt?: string;
-   maxTokens?: number;
-   temperature?: number;
-   stream?: boolean;
- }
- /**
-  * Streaming chunk from AI response
-  */
- interface ChatStreamChunk {
-   type: 'text' | 'reasoning' | 'tool_call' | 'source' | 'error' | 'done';
-   content?: string;
-   toolCall?: ChatToolCall;
-   source?: ChatSource;
-   error?: {
-     code: string;
-     message: string;
-   };
-   usage?: {
-     inputTokens: number;
-     outputTokens: number;
-   };
- }
- /**
-  * Result of sending a message
-  */
- interface SendMessageResult {
-   message: ChatMessage;
-   conversation: ChatConversation;
- }
- /**
-  * Streaming result
-  */
- interface StreamMessageResult {
-   conversationId: string;
-   messageId: string;
-   stream: AsyncIterable<ChatStreamChunk>;
- }
- //#endregion
- export { ChatAttachment, ChatCodeBlock, ChatConversation, ChatMessage, ChatRole, ChatSource, ChatStreamChunk, ChatToolCall, ConversationStatus, MessageStatus, SendMessageOptions, SendMessageResult, StreamMessageResult };
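StreamMessageResult pairs the new ids with an AsyncIterable of ChatStreamChunk, so a consumer switches on chunk.type. A hedged sketch, assuming a service built as in the earlier ChatService example; the chunk handling follows the discriminated union above:

```ts
// Sketch: consuming a streamed response as typed by the removed message-types.d.ts.
import { createChatService, createProviderFromEnv } from "@lssm/module.ai-chat";

const service = createChatService({ provider: createProviderFromEnv() });

// stream() returns the ids up front plus an AsyncIterable of typed chunks.
const { conversationId, messageId, stream } = await service.stream({
  content: "Explain the last change.",
});

for await (const chunk of stream) {
  switch (chunk.type) {
    case "text":
      process.stdout.write(chunk.content ?? "");
      break;
    case "error":
      console.error(chunk.error?.code, chunk.error?.message);
      break;
    case "done":
      // usage arrives on the terminal chunk per the ChatStreamChunk type
      console.log(`\n[${conversationId}/${messageId}]`, chunk.usage);
      break;
  }
}
```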
package/dist/index.d.ts DELETED
@@ -1,16 +0,0 @@
- import { FileInfo, SpecInfo, WorkspaceContext, WorkspaceContextConfig, WorkspaceSummary, createWorkspaceContext } from "./context/workspace-context.js";
- import { BuiltContext, ContextBuilder, ContextBuilderOptions, ContextEntry, createContextBuilder } from "./context/context-builder.js";
- import { FileOperation, FileOperationResult, FileOperations, FileReadResult, FileSystem, FileWriteResult, createNodeFileOperations } from "./context/file-operations.js";
- import { ChatAttachment, ChatCodeBlock, ChatConversation, ChatMessage, ChatRole, ChatSource, ChatStreamChunk, ChatToolCall, ConversationStatus, MessageStatus, SendMessageOptions, SendMessageResult, StreamMessageResult } from "./core/message-types.js";
- import { ConversationStore, InMemoryConversationStore, createInMemoryConversationStore } from "./core/conversation-store.js";
- import { ChatService, ChatServiceConfig, createChatService } from "./core/chat-service.js";
- import { AiChatFeature } from "./ai-chat.feature.js";
- import { isStudioAvailable, supportsLocalMode } from "./providers/chat-utilities.js";
- import { ChatModelInfo, ChatProvider, ChatProviderConfig, ChatProviderMode, ChatProviderName, DEFAULT_MODELS, MODELS, ModelCapabilities, ProviderAvailability, createProvider, createProviderFromEnv, getAvailableProviders, getDefaultModel, getEnvVarName, getModelInfo, getModelsForProvider, getRecommendedModels, hasCredentials, isOllamaRunning, listOllamaModels, validateProvider } from "./providers/index.js";
- import { ChatContainer } from "./presentation/components/ChatContainer.js";
- import { ChatMessage as ChatMessage$1 } from "./presentation/components/ChatMessage.js";
- import { ChatInput } from "./presentation/components/ChatInput.js";
- import "./presentation/components/index.js";
- import { useChat } from "./presentation/hooks/useChat.js";
- import { useProviders } from "./presentation/hooks/useProviders.js";
- export { AiChatFeature, BuiltContext, ChatAttachment, ChatCodeBlock, ChatContainer, ChatConversation, ChatInput, ChatMessage, ChatMessage$1 as ChatMessageComponent, ChatModelInfo, ChatProvider, ChatProviderConfig, ChatProviderMode, ChatProviderName, ChatRole, ChatService, ChatServiceConfig, ChatSource, ChatStreamChunk, ChatToolCall, ContextBuilder, ContextBuilderOptions, ContextEntry, ConversationStatus, ConversationStore, DEFAULT_MODELS, FileInfo, FileOperation, FileOperationResult, FileOperations, FileReadResult, FileSystem, FileWriteResult, InMemoryConversationStore, MODELS, MessageStatus, ModelCapabilities, ProviderAvailability, SendMessageOptions, SendMessageResult, SpecInfo, StreamMessageResult, WorkspaceContext, WorkspaceContextConfig, WorkspaceSummary, createChatService, createContextBuilder, createInMemoryConversationStore, createNodeFileOperations, createProvider, createProviderFromEnv, createWorkspaceContext, getAvailableProviders, getDefaultModel, getEnvVarName, getModelInfo, getModelsForProvider, getRecommendedModels, hasCredentials, isOllamaRunning, isStudioAvailable, listOllamaModels, supportsLocalMode, useChat, useProviders, validateProvider };
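Note the `ChatMessage$1 as ChatMessageComponent` alias in the export list: the ChatMessage type from core/message-types keeps the bare name, and the React component of the same name is re-exported as ChatMessageComponent. In consumer code that looks like this (import names only; component props are not shown in this diff):

```ts
// Sketch: the barrel resolves the type/component name clash via an alias.
import {
  type ChatMessage,     // the message type from core/message-types
  ChatMessageComponent, // the presentation component, aliased to avoid the clash
  ChatContainer,
  useChat,
} from "@lssm/module.ai-chat";
```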
package/dist/libs/ai-providers/dist/factory.js DELETED
@@ -1,225 +0,0 @@
- import { DEFAULT_MODELS, getModelsForProvider } from "./models.js";
- import { anthropic } from "@ai-sdk/anthropic";
- import { google } from "@ai-sdk/google";
- import { mistral } from "@ai-sdk/mistral";
- import { openai } from "@ai-sdk/openai";
- import { ollama } from "ollama-ai-provider";
-
- //#region ../../libs/ai-providers/dist/factory.js
- /**
-  * Base provider implementation
-  */
- var BaseProvider = class {
-   name;
-   model;
-   mode;
-   config;
-   cachedModel = null;
-   constructor(config) {
-     this.name = config.provider;
-     this.model = config.model ?? DEFAULT_MODELS[config.provider];
-     this.mode = this.determineMode(config);
-     this.config = config;
-   }
-   determineMode(config) {
-     if (config.provider === "ollama") return "local";
-     if (config.apiKey) return "byok";
-     return "managed";
-   }
-   getModel() {
-     if (!this.cachedModel) this.cachedModel = this.createModel();
-     return this.cachedModel;
-   }
-   createModel() {
-     const { apiKey, baseUrl, proxyUrl, organizationId } = this.config;
-     switch (this.name) {
-       case "ollama": {
-         const originalBaseUrl = process.env.OLLAMA_BASE_URL;
-         if (baseUrl && baseUrl !== "http://localhost:11434") process.env.OLLAMA_BASE_URL = baseUrl;
-         const ollamaModel = ollama(this.model);
-         if (originalBaseUrl !== void 0) process.env.OLLAMA_BASE_URL = originalBaseUrl;
-         else if (baseUrl && baseUrl !== "http://localhost:11434") delete process.env.OLLAMA_BASE_URL;
-         return ollamaModel;
-       }
-       case "openai":
-         if (this.mode === "managed") {
-           const originalBaseUrl = process.env.OPENAI_BASE_URL;
-           if (proxyUrl) process.env.OPENAI_BASE_URL = proxyUrl;
-           const model = openai(this.model);
-           if (originalBaseUrl !== void 0) process.env.OPENAI_BASE_URL = originalBaseUrl;
-           else if (proxyUrl) delete process.env.OPENAI_BASE_URL;
-           return model;
-         }
-         return openai(this.model);
-       case "anthropic":
-         if (this.mode === "managed") {
-           const originalBaseUrl = process.env.OPENAI_BASE_URL;
-           if (proxyUrl) process.env.OPENAI_BASE_URL = proxyUrl;
-           const model = openai(this.model);
-           if (originalBaseUrl !== void 0) process.env.OPENAI_BASE_URL = originalBaseUrl;
-           else if (proxyUrl) delete process.env.OPENAI_BASE_URL;
-           return model;
-         }
-         return anthropic(this.model);
-       case "mistral":
-         if (this.mode === "managed") {
-           const originalBaseUrl = process.env.OPENAI_BASE_URL;
-           if (proxyUrl) process.env.OPENAI_BASE_URL = proxyUrl;
-           const model = openai(this.model);
-           if (originalBaseUrl !== void 0) process.env.OPENAI_BASE_URL = originalBaseUrl;
-           else if (proxyUrl) delete process.env.OPENAI_BASE_URL;
-           return model;
-         }
-         return mistral(this.model);
-       case "gemini":
-         if (this.mode === "managed") {
-           const originalBaseUrl = process.env.OPENAI_BASE_URL;
-           if (proxyUrl) process.env.OPENAI_BASE_URL = proxyUrl;
-           const model = openai(this.model);
-           if (originalBaseUrl !== void 0) process.env.OPENAI_BASE_URL = originalBaseUrl;
-           else if (proxyUrl) delete process.env.OPENAI_BASE_URL;
-           return model;
-         }
-         return google(this.model);
-       default: throw new Error(`Unknown provider: ${this.name}`);
-     }
-   }
-   async listModels() {
-     if (this.name === "ollama") return this.listOllamaModels();
-     return getModelsForProvider(this.name);
-   }
-   async listOllamaModels() {
-     try {
-       const baseUrl = this.config.baseUrl ?? "http://localhost:11434";
-       const response = await fetch(`${baseUrl}/api/tags`);
-       if (!response.ok) return getModelsForProvider("ollama");
-       return ((await response.json()).models ?? []).map((m) => ({
-         id: m.name,
-         name: m.name,
-         provider: "ollama",
-         contextWindow: 8e3,
-         capabilities: {
-           vision: false,
-           tools: false,
-           reasoning: false,
-           streaming: true
-         }
-       }));
-     } catch {
-       return getModelsForProvider("ollama");
-     }
-   }
-   async validate() {
-     if (this.name === "ollama") return this.validateOllama();
-     if (this.mode === "byok" && !this.config.apiKey) return {
-       valid: false,
-       error: `API key required for ${this.name}`
-     };
-     if (this.mode === "managed" && !this.config.proxyUrl && !this.config.organizationId) return {
-       valid: false,
-       error: "Managed mode requires proxyUrl or organizationId"
-     };
-     return { valid: true };
-   }
-   async validateOllama() {
-     try {
-       const baseUrl = this.config.baseUrl ?? "http://localhost:11434";
-       const response = await fetch(`${baseUrl}/api/tags`);
-       if (!response.ok) return {
-         valid: false,
-         error: `Ollama server returned ${response.status}`
-       };
-       const models = (await response.json()).models ?? [];
-       if (!models.some((m) => m.name === this.model)) return {
-         valid: false,
-         error: `Model "${this.model}" not found. Available: ${models.map((m) => m.name).join(", ")}`
-       };
-       return { valid: true };
-     } catch (error) {
-       return {
-         valid: false,
-         error: `Cannot connect to Ollama at ${this.config.baseUrl ?? "http://localhost:11434"}: ${error instanceof Error ? error.message : String(error)}`
-       };
-     }
-   }
- };
- /**
-  * Create a provider from configuration
-  */
- function createProvider(config) {
-   return new BaseProvider(config);
- }
- /**
-  * Create a provider from environment variables
-  */
- function createProviderFromEnv() {
-   const provider = process.env.CONTRACTSPEC_AI_PROVIDER ?? "openai";
-   const model = process.env.CONTRACTSPEC_AI_MODEL;
-   let apiKey;
-   switch (provider) {
-     case "openai":
-       apiKey = process.env.OPENAI_API_KEY;
-       break;
-     case "anthropic":
-       apiKey = process.env.ANTHROPIC_API_KEY;
-       break;
-     case "mistral":
-       apiKey = process.env.MISTRAL_API_KEY;
-       break;
-     case "gemini":
-       apiKey = process.env.GOOGLE_API_KEY ?? process.env.GEMINI_API_KEY;
-       break;
-     case "ollama": break;
-   }
-   return createProvider({
-     provider,
-     model,
-     apiKey,
-     baseUrl: process.env.OLLAMA_BASE_URL,
-     proxyUrl: process.env.CONTRACTSPEC_AI_PROXY_URL,
-     organizationId: process.env.CONTRACTSPEC_ORG_ID
-   });
- }
- /**
-  * Get all available providers with their status
-  */
- function getAvailableProviders() {
-   const providers = [];
-   providers.push({
-     provider: "ollama",
-     available: true,
-     mode: "local"
-   });
-   const openaiKey = process.env.OPENAI_API_KEY;
-   providers.push({
-     provider: "openai",
-     available: Boolean(openaiKey) || Boolean(process.env.CONTRACTSPEC_AI_PROXY_URL),
-     mode: openaiKey ? "byok" : "managed",
-     reason: !openaiKey ? "Set OPENAI_API_KEY for BYOK mode" : void 0
-   });
-   const anthropicKey = process.env.ANTHROPIC_API_KEY;
-   providers.push({
-     provider: "anthropic",
-     available: Boolean(anthropicKey) || Boolean(process.env.CONTRACTSPEC_AI_PROXY_URL),
-     mode: anthropicKey ? "byok" : "managed",
-     reason: !anthropicKey ? "Set ANTHROPIC_API_KEY for BYOK mode" : void 0
-   });
-   const mistralKey = process.env.MISTRAL_API_KEY;
-   providers.push({
-     provider: "mistral",
-     available: Boolean(mistralKey) || Boolean(process.env.CONTRACTSPEC_AI_PROXY_URL),
-     mode: mistralKey ? "byok" : "managed",
-     reason: !mistralKey ? "Set MISTRAL_API_KEY for BYOK mode" : void 0
-   });
-   const geminiKey = process.env.GOOGLE_API_KEY ?? process.env.GEMINI_API_KEY;
-   providers.push({
-     provider: "gemini",
-     available: Boolean(geminiKey) || Boolean(process.env.CONTRACTSPEC_AI_PROXY_URL),
-     mode: geminiKey ? "byok" : "managed",
-     reason: !geminiKey ? "Set GOOGLE_API_KEY for BYOK mode" : void 0
-   });
-   return providers;
- }
-
- //#endregion
- export { createProvider, createProviderFromEnv, getAvailableProviders };
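The removed factory follows a detect-validate-resolve flow: getAvailableProviders() reports availability per provider (Ollama is always listed as local), validate() checks keys or probes the Ollama HTTP API, and getModel() lazily builds and caches the AI SDK model, temporarily swapping the OPENAI_BASE_URL/OLLAMA_BASE_URL env vars when a proxy or custom base URL is set. A sketch using only the exports above; the selection policy here is illustrative, not part of the library:

```ts
// Sketch: pick the first available provider, validate it, then resolve its model.
import { createProvider, getAvailableProviders } from "@lssm/lib.ai-providers";

const candidate = getAvailableProviders().find((p) => p.available);
if (!candidate) throw new Error("No AI provider configured");

// model is optional; the factory falls back to DEFAULT_MODELS[provider].
const provider = createProvider({ provider: candidate.provider });

const check = await provider.validate(); // probes http://localhost:11434/api/tags for Ollama
if (!check.valid) throw new Error(check.error);

const model = provider.getModel(); // cached model from @ai-sdk/* or ollama-ai-provider
console.log(provider.name, provider.mode, provider.model);
console.log(await provider.listModels());
```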
package/dist/libs/ai-providers/dist/index.js DELETED
@@ -1,4 +0,0 @@
- import { DEFAULT_MODELS, MODELS, getDefaultModel, getModelInfo, getModelsForProvider, getRecommendedModels } from "./models.js";
- import { createProvider, createProviderFromEnv, getAvailableProviders } from "./factory.js";
- import { getEnvVarName, hasCredentials, isOllamaRunning, listOllamaModels, validateProvider } from "./validation.js";
- import "./legacy.js";
package/dist/libs/ai-providers/dist/legacy.js DELETED
@@ -1,2 +0,0 @@
- import { getRecommendedModels } from "./models.js";
- import "./factory.js";