@lssm/module.ai-chat 0.0.0-canary-20251217054315 → 0.0.0-canary-20251217060433
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/ai-chat.feature.d.ts +11 -0
- package/dist/context/context-builder.d.ts +56 -0
- package/dist/context/file-operations.d.ts +99 -0
- package/dist/context/index.d.ts +4 -0
- package/dist/context/workspace-context.d.ts +116 -0
- package/dist/core/chat-service.d.ts +72 -0
- package/dist/core/conversation-store.d.ts +73 -0
- package/dist/core/index.d.ts +4 -0
- package/dist/core/message-types.d.ts +149 -0
- package/dist/index.d.ts +18 -0
- package/dist/presentation/components/ChatContainer.d.ts +20 -0
- package/dist/presentation/components/ChatInput.d.ts +34 -0
- package/dist/presentation/components/ChatMessage.d.ts +23 -0
- package/dist/presentation/components/CodePreview.d.ts +39 -0
- package/dist/presentation/components/ContextIndicator.d.ts +25 -0
- package/dist/presentation/components/ModelPicker.d.ts +38 -0
- package/dist/presentation/components/index.d.ts +7 -0
- package/dist/presentation/hooks/index.d.ts +3 -0
- package/dist/presentation/hooks/useChat.d.ts +66 -0
- package/dist/presentation/hooks/useProviders.d.ts +37 -0
- package/dist/presentation/index.d.ts +11 -0
- package/dist/providers/chat-utilities.d.ts +14 -0
- package/dist/providers/index.d.ts +3 -0
- package/package.json +17 -17

package/dist/ai-chat.feature.d.ts
ADDED
@@ -0,0 +1,11 @@
+import { FeatureModuleSpec } from "@lssm/lib.contracts";
+
+//#region src/ai-chat.feature.d.ts
+
+/**
+ * AI Chat feature module that bundles conversational AI assistance
+ * for ContractSpec development across CLI, VSCode, and Studio.
+ */
+declare const AiChatFeature: FeatureModuleSpec;
+//#endregion
+export { AiChatFeature };

package/dist/context/context-builder.d.ts
ADDED
@@ -0,0 +1,56 @@
+import { WorkspaceContext } from "./workspace-context.js";
+
+//#region src/context/context-builder.d.ts
+
+/**
+ * Context entry for a file or spec
+ */
+interface ContextEntry {
+  type: 'spec' | 'file' | 'reference';
+  path: string;
+  content?: string;
+  summary?: string;
+  relevance: number;
+}
+/**
+ * Built context for LLM
+ */
+interface BuiltContext {
+  entries: ContextEntry[];
+  summary: string;
+  totalTokensEstimate: number;
+}
+/**
+ * Options for building context
+ */
+interface ContextBuilderOptions {
+  /** Maximum estimated tokens for context */
+  maxTokens?: number;
+  /** Query to use for relevance scoring */
+  query?: string;
+  /** Specific files to include */
+  includeFiles?: string[];
+  /** Specific specs to include */
+  includeSpecs?: string[];
+}
+/**
+ * Context builder for creating rich LLM context
+ */
+declare class ContextBuilder {
+  private readonly context;
+  constructor(context: WorkspaceContext);
+  /**
+   * Build context for a chat message
+   */
+  build(options?: ContextBuilderOptions): BuiltContext;
+  /**
+   * Build a text summary of the context entries
+   */
+  private buildSummary;
+}
+/**
+ * Create a context builder
+ */
+declare function createContextBuilder(context: WorkspaceContext): ContextBuilder;
+//#endregion
+export { BuiltContext, ContextBuilder, ContextBuilderOptions, ContextEntry, createContextBuilder };
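
Taken together, these declarations describe a two-step flow: scan a workspace into a WorkspaceContext, then ask a ContextBuilder for a token-bounded slice of it. A minimal usage sketch, assuming the `./context` subpath export shown in package.json further below; the workspace path, query, and token budget are illustrative values, not defaults defined by the package:

import { createContextBuilder, createWorkspaceContext } from "@lssm/module.ai-chat/context";

// Illustrative values throughout; only the API shapes come from the declarations above.
async function buildChatContext(workspacePath: string) {
  const workspace = await createWorkspaceContext(workspacePath);
  const builder = createContextBuilder(workspace);

  const built = builder.build({ query: "refund command specs", maxTokens: 4000 });
  console.log(built.totalTokensEstimate, "estimated tokens across", built.entries.length, "entries");
  return built.summary; // text block intended for the LLM prompt
}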

package/dist/context/file-operations.d.ts
ADDED
@@ -0,0 +1,99 @@
+//#region src/context/file-operations.d.ts
+/**
+ * File operations for workspace context
+ *
+ * Provides read/write operations for files in the workspace.
+ */
+/**
+ * Result of a file read operation
+ */
+interface FileReadResult {
+  success: boolean;
+  path: string;
+  content?: string;
+  error?: string;
+}
+/**
+ * Result of a file write operation
+ */
+interface FileWriteResult {
+  success: boolean;
+  path: string;
+  error?: string;
+}
+/**
+ * File operation to perform
+ */
+interface FileOperation {
+  type: 'read' | 'write' | 'create' | 'delete';
+  path: string;
+  content?: string;
+}
+/**
+ * Result of a file operation
+ */
+interface FileOperationResult {
+  operation: FileOperation;
+  success: boolean;
+  content?: string;
+  error?: string;
+}
+/**
+ * Interface for file system operations
+ */
+interface FileSystem {
+  /**
+   * Read a file's contents
+   */
+  readFile(path: string): Promise<string>;
+  /**
+   * Write content to a file
+   */
+  writeFile(path: string, content: string): Promise<void>;
+  /**
+   * Check if a file exists
+   */
+  exists(path: string): Promise<boolean>;
+  /**
+   * Delete a file
+   */
+  deleteFile(path: string): Promise<void>;
+  /**
+   * List files in a directory
+   */
+  listFiles(directory: string, options?: {
+    recursive?: boolean;
+    pattern?: string;
+  }): Promise<string[]>;
+}
+/**
+ * File operations executor
+ */
+declare class FileOperations {
+  private readonly fs;
+  private readonly workspacePath;
+  private readonly allowWrites;
+  constructor(fs: FileSystem, workspacePath: string, allowWrites?: boolean);
+  /**
+   * Read a file
+   */
+  read(relativePath: string): Promise<FileReadResult>;
+  /**
+   * Write to a file
+   */
+  write(relativePath: string, content: string): Promise<FileWriteResult>;
+  /**
+   * Execute multiple file operations
+   */
+  execute(operations: FileOperation[]): Promise<FileOperationResult[]>;
+  /**
+   * Resolve a relative path to an absolute path
+   */
+  private resolvePath;
+}
+/**
+ * Create a file operations instance with Node.js fs
+ */
+declare function createNodeFileOperations(workspacePath: string, allowWrites?: boolean): FileOperations;
+//#endregion
+export { FileOperation, FileOperationResult, FileOperations, FileReadResult, FileSystem, FileWriteResult, createNodeFileOperations };
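
The FileSystem interface and FileOperations executor describe a gated read/write layer for the assistant. A sketch of the Node-backed factory and batch execution; paths and file contents are placeholders, and writes appear to be opt-in via the allowWrites flag:

import { createNodeFileOperations } from "@lssm/module.ai-chat/context";

// Sketch only: the workspace path and file names are placeholders.
async function applyAssistantEdits(workspacePath: string) {
  // Writes are opt-in; the second argument enables them for this sketch.
  const ops = createNodeFileOperations(workspacePath, true);

  const read = await ops.read("specs/checkout.command.ts");
  if (!read.success) throw new Error(read.error ?? `cannot read ${read.path}`);

  // Batch form: each operation reports success/error individually.
  const results = await ops.execute([
    { type: 'create', path: "specs/refund.command.ts", content: "// generated spec" },
    { type: 'delete', path: "specs/obsolete.command.ts" },
  ]);
  return results.filter((r) => !r.success);
}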

package/dist/context/index.d.ts
ADDED
@@ -0,0 +1,4 @@
+import { FileInfo, SpecInfo, WorkspaceContext, WorkspaceContextConfig, WorkspaceSummary, createWorkspaceContext } from "./workspace-context.js";
+import { BuiltContext, ContextBuilder, ContextBuilderOptions, ContextEntry, createContextBuilder } from "./context-builder.js";
+import { FileOperation, FileOperationResult, FileOperations, FileReadResult, FileSystem, FileWriteResult, createNodeFileOperations } from "./file-operations.js";
+export { BuiltContext, ContextBuilder, ContextBuilderOptions, ContextEntry, FileInfo, FileOperation, FileOperationResult, FileOperations, FileReadResult, FileSystem, FileWriteResult, SpecInfo, WorkspaceContext, WorkspaceContextConfig, WorkspaceSummary, createContextBuilder, createNodeFileOperations, createWorkspaceContext };

package/dist/context/workspace-context.d.ts
ADDED
@@ -0,0 +1,116 @@
+//#region src/context/workspace-context.d.ts
+/**
+ * Workspace context management
+ *
+ * Provides access to specs, files, and codebase information
+ * for context-aware AI chat assistance.
+ */
+/**
+ * Spec information for context
+ */
+interface SpecInfo {
+  name: string;
+  version: number;
+  type: 'command' | 'query' | 'event' | 'presentation';
+  path: string;
+  description?: string;
+  tags?: string[];
+}
+/**
+ * File information for context
+ */
+interface FileInfo {
+  path: string;
+  relativePath: string;
+  name: string;
+  extension: string;
+  size: number;
+  isSpec: boolean;
+}
+/**
+ * Workspace summary for context
+ */
+interface WorkspaceSummary {
+  name: string;
+  path: string;
+  specs: {
+    total: number;
+    commands: number;
+    queries: number;
+    events: number;
+    presentations: number;
+  };
+  files: {
+    total: number;
+    typescript: number;
+    specFiles: number;
+  };
+}
+/**
+ * Configuration for workspace context
+ */
+interface WorkspaceContextConfig {
+  /** Root path of the workspace */
+  workspacePath: string;
+  /** File patterns to include */
+  includePatterns?: string[];
+  /** File patterns to exclude */
+  excludePatterns?: string[];
+  /** Maximum file size to read (bytes) */
+  maxFileSize?: number;
+  /** Whether to enable file writes */
+  allowWrites?: boolean;
+}
+/**
+ * Workspace context for AI chat
+ */
+declare class WorkspaceContext {
+  readonly workspacePath: string;
+  readonly allowWrites: boolean;
+  private specs;
+  private files;
+  private initialized;
+  constructor(config: WorkspaceContextConfig);
+  /**
+   * Initialize the workspace context by scanning files
+   */
+  initialize(): Promise<void>;
+  /**
+   * Get all discovered specs
+   */
+  getSpecs(): SpecInfo[];
+  /**
+   * Get all discovered files
+   */
+  getFiles(): FileInfo[];
+  /**
+   * Add specs to the context
+   */
+  addSpecs(specs: SpecInfo[]): void;
+  /**
+   * Add files to the context
+   */
+  addFiles(files: FileInfo[]): void;
+  /**
+   * Get a summary of the workspace for context
+   */
+  getSummary(): WorkspaceSummary;
+  /**
+   * Get a context summary for LLM prompts
+   */
+  getContextSummary(): string;
+  /**
+   * Find specs matching a query
+   */
+  findSpecs(query: string): SpecInfo[];
+  /**
+   * Find files matching a query
+   */
+  findFiles(query: string): FileInfo[];
+}
+/**
+ * Create a workspace context from a path
+ */
+declare function createWorkspaceContext(path: string, options?: Partial<WorkspaceContextConfig>): Promise<WorkspaceContext>;
+//#endregion
+export { FileInfo, SpecInfo, WorkspaceContext, WorkspaceContextConfig, WorkspaceSummary, createWorkspaceContext };
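
A hedged sketch of scanning a workspace and querying it. It assumes the context returned by createWorkspaceContext is ready to query (the function is async and an explicit initialize() method also exists, so that is an assumption); the query string is illustrative:

import { createWorkspaceContext } from "@lssm/module.ai-chat/context";

// Assumes the returned context is already scanned; call initialize() if a rescan is needed.
async function describeWorkspace(workspacePath: string) {
  const ctx = await createWorkspaceContext(workspacePath, { allowWrites: false });

  const summary = ctx.getSummary();
  console.log(`${summary.specs.total} specs / ${summary.files.typescript} TypeScript files in ${summary.name}`);

  // The same lookups the ContextBuilder relies on are available directly.
  return ctx.findSpecs("checkout").map((s) => `${s.type}:${s.name}@v${s.version}`);
}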

package/dist/core/chat-service.d.ts
ADDED
@@ -0,0 +1,72 @@
+import { WorkspaceContext } from "../context/workspace-context.js";
+import { ChatConversation, SendMessageOptions, SendMessageResult, StreamMessageResult } from "./message-types.js";
+import { ConversationStore } from "./conversation-store.js";
+import { Provider } from "@lssm/lib.ai-providers";
+
+//#region src/core/chat-service.d.ts
+
+/**
+ * Configuration for ChatService
+ */
+interface ChatServiceConfig {
+  /** LLM provider to use */
+  provider: Provider;
+  /** Optional workspace context for code-aware chat */
+  context?: WorkspaceContext;
+  /** Optional conversation store (defaults to in-memory) */
+  store?: ConversationStore;
+  /** Default system prompt */
+  systemPrompt?: string;
+  /** Maximum conversation history to include */
+  maxHistoryMessages?: number;
+  /** Callback for usage tracking */
+  onUsage?: (usage: {
+    inputTokens: number;
+    outputTokens: number;
+  }) => void;
+}
+/**
+ * Main chat service for AI-powered conversations
+ */
+declare class ChatService {
+  private readonly provider;
+  private readonly context?;
+  private readonly store;
+  private readonly systemPrompt;
+  private readonly maxHistoryMessages;
+  private readonly onUsage?;
+  constructor(config: ChatServiceConfig);
+  /**
+   * Send a message and get a complete response
+   */
+  send(options: SendMessageOptions): Promise<SendMessageResult>;
+  /**
+   * Send a message and get a streaming response
+   */
+  stream(options: SendMessageOptions): Promise<StreamMessageResult>;
+  /**
+   * Get a conversation by ID
+   */
+  getConversation(conversationId: string): Promise<ChatConversation | null>;
+  /**
+   * List conversations
+   */
+  listConversations(options?: {
+    limit?: number;
+    offset?: number;
+  }): Promise<ChatConversation[]>;
+  /**
+   * Delete a conversation
+   */
+  deleteConversation(conversationId: string): Promise<boolean>;
+  /**
+   * Build prompt string for LLM
+   */
+  private buildPrompt;
+}
+/**
+ * Create a chat service with the given configuration
+ */
+declare function createChatService(config: ChatServiceConfig): ChatService;
+//#endregion
+export { ChatService, ChatServiceConfig, createChatService };
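
ChatService ties a provider, an optional workspace context, and a conversation store together. The sketch below shows the non-streaming path; the Provider value comes from @lssm/lib.ai-providers, whose construction API is not part of this diff, so it is passed in as a parameter, and the system prompt is an invented example:

import { createChatService } from "@lssm/module.ai-chat/core";
import type { WorkspaceContext } from "@lssm/module.ai-chat/context";
import type { Provider } from "@lssm/lib.ai-providers";

// Sketch: provider construction depends on @lssm/lib.ai-providers and is not covered here.
async function ask(provider: Provider, context?: WorkspaceContext) {
  const chat = createChatService({
    provider,
    context,
    systemPrompt: "You help author ContractSpec specs.", // illustrative prompt
    onUsage: ({ inputTokens, outputTokens }) => console.log({ inputTokens, outputTokens }),
  });

  const { message, conversation } = await chat.send({ content: "Draft a refund command spec." });
  console.log(conversation.id, message.content);
  return conversation;
}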

package/dist/core/conversation-store.d.ts
ADDED
@@ -0,0 +1,73 @@
+import { ChatConversation, ChatMessage, ConversationStatus } from "./message-types.js";
+
+//#region src/core/conversation-store.d.ts
+
+/**
+ * Interface for conversation persistence
+ */
+interface ConversationStore {
+  /**
+   * Get a conversation by ID
+   */
+  get(conversationId: string): Promise<ChatConversation | null>;
+  /**
+   * Create a new conversation
+   */
+  create(conversation: Omit<ChatConversation, 'id' | 'createdAt' | 'updatedAt'>): Promise<ChatConversation>;
+  /**
+   * Update conversation properties
+   */
+  update(conversationId: string, updates: Partial<Pick<ChatConversation, 'title' | 'status' | 'summary' | 'metadata'>>): Promise<ChatConversation | null>;
+  /**
+   * Append a message to a conversation
+   */
+  appendMessage(conversationId: string, message: Omit<ChatMessage, 'id' | 'conversationId' | 'createdAt' | 'updatedAt'>): Promise<ChatMessage>;
+  /**
+   * Update a message in a conversation
+   */
+  updateMessage(conversationId: string, messageId: string, updates: Partial<ChatMessage>): Promise<ChatMessage | null>;
+  /**
+   * Delete a conversation
+   */
+  delete(conversationId: string): Promise<boolean>;
+  /**
+   * List conversations with optional filters
+   */
+  list(options?: {
+    status?: ConversationStatus;
+    limit?: number;
+    offset?: number;
+  }): Promise<ChatConversation[]>;
+  /**
+   * Search conversations by content
+   */
+  search(query: string, limit?: number): Promise<ChatConversation[]>;
+}
+/**
+ * In-memory conversation store for development and testing
+ */
+declare class InMemoryConversationStore implements ConversationStore {
+  private readonly conversations;
+  get(conversationId: string): Promise<ChatConversation | null>;
+  create(conversation: Omit<ChatConversation, 'id' | 'createdAt' | 'updatedAt'>): Promise<ChatConversation>;
+  update(conversationId: string, updates: Partial<Pick<ChatConversation, 'title' | 'status' | 'summary' | 'metadata'>>): Promise<ChatConversation | null>;
+  appendMessage(conversationId: string, message: Omit<ChatMessage, 'id' | 'conversationId' | 'createdAt' | 'updatedAt'>): Promise<ChatMessage>;
+  updateMessage(conversationId: string, messageId: string, updates: Partial<ChatMessage>): Promise<ChatMessage | null>;
+  delete(conversationId: string): Promise<boolean>;
+  list(options?: {
+    status?: ConversationStatus;
+    limit?: number;
+    offset?: number;
+  }): Promise<ChatConversation[]>;
+  search(query: string, limit?: number): Promise<ChatConversation[]>;
+  /**
+   * Clear all conversations (for testing)
+   */
+  clear(): void;
+}
+/**
+ * Create an in-memory conversation store
+ */
+declare function createInMemoryConversationStore(): ConversationStore;
+//#endregion
+export { ConversationStore, InMemoryConversationStore, createInMemoryConversationStore };
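
The store interface is persistence-agnostic; the bundled in-memory implementation is enough to see the shape of the data. In the sketch below the provider and model strings are placeholders, not values this package defines:

import { createInMemoryConversationStore } from "@lssm/module.ai-chat/core";

// Sketch: seeds a conversation and appends a user message, then lists active conversations.
async function seedConversation() {
  const store = createInMemoryConversationStore();

  const conversation = await store.create({
    status: 'active',
    provider: 'example-provider', // placeholder
    model: 'example-model',       // placeholder
    messages: [],
    title: 'Spec brainstorming',
  });

  await store.appendMessage(conversation.id, {
    role: 'user',
    content: 'How should I model a refund command?',
    status: 'completed',
  });

  return store.list({ status: 'active', limit: 10 });
}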

package/dist/core/index.d.ts
ADDED
@@ -0,0 +1,4 @@
+import { ChatAttachment, ChatCodeBlock, ChatConversation, ChatMessage, ChatRole, ChatSource, ChatStreamChunk, ChatToolCall, ConversationStatus, MessageStatus, SendMessageOptions, SendMessageResult, StreamMessageResult } from "./message-types.js";
+import { ConversationStore, InMemoryConversationStore, createInMemoryConversationStore } from "./conversation-store.js";
+import { ChatService, ChatServiceConfig, createChatService } from "./chat-service.js";
+export { ChatAttachment, ChatCodeBlock, ChatConversation, ChatMessage, ChatRole, ChatService, ChatServiceConfig, ChatSource, ChatStreamChunk, ChatToolCall, ConversationStatus, ConversationStore, InMemoryConversationStore, MessageStatus, SendMessageOptions, SendMessageResult, StreamMessageResult, createChatService, createInMemoryConversationStore };

package/dist/core/message-types.d.ts
ADDED
@@ -0,0 +1,149 @@
+//#region src/core/message-types.d.ts
+/**
+ * Message and conversation types for AI Chat
+ */
+/**
+ * Role of a message participant
+ */
+type ChatRole = 'user' | 'assistant' | 'system';
+/**
+ * Status of a message
+ */
+type MessageStatus = 'pending' | 'streaming' | 'completed' | 'error';
+/**
+ * Attachment type for messages
+ */
+interface ChatAttachment {
+  id: string;
+  type: 'file' | 'image' | 'code' | 'spec';
+  name: string;
+  content?: string;
+  mimeType?: string;
+  size?: number;
+  path?: string;
+}
+/**
+ * Code block within a message
+ */
+interface ChatCodeBlock {
+  id: string;
+  language: string;
+  code: string;
+  filename?: string;
+  startLine?: number;
+  endLine?: number;
+}
+/**
+ * Tool call information
+ */
+interface ChatToolCall {
+  id: string;
+  name: string;
+  args: Record<string, unknown>;
+  result?: unknown;
+  status: 'pending' | 'running' | 'completed' | 'error';
+  error?: string;
+}
+/**
+ * Source/citation in a message
+ */
+interface ChatSource {
+  id: string;
+  title: string;
+  url?: string;
+  snippet?: string;
+  type: 'file' | 'spec' | 'doc' | 'web';
+}
+/**
+ * A single chat message
+ */
+interface ChatMessage {
+  id: string;
+  conversationId: string;
+  role: ChatRole;
+  content: string;
+  status: MessageStatus;
+  createdAt: Date;
+  updatedAt: Date;
+  attachments?: ChatAttachment[];
+  codeBlocks?: ChatCodeBlock[];
+  toolCalls?: ChatToolCall[];
+  sources?: ChatSource[];
+  reasoning?: string;
+  usage?: {
+    inputTokens: number;
+    outputTokens: number;
+  };
+  error?: {
+    code: string;
+    message: string;
+  };
+  metadata?: Record<string, unknown>;
+}
+/**
+ * Conversation status
+ */
+type ConversationStatus = 'active' | 'archived' | 'deleted';
+/**
+ * A conversation containing multiple messages
+ */
+interface ChatConversation {
+  id: string;
+  title?: string;
+  status: ConversationStatus;
+  createdAt: Date;
+  updatedAt: Date;
+  provider: string;
+  model: string;
+  workspacePath?: string;
+  contextFiles?: string[];
+  messages: ChatMessage[];
+  summary?: string;
+  metadata?: Record<string, unknown>;
+}
+/**
+ * Options for sending a message
+ */
+interface SendMessageOptions {
+  conversationId?: string;
+  content: string;
+  attachments?: ChatAttachment[];
+  systemPrompt?: string;
+  maxTokens?: number;
+  temperature?: number;
+  stream?: boolean;
+}
+/**
+ * Streaming chunk from AI response
+ */
+interface ChatStreamChunk {
+  type: 'text' | 'reasoning' | 'tool_call' | 'source' | 'error' | 'done';
+  content?: string;
+  toolCall?: ChatToolCall;
+  source?: ChatSource;
+  error?: {
+    code: string;
+    message: string;
+  };
+  usage?: {
+    inputTokens: number;
+    outputTokens: number;
+  };
+}
+/**
+ * Result of sending a message
+ */
+interface SendMessageResult {
+  message: ChatMessage;
+  conversation: ChatConversation;
+}
+/**
+ * Streaming result
+ */
+interface StreamMessageResult {
+  conversationId: string;
+  messageId: string;
+  stream: AsyncIterable<ChatStreamChunk>;
+}
+//#endregion
+export { ChatAttachment, ChatCodeBlock, ChatConversation, ChatMessage, ChatRole, ChatSource, ChatStreamChunk, ChatToolCall, ConversationStatus, MessageStatus, SendMessageOptions, SendMessageResult, StreamMessageResult };
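
StreamMessageResult exposes the response as AsyncIterable<ChatStreamChunk>, so a consumer is essentially a switch over chunk.type. A minimal collector sketch; whether usage arrives on the final 'done' chunk is an assumption here:

import type { ChatStreamChunk } from "@lssm/module.ai-chat/core";

// Collects text chunks from ChatService.stream() and surfaces error chunks as exceptions.
async function collectText(stream: AsyncIterable<ChatStreamChunk>): Promise<string> {
  let text = "";
  for await (const chunk of stream) {
    switch (chunk.type) {
      case 'text':
        text += chunk.content ?? "";
        break;
      case 'error':
        throw new Error(`${chunk.error?.code}: ${chunk.error?.message}`);
      case 'done':
        if (chunk.usage) console.log("tokens:", chunk.usage.inputTokens + chunk.usage.outputTokens);
        break;
      default:
        // 'reasoning', 'tool_call' and 'source' chunks are ignored in this sketch.
        break;
    }
  }
  return text;
}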

package/dist/index.d.ts
ADDED
@@ -0,0 +1,18 @@
+import { FileInfo, SpecInfo, WorkspaceContext, WorkspaceContextConfig, WorkspaceSummary, createWorkspaceContext } from "./context/workspace-context.js";
+import { BuiltContext, ContextBuilder, ContextBuilderOptions, ContextEntry, createContextBuilder } from "./context/context-builder.js";
+import { FileOperation, FileOperationResult, FileOperations, FileReadResult, FileSystem, FileWriteResult, createNodeFileOperations } from "./context/file-operations.js";
+import { ChatAttachment, ChatCodeBlock, ChatConversation, ChatMessage, ChatRole, ChatSource, ChatStreamChunk, ChatToolCall, ConversationStatus, MessageStatus, SendMessageOptions, SendMessageResult, StreamMessageResult } from "./core/message-types.js";
+import { ConversationStore, InMemoryConversationStore, createInMemoryConversationStore } from "./core/conversation-store.js";
+import { ChatService, ChatServiceConfig, createChatService } from "./core/chat-service.js";
+import "./core/index.js";
+import { AiChatFeature } from "./ai-chat.feature.js";
+import { isStudioAvailable, supportsLocalMode } from "./providers/chat-utilities.js";
+import { ChatModelInfo, ChatProvider, ChatProviderConfig, ChatProviderMode, ChatProviderName, DEFAULT_MODELS, MODELS, ModelCapabilities, ProviderAvailability, createProvider, createProviderFromEnv, getAvailableProviders, getDefaultModel, getEnvVarName, getModelInfo, getModelsForProvider, getRecommendedModels, hasCredentials, isOllamaRunning, listOllamaModels, validateProvider } from "./providers/index.js";
+import { ChatContainer } from "./presentation/components/ChatContainer.js";
+import { ChatMessage as ChatMessage$1 } from "./presentation/components/ChatMessage.js";
+import { ChatInput } from "./presentation/components/ChatInput.js";
+import "./presentation/components/index.js";
+import { useChat } from "./presentation/hooks/useChat.js";
+import { useProviders } from "./presentation/hooks/useProviders.js";
+import "./presentation/hooks/index.js";
+export { AiChatFeature, BuiltContext, ChatAttachment, ChatCodeBlock, ChatContainer, ChatConversation, ChatInput, ChatMessage, ChatMessage$1 as ChatMessageComponent, ChatModelInfo, ChatProvider, ChatProviderConfig, ChatProviderMode, ChatProviderName, ChatRole, ChatService, ChatServiceConfig, ChatSource, ChatStreamChunk, ChatToolCall, ContextBuilder, ContextBuilderOptions, ContextEntry, ConversationStatus, ConversationStore, DEFAULT_MODELS, FileInfo, FileOperation, FileOperationResult, FileOperations, FileReadResult, FileSystem, FileWriteResult, InMemoryConversationStore, MODELS, MessageStatus, ModelCapabilities, ProviderAvailability, SendMessageOptions, SendMessageResult, SpecInfo, StreamMessageResult, WorkspaceContext, WorkspaceContextConfig, WorkspaceSummary, createChatService, createContextBuilder, createInMemoryConversationStore, createNodeFileOperations, createProvider, createProviderFromEnv, createWorkspaceContext, getAvailableProviders, getDefaultModel, getEnvVarName, getModelInfo, getModelsForProvider, getRecommendedModels, hasCredentials, isOllamaRunning, isStudioAvailable, listOllamaModels, supportsLocalMode, useChat, useProviders, validateProvider };

package/dist/presentation/components/ChatContainer.d.ts
ADDED
@@ -0,0 +1,20 @@
+import * as React from "react";
+import * as react_jsx_runtime0 from "react/jsx-runtime";
+
+//#region src/presentation/components/ChatContainer.d.ts
+interface ChatContainerProps {
+  children: React.ReactNode;
+  className?: string;
+  /** Show scroll-to-bottom button when scrolled up */
+  showScrollButton?: boolean;
+}
+/**
+ * Container component for chat messages with scrolling
+ */
+declare function ChatContainer({
+  children,
+  className,
+  showScrollButton
+}: ChatContainerProps): react_jsx_runtime0.JSX.Element;
+//#endregion
+export { ChatContainer };

package/dist/presentation/components/ChatInput.d.ts
ADDED
@@ -0,0 +1,34 @@
+import { ChatAttachment } from "../../core/message-types.js";
+import * as react_jsx_runtime2 from "react/jsx-runtime";
+
+//#region src/presentation/components/ChatInput.d.ts
+interface ChatInputProps {
+  /** Called when a message is sent */
+  onSend: (content: string, attachments?: ChatAttachment[]) => void;
+  /** Whether input is disabled (e.g., during streaming) */
+  disabled?: boolean;
+  /** Whether currently loading/streaming */
+  isLoading?: boolean;
+  /** Placeholder text */
+  placeholder?: string;
+  /** Additional class name */
+  className?: string;
+  /** Show attachment button */
+  showAttachments?: boolean;
+  /** Max attachments allowed */
+  maxAttachments?: number;
+}
+/**
+ * Chat input component with attachment support
+ */
+declare function ChatInput({
+  onSend,
+  disabled,
+  isLoading,
+  placeholder,
+  className,
+  showAttachments,
+  maxAttachments
+}: ChatInputProps): react_jsx_runtime2.JSX.Element;
+//#endregion
+export { ChatInput };

package/dist/presentation/components/ChatMessage.d.ts
ADDED
@@ -0,0 +1,23 @@
+import { ChatMessage } from "../../core/message-types.js";
+import * as react_jsx_runtime1 from "react/jsx-runtime";
+
+//#region src/presentation/components/ChatMessage.d.ts
+interface ChatMessageProps {
+  message: ChatMessage;
+  className?: string;
+  /** Show copy button */
+  showCopy?: boolean;
+  /** Show avatar */
+  showAvatar?: boolean;
+}
+/**
+ * Chat message component
+ */
+declare function ChatMessage$1({
+  message,
+  className,
+  showCopy,
+  showAvatar
+}: ChatMessageProps): react_jsx_runtime1.JSX.Element;
+//#endregion
+export { ChatMessage$1 as ChatMessage };

package/dist/presentation/components/CodePreview.d.ts
ADDED
@@ -0,0 +1,39 @@
+import * as react_jsx_runtime0 from "react/jsx-runtime";
+
+//#region src/presentation/components/CodePreview.d.ts
+interface CodePreviewProps {
+  /** Code content */
+  code: string;
+  /** Programming language */
+  language?: string;
+  /** File name */
+  filename?: string;
+  /** Additional class name */
+  className?: string;
+  /** Show copy button */
+  showCopy?: boolean;
+  /** Show execute button (for applicable languages) */
+  showExecute?: boolean;
+  /** Called when execute is clicked */
+  onExecute?: (code: string) => void;
+  /** Show download button */
+  showDownload?: boolean;
+  /** Max height before scroll */
+  maxHeight?: number;
+}
+/**
+ * Code preview component with syntax highlighting placeholder
+ */
+declare function CodePreview({
+  code,
+  language,
+  filename,
+  className,
+  showCopy,
+  showExecute,
+  onExecute,
+  showDownload,
+  maxHeight
+}: CodePreviewProps): react_jsx_runtime0.JSX.Element;
+//#endregion
+export { CodePreview };

package/dist/presentation/components/ContextIndicator.d.ts
ADDED
@@ -0,0 +1,25 @@
+import { WorkspaceSummary } from "../../context/workspace-context.js";
+import * as react_jsx_runtime4 from "react/jsx-runtime";
+
+//#region src/presentation/components/ContextIndicator.d.ts
+interface ContextIndicatorProps {
+  /** Workspace summary */
+  summary?: WorkspaceSummary;
+  /** Whether context is active */
+  active?: boolean;
+  /** Additional class name */
+  className?: string;
+  /** Show details */
+  showDetails?: boolean;
+}
+/**
+ * Indicator showing active workspace context
+ */
+declare function ContextIndicator({
+  summary,
+  active,
+  className,
+  showDetails
+}: ContextIndicatorProps): react_jsx_runtime4.JSX.Element;
+//#endregion
+export { ContextIndicator };

package/dist/presentation/components/ModelPicker.d.ts
ADDED
@@ -0,0 +1,38 @@
+import { ProviderMode, ProviderName } from "@lssm/lib.ai-providers";
+import * as react_jsx_runtime3 from "react/jsx-runtime";
+
+//#region src/presentation/components/ModelPicker.d.ts
+interface ModelSelection {
+  provider: ProviderName;
+  model: string;
+  mode: ProviderMode;
+}
+interface ModelPickerProps {
+  /** Currently selected provider/model */
+  value: ModelSelection;
+  /** Called when selection changes */
+  onChange: (value: ModelSelection) => void;
+  /** Available providers (with availability info) */
+  availableProviders?: Array<{
+    provider: ProviderName;
+    available: boolean;
+    mode: ProviderMode;
+    reason?: string;
+  }>;
+  /** Additional class name */
+  className?: string;
+  /** Compact mode (smaller) */
+  compact?: boolean;
+}
+/**
+ * Model picker component for selecting AI provider and model
+ */
+declare function ModelPicker({
+  value,
+  onChange,
+  availableProviders,
+  className,
+  compact
+}: ModelPickerProps): react_jsx_runtime3.JSX.Element;
+//#endregion
+export { ModelPicker };

package/dist/presentation/components/index.d.ts
ADDED
@@ -0,0 +1,7 @@
+import { ChatContainer } from "./ChatContainer.js";
+import { ChatMessage } from "./ChatMessage.js";
+import { ChatInput } from "./ChatInput.js";
+import { ModelPicker } from "./ModelPicker.js";
+import { ContextIndicator } from "./ContextIndicator.js";
+import { CodePreview } from "./CodePreview.js";
+export { ChatContainer, ChatInput, ChatMessage, CodePreview, ContextIndicator, ModelPicker };
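
The components are plain presentational pieces that accept the core message types. A static rendering sketch using the `./presentation/components` subpath; the message object and code snippet are hand-built placeholder data:

import * as React from "react";
import { ChatContainer, ChatMessage, CodePreview } from "@lssm/module.ai-chat/presentation/components";
import type { ChatMessage as ChatMessageData } from "@lssm/module.ai-chat/core";

// Placeholder message; only the required fields from the ChatMessage interface are filled in.
const demoMessage: ChatMessageData = {
  id: "m1",
  conversationId: "c1",
  role: 'assistant',
  content: "Here is a draft spec.",
  status: 'completed',
  createdAt: new Date(),
  updatedAt: new Date(),
};

export function TranscriptPreview() {
  return (
    <ChatContainer showScrollButton>
      <ChatMessage message={demoMessage} showCopy showAvatar />
      <CodePreview code={"export const RefundCommand = {};"} language="typescript" filename="refund.command.ts" showCopy />
    </ChatContainer>
  );
}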

package/dist/presentation/hooks/useChat.d.ts
ADDED
@@ -0,0 +1,66 @@
+import { ChatAttachment, ChatConversation, ChatMessage } from "../../core/message-types.js";
+import { ProviderMode, ProviderName } from "@lssm/lib.ai-providers";
+
+//#region src/presentation/hooks/useChat.d.ts
+
+/**
+ * Options for useChat hook
+ */
+interface UseChatOptions {
+  /** Provider to use */
+  provider?: ProviderName;
+  /** Provider mode */
+  mode?: ProviderMode;
+  /** Model to use */
+  model?: string;
+  /** API key for BYOK mode */
+  apiKey?: string;
+  /** API proxy URL for managed mode */
+  proxyUrl?: string;
+  /** Initial conversation ID to resume */
+  conversationId?: string;
+  /** System prompt override */
+  systemPrompt?: string;
+  /** Enable streaming */
+  streaming?: boolean;
+  /** Called when a message is sent */
+  onSend?: (message: ChatMessage) => void;
+  /** Called when a response is received */
+  onResponse?: (message: ChatMessage) => void;
+  /** Called on error */
+  onError?: (error: Error) => void;
+  /** Called when usage is recorded */
+  onUsage?: (usage: {
+    inputTokens: number;
+    outputTokens: number;
+  }) => void;
+}
+/**
+ * Return type for useChat hook
+ */
+interface UseChatReturn {
+  /** Current messages */
+  messages: ChatMessage[];
+  /** Current conversation */
+  conversation: ChatConversation | null;
+  /** Whether currently loading/streaming */
+  isLoading: boolean;
+  /** Current error */
+  error: Error | null;
+  /** Send a message */
+  sendMessage: (content: string, attachments?: ChatAttachment[]) => Promise<void>;
+  /** Clear conversation and start fresh */
+  clearConversation: () => void;
+  /** Set conversation ID to resume */
+  setConversationId: (id: string | null) => void;
+  /** Regenerate last response */
+  regenerate: () => Promise<void>;
+  /** Stop current generation */
+  stop: () => void;
+}
+/**
+ * Hook for managing AI chat state
+ */
+declare function useChat(options?: UseChatOptions): UseChatReturn;
+//#endregion
+export { UseChatOptions, UseChatReturn, useChat };
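
useChat packages the send/stream loop as React state, so the components above can be wired to it directly. A sketch of a complete panel via the `./presentation` subpath export; all option values are illustrative and the error/stop markup is only one way to surface those fields:

import * as React from "react";
import { ChatContainer, ChatInput, ChatMessage, useChat } from "@lssm/module.ai-chat/presentation";

export function AssistantPanel() {
  const { messages, isLoading, error, sendMessage, stop } = useChat({
    streaming: true,
    systemPrompt: "You help author ContractSpec specs.", // illustrative
    onError: (err) => console.error(err),
  });

  return (
    <ChatContainer showScrollButton>
      {messages.map((m) => (
        <ChatMessage key={m.id} message={m} showCopy showAvatar />
      ))}
      {error ? <div role="alert">{error.message}</div> : null}
      <ChatInput onSend={(content) => void sendMessage(content)} isLoading={isLoading} disabled={isLoading} />
      {isLoading ? <button onClick={stop}>Stop</button> : null}
    </ChatContainer>
  );
}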

package/dist/presentation/hooks/useProviders.d.ts
ADDED
@@ -0,0 +1,37 @@
+import { ModelInfo, ProviderMode, ProviderName } from "@lssm/lib.ai-providers";
+
+//#region src/presentation/hooks/useProviders.d.ts
+
+/**
+ * Provider availability info
+ */
+interface ProviderInfo {
+  provider: ProviderName;
+  available: boolean;
+  mode: ProviderMode;
+  reason?: string;
+  models: ModelInfo[];
+}
+/**
+ * Return type for useProviders hook
+ */
+interface UseProvidersReturn {
+  /** All providers with availability info */
+  providers: ProviderInfo[];
+  /** Available providers only */
+  availableProviders: ProviderInfo[];
+  /** Check if a provider is available */
+  isAvailable: (provider: ProviderName) => boolean;
+  /** Get models for a provider */
+  getModels: (provider: ProviderName) => ModelInfo[];
+  /** Loading state */
+  isLoading: boolean;
+  /** Refresh provider availability */
+  refresh: () => Promise<void>;
+}
+/**
+ * Hook for managing AI provider information
+ */
+declare function useProviders(): UseProvidersReturn;
+//#endregion
+export { UseProvidersReturn, useProviders };
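
A small status component over useProviders; it only touches the fields declared above (provider, mode, available, reason) and assumes ProviderName values render cleanly as strings:

import * as React from "react";
import { useProviders } from "@lssm/module.ai-chat/presentation";

// Renders whatever providers the hook reports, plus a manual refresh.
export function ProviderStatus() {
  const { providers, availableProviders, isLoading, refresh } = useProviders();

  if (isLoading) return <p>Checking providers…</p>;

  return (
    <div>
      <p>{availableProviders.length} of {providers.length} providers available</p>
      <ul>
        {providers.map((p) => (
          <li key={String(p.provider)}>
            {String(p.provider)} ({String(p.mode)}) – {p.available ? "ready" : p.reason ?? "unavailable"}
          </li>
        ))}
      </ul>
      <button onClick={() => void refresh()}>Refresh</button>
    </div>
  );
}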

package/dist/presentation/index.d.ts
ADDED
@@ -0,0 +1,11 @@
+import { ChatContainer } from "./components/ChatContainer.js";
+import { ChatMessage } from "./components/ChatMessage.js";
+import { ChatInput } from "./components/ChatInput.js";
+import { ModelPicker } from "./components/ModelPicker.js";
+import { ContextIndicator } from "./components/ContextIndicator.js";
+import { CodePreview } from "./components/CodePreview.js";
+import "./components/index.js";
+import { UseChatOptions, UseChatReturn, useChat } from "./hooks/useChat.js";
+import { UseProvidersReturn, useProviders } from "./hooks/useProviders.js";
+import "./hooks/index.js";
+export { ChatContainer, ChatInput, ChatMessage, CodePreview, ContextIndicator, ModelPicker, UseChatOptions, UseChatReturn, UseProvidersReturn, useChat, useProviders };

package/dist/providers/chat-utilities.d.ts
ADDED
@@ -0,0 +1,14 @@
+import { ProviderName } from "@lssm/lib.ai-providers";
+
+//#region src/providers/chat-utilities.d.ts
+
+/**
+ * Check if a provider supports local mode
+ */
+declare function supportsLocalMode(provider: ProviderName): boolean;
+/**
+ * Check if a provider is available in Studio (cloud only)
+ */
+declare function isStudioAvailable(provider: ProviderName): boolean;
+//#endregion
+export { isStudioAvailable, supportsLocalMode };
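
A sketch combining the two helpers to describe where a provider can run; the surface labels ("CLI/VSCode", "Studio") echo the doc comments above, and ProviderName is treated as an opaque value:

import type { ProviderName } from "@lssm/lib.ai-providers";
import { isStudioAvailable, supportsLocalMode } from "@lssm/module.ai-chat/providers";

// Returns a human-readable summary of the modes a provider supports.
function describeProvider(name: ProviderName): string {
  const modes: string[] = [];
  if (supportsLocalMode(name)) modes.push("local (CLI/VSCode)");
  if (isStudioAvailable(name)) modes.push("Studio (cloud)");
  return modes.length > 0 ? modes.join(", ") : "no supported mode";
}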

package/dist/providers/index.d.ts
ADDED
@@ -0,0 +1,3 @@
+import { isStudioAvailable, supportsLocalMode } from "./chat-utilities.js";
+import { DEFAULT_MODELS, MODELS, ModelCapabilities, ModelInfo as ChatModelInfo, Provider as ChatProvider, ProviderAvailability, ProviderConfig as ChatProviderConfig, ProviderMode as ChatProviderMode, ProviderName as ChatProviderName, createProvider, createProviderFromEnv, getAvailableProviders, getDefaultModel, getEnvVarName, getModelInfo, getModelsForProvider, getRecommendedModels, hasCredentials, isOllamaRunning, listOllamaModels, validateProvider } from "@lssm/lib.ai-providers";
+export { type ChatModelInfo, type ChatProvider, type ChatProviderConfig, type ChatProviderMode, type ChatProviderName, DEFAULT_MODELS, MODELS, type ModelCapabilities, type ProviderAvailability, createProvider, createProviderFromEnv, getAvailableProviders, getDefaultModel, getEnvVarName, getModelInfo, getModelsForProvider, getRecommendedModels, hasCredentials, isOllamaRunning, isStudioAvailable, listOllamaModels, supportsLocalMode, validateProvider };

package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@lssm/module.ai-chat",
-  "version": "0.0.0-canary-20251217054315",
+  "version": "0.0.0-canary-20251217060433",
   "type": "module",
   "main": "./dist/index.js",
   "module": "./dist/index.js",
@@ -23,13 +23,13 @@
     "test": "bun test"
   },
   "dependencies": {
-    "@lssm/lib.ai-agent": "0.0.0-canary-
-    "@lssm/lib.ai-providers": "0.0.0-canary-
-    "@lssm/lib.contracts": "0.0.0-canary-
-    "@lssm/lib.metering": "0.0.0-canary-
-    "@lssm/lib.cost-tracking": "0.0.0-canary-
-    "@lssm/lib.design-system": "0.0.0-canary-
-    "@lssm/lib.ui-kit-web": "0.0.0-canary-
+    "@lssm/lib.ai-agent": "0.0.0-canary-20251217060433",
+    "@lssm/lib.ai-providers": "0.0.0-canary-20251217060433",
+    "@lssm/lib.contracts": "0.0.0-canary-20251217060433",
+    "@lssm/lib.metering": "0.0.0-canary-20251217060433",
+    "@lssm/lib.cost-tracking": "0.0.0-canary-20251217060433",
+    "@lssm/lib.design-system": "0.0.0-canary-20251217060433",
+    "@lssm/lib.ui-kit-web": "0.0.0-canary-20251217060433",
     "@ai-sdk/react": "beta",
     "ai": "beta",
     "lucide-react": "^0.535.0",
@@ -37,8 +37,8 @@
     "zod": "^4.1.13"
   },
   "devDependencies": {
-    "@lssm/tool.tsdown": "0.0.0-canary-
-    "@lssm/tool.typescript": "0.0.0-canary-
+    "@lssm/tool.tsdown": "0.0.0-canary-20251217060433",
+    "@lssm/tool.typescript": "0.0.0-canary-20251217060433",
    "@types/react": "^19.0.14",
    "tsdown": "^0.17.4",
    "typescript": "^5.9.3"
@@ -47,13 +47,13 @@
    "react": ">=18.0.0"
  },
  "exports": {
-    ".": "./
-    "./context": "./
-    "./core": "./
-    "./presentation": "./
-    "./presentation/components": "./
-    "./presentation/hooks": "./
-    "./providers": "./
+    ".": "./dist/index.js",
+    "./context": "./dist/context/index.js",
+    "./core": "./dist/core/index.js",
+    "./presentation": "./dist/presentation/index.js",
+    "./presentation/components": "./dist/presentation/components/index.js",
+    "./presentation/hooks": "./dist/presentation/hooks/index.js",
+    "./providers": "./dist/providers/index.js",
     "./*": "./*"
   },
   "publishConfig": {
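
The rewritten exports map points every subpath at a built file under dist/, which yields the following import surface for consumers (a sketch, assuming a resolver that honors the exports field; the old right-hand values are truncated in this diff view and are not reconstructed here):

// Whole module
import { createChatService, useChat } from "@lssm/module.ai-chat";

// Individual layers, matching the "exports" subpaths added above
import { createWorkspaceContext } from "@lssm/module.ai-chat/context";
import { createInMemoryConversationStore } from "@lssm/module.ai-chat/core";
import { ChatContainer } from "@lssm/module.ai-chat/presentation/components";
import { supportsLocalMode } from "@lssm/module.ai-chat/providers";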