@lssm/module.ai-chat 0.0.0-canary-20251217083314 → 1.41.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/ai-chat.feature.js +1 -93
- package/dist/context/context-builder.js +2 -147
- package/dist/context/file-operations.js +1 -174
- package/dist/context/index.js +1 -5
- package/dist/context/workspace-context.js +2 -123
- package/dist/core/chat-service.js +2 -211
- package/dist/core/conversation-store.js +1 -108
- package/dist/core/index.js +1 -4
- package/dist/index.js +1 -22
- package/dist/presentation/components/ChatContainer.js +1 -62
- package/dist/presentation/components/ChatInput.js +1 -149
- package/dist/presentation/components/ChatMessage.js +1 -135
- package/dist/presentation/components/CodePreview.js +2 -126
- package/dist/presentation/components/ContextIndicator.js +1 -96
- package/dist/presentation/components/ModelPicker.js +1 -197
- package/dist/presentation/components/index.js +1 -8
- package/dist/presentation/hooks/index.js +1 -4
- package/dist/presentation/hooks/useChat.js +1 -171
- package/dist/presentation/hooks/useProviders.js +1 -42
- package/dist/presentation/index.js +1 -12
- package/dist/providers/chat-utilities.js +1 -16
- package/dist/providers/index.js +1 -7
- package/package.json +17 -18
- package/dist/ai-chat.feature.d.ts +0 -11
- package/dist/context/context-builder.d.ts +0 -56
- package/dist/context/file-operations.d.ts +0 -99
- package/dist/context/index.d.ts +0 -4
- package/dist/context/workspace-context.d.ts +0 -116
- package/dist/core/chat-service.d.ts +0 -72
- package/dist/core/conversation-store.d.ts +0 -73
- package/dist/core/index.d.ts +0 -4
- package/dist/core/message-types.d.ts +0 -149
- package/dist/index.d.ts +0 -16
- package/dist/libs/ai-providers/dist/factory.js +0 -225
- package/dist/libs/ai-providers/dist/index.js +0 -4
- package/dist/libs/ai-providers/dist/legacy.js +0 -2
- package/dist/libs/ai-providers/dist/models.js +0 -299
- package/dist/libs/ai-providers/dist/validation.js +0 -60
- package/dist/libs/design-system/dist/_virtual/rolldown_runtime.js +0 -5
- package/dist/libs/design-system/dist/components/atoms/Button.js +0 -33
- package/dist/libs/design-system/dist/components/atoms/Textarea.js +0 -35
- package/dist/libs/design-system/dist/lib/keyboard.js +0 -193
- package/dist/libs/design-system/dist/ui-kit-web/dist/ui/button.js +0 -55
- package/dist/libs/design-system/dist/ui-kit-web/dist/ui/textarea.js +0 -16
- package/dist/libs/design-system/dist/ui-kit-web/dist/ui-kit-core/dist/utils.js +0 -13
- package/dist/libs/ui-kit-web/dist/ui/avatar.js +0 -25
- package/dist/libs/ui-kit-web/dist/ui/badge.js +0 -26
- package/dist/libs/ui-kit-web/dist/ui/scroll-area.js +0 -39
- package/dist/libs/ui-kit-web/dist/ui/select.js +0 -79
- package/dist/libs/ui-kit-web/dist/ui/skeleton.js +0 -14
- package/dist/libs/ui-kit-web/dist/ui/tooltip.js +0 -39
- package/dist/libs/ui-kit-web/dist/ui/utils.js +0 -10
- package/dist/libs/ui-kit-web/dist/ui-kit-core/dist/utils.js +0 -10
- package/dist/presentation/components/ChatContainer.d.ts +0 -20
- package/dist/presentation/components/ChatInput.d.ts +0 -34
- package/dist/presentation/components/ChatMessage.d.ts +0 -23
- package/dist/presentation/components/CodePreview.d.ts +0 -39
- package/dist/presentation/components/ContextIndicator.d.ts +0 -25
- package/dist/presentation/components/ModelPicker.d.ts +0 -38
- package/dist/presentation/components/index.d.ts +0 -7
- package/dist/presentation/hooks/index.d.ts +0 -3
- package/dist/presentation/hooks/useChat.d.ts +0 -66
- package/dist/presentation/hooks/useProviders.d.ts +0 -37
- package/dist/presentation/index.d.ts +0 -10
- package/dist/providers/chat-utilities.d.ts +0 -14
- package/dist/providers/index.d.ts +0 -3
package/dist/presentation/hooks/useChat.js
CHANGED

@@ -1,171 +1 @@
-
-
-import { ChatService } from "../../core/chat-service.js";
-import { createProvider } from "../../libs/ai-providers/dist/factory.js";
-import "../../libs/ai-providers/dist/index.js";
-import * as React from "react";
-
-//#region src/presentation/hooks/useChat.tsx
-/**
- * Hook for managing AI chat state
- */
-function useChat(options = {}) {
-  const { provider = "openai", mode = "byok", model, apiKey, proxyUrl, conversationId: initialConversationId, systemPrompt, streaming = true, onSend, onResponse, onError, onUsage } = options;
-  const [messages, setMessages] = React.useState([]);
-  const [conversation, setConversation] = React.useState(null);
-  const [isLoading, setIsLoading] = React.useState(false);
-  const [error, setError] = React.useState(null);
-  const [conversationId, setConversationId] = React.useState(initialConversationId ?? null);
-  const abortControllerRef = React.useRef(null);
-  const chatServiceRef = React.useRef(null);
-  React.useEffect(() => {
-    chatServiceRef.current = new ChatService({
-      provider: createProvider({
-        provider,
-        model,
-        apiKey,
-        proxyUrl
-      }),
-      systemPrompt,
-      onUsage
-    });
-  }, [
-    provider,
-    mode,
-    model,
-    apiKey,
-    proxyUrl,
-    systemPrompt,
-    onUsage
-  ]);
-  React.useEffect(() => {
-    if (!conversationId || !chatServiceRef.current) return;
-    const loadConversation = async () => {
-      const conv = await chatServiceRef.current.getConversation(conversationId);
-      if (conv) {
-        setConversation(conv);
-        setMessages(conv.messages);
-      }
-    };
-    loadConversation().catch(console.error);
-  }, [conversationId]);
-  const sendMessage = React.useCallback(async (content, attachments) => {
-    if (!chatServiceRef.current) throw new Error("Chat service not initialized");
-    setIsLoading(true);
-    setError(null);
-    abortControllerRef.current = new AbortController();
-    try {
-      const userMessage = {
-        id: `msg_${Date.now()}`,
-        conversationId: conversationId ?? "",
-        role: "user",
-        content,
-        status: "completed",
-        createdAt: /* @__PURE__ */ new Date(),
-        updatedAt: /* @__PURE__ */ new Date(),
-        attachments
-      };
-      setMessages((prev) => [...prev, userMessage]);
-      onSend?.(userMessage);
-      if (streaming) {
-        const result = await chatServiceRef.current.stream({
-          conversationId: conversationId ?? void 0,
-          content,
-          attachments
-        });
-        if (!conversationId) setConversationId(result.conversationId);
-        const assistantMessage = {
-          id: result.messageId,
-          conversationId: result.conversationId,
-          role: "assistant",
-          content: "",
-          status: "streaming",
-          createdAt: /* @__PURE__ */ new Date(),
-          updatedAt: /* @__PURE__ */ new Date()
-        };
-        setMessages((prev) => [...prev, assistantMessage]);
-        let fullContent = "";
-        for await (const chunk of result.stream) if (chunk.type === "text" && chunk.content) {
-          fullContent += chunk.content;
-          setMessages((prev) => prev.map((m) => m.id === result.messageId ? {
-            ...m,
-            content: fullContent
-          } : m));
-        } else if (chunk.type === "done") {
-          setMessages((prev) => prev.map((m) => m.id === result.messageId ? {
-            ...m,
-            status: "completed",
-            usage: chunk.usage,
-            updatedAt: /* @__PURE__ */ new Date()
-          } : m));
-          onResponse?.(messages.find((m) => m.id === result.messageId) ?? assistantMessage);
-        } else if (chunk.type === "error") {
-          setMessages((prev) => prev.map((m) => m.id === result.messageId ? {
-            ...m,
-            status: "error",
-            error: chunk.error,
-            updatedAt: /* @__PURE__ */ new Date()
-          } : m));
-          if (chunk.error) {
-            const err = new Error(chunk.error.message);
-            setError(err);
-            onError?.(err);
-          }
-        }
-      } else {
-        const result = await chatServiceRef.current.send({
-          conversationId: conversationId ?? void 0,
-          content,
-          attachments
-        });
-        setConversation(result.conversation);
-        setMessages(result.conversation.messages);
-        if (!conversationId) setConversationId(result.conversation.id);
-        onResponse?.(result.message);
-      }
-    } catch (err) {
-      const error$1 = err instanceof Error ? err : new Error(String(err));
-      setError(error$1);
-      onError?.(error$1);
-    } finally {
-      setIsLoading(false);
-      abortControllerRef.current = null;
-    }
-  }, [
-    conversationId,
-    streaming,
-    onSend,
-    onResponse,
-    onError,
-    messages
-  ]);
-  return {
-    messages,
-    conversation,
-    isLoading,
-    error,
-    sendMessage,
-    clearConversation: React.useCallback(() => {
-      setMessages([]);
-      setConversation(null);
-      setConversationId(null);
-      setError(null);
-    }, []),
-    setConversationId,
-    regenerate: React.useCallback(async () => {
-      const lastUserMessageIndex = messages.findLastIndex((m) => m.role === "user");
-      if (lastUserMessageIndex === -1) return;
-      const lastUserMessage = messages[lastUserMessageIndex];
-      if (!lastUserMessage) return;
-      setMessages((prev) => prev.slice(0, lastUserMessageIndex + 1));
-      await sendMessage(lastUserMessage.content, lastUserMessage.attachments);
-    }, [messages, sendMessage]),
-    stop: React.useCallback(() => {
-      abortControllerRef.current?.abort();
-      setIsLoading(false);
-    }, [])
-  };
-}
-
-//#endregion
-export { useChat };
+"use client";import{ChatService as e}from"../../core/chat-service.js";import{createProvider as t}from"@lssm/lib.ai-providers";import*as n from"react";function r(r={}){let{provider:i=`openai`,mode:a=`byok`,model:o,apiKey:s,proxyUrl:c,conversationId:l,systemPrompt:u,streaming:d=!0,onSend:f,onResponse:p,onError:m,onUsage:h}=r,[g,_]=n.useState([]),[v,y]=n.useState(null),[b,x]=n.useState(!1),[S,C]=n.useState(null),[w,T]=n.useState(l??null),E=n.useRef(null),D=n.useRef(null);n.useEffect(()=>{D.current=new e({provider:t({provider:i,model:o,apiKey:s,proxyUrl:c}),systemPrompt:u,onUsage:h})},[i,a,o,s,c,u,h]),n.useEffect(()=>{!w||!D.current||(async()=>{let e=await D.current.getConversation(w);e&&(y(e),_(e.messages))})().catch(console.error)},[w]);let O=n.useCallback(async(e,t)=>{if(!D.current)throw Error(`Chat service not initialized`);x(!0),C(null),E.current=new AbortController;try{let n={id:`msg_${Date.now()}`,conversationId:w??``,role:`user`,content:e,status:`completed`,createdAt:new Date,updatedAt:new Date,attachments:t};if(_(e=>[...e,n]),f?.(n),d){let n=await D.current.stream({conversationId:w??void 0,content:e,attachments:t});w||T(n.conversationId);let r={id:n.messageId,conversationId:n.conversationId,role:`assistant`,content:``,status:`streaming`,createdAt:new Date,updatedAt:new Date};_(e=>[...e,r]);let i=``;for await(let e of n.stream)if(e.type===`text`&&e.content)i+=e.content,_(e=>e.map(e=>e.id===n.messageId?{...e,content:i}:e));else if(e.type===`done`)_(t=>t.map(t=>t.id===n.messageId?{...t,status:`completed`,usage:e.usage,updatedAt:new Date}:t)),p?.(g.find(e=>e.id===n.messageId)??r);else if(e.type===`error`&&(_(t=>t.map(t=>t.id===n.messageId?{...t,status:`error`,error:e.error,updatedAt:new Date}:t)),e.error)){let t=Error(e.error.message);C(t),m?.(t)}}else{let n=await D.current.send({conversationId:w??void 0,content:e,attachments:t});y(n.conversation),_(n.conversation.messages),w||T(n.conversation.id),p?.(n.message)}}catch(e){let t=e instanceof Error?e:Error(String(e));C(t),m?.(t)}finally{x(!1),E.current=null}},[w,d,f,p,m,g]);return{messages:g,conversation:v,isLoading:b,error:S,sendMessage:O,clearConversation:n.useCallback(()=>{_([]),y(null),T(null),C(null)},[]),setConversationId:T,regenerate:n.useCallback(async()=>{let e=g.findLastIndex(e=>e.role===`user`);if(e===-1)return;let t=g[e];t&&(_(t=>t.slice(0,e+1)),await O(t.content,t.attachments))},[g,O]),stop:n.useCallback(()=>{E.current?.abort(),x(!1)},[])}}export{r as useChat};
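For reference, a minimal consumer sketch of the hook this file compiles. The option and return names (provider, model, apiKey, proxyUrl, streaming, messages, isLoading, error, sendMessage, regenerate, stop) are taken from the removed source above; the component itself, the model id, and the placeholder API key are illustrative assumptions, not documented defaults.

  import * as React from "react";
  import { useChat } from "@lssm/module.ai-chat/presentation/hooks";

  export function AssistantPanel() {
    const { messages, isLoading, error, sendMessage, regenerate, stop } = useChat({
      provider: "openai",       // provider id, as destructured in the hook above
      model: "gpt-4o-mini",     // hypothetical model id, for illustration only
      apiKey: "<your-api-key>", // apiKey/proxyUrl are forwarded to createProvider, as in the hook above
      streaming: true,          // streamed chunks update the assistant message in place
      onError: (err) => console.error(err),
    });

    return (
      <div>
        {messages.map((m) => (
          <p key={m.id}>{m.role}: {m.content}</p>
        ))}
        {error ? <p>{error.message}</p> : null}
        <button disabled={isLoading} onClick={() => sendMessage("Explain this spec")}>Send</button>
        <button onClick={regenerate}>Regenerate</button>
        <button onClick={stop}>Stop</button>
      </div>
    );
  }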
package/dist/presentation/hooks/useProviders.js
CHANGED

@@ -1,42 +1 @@
-
-
-import { getModelsForProvider } from "../../libs/ai-providers/dist/models.js";
-import { getAvailableProviders } from "../../libs/ai-providers/dist/factory.js";
-import "../../libs/ai-providers/dist/index.js";
-import * as React from "react";
-
-//#region src/presentation/hooks/useProviders.tsx
-/**
- * Hook for managing AI provider information
- */
-function useProviders() {
-  const [providers, setProviders] = React.useState([]);
-  const [isLoading, setIsLoading] = React.useState(true);
-  const loadProviders = React.useCallback(async () => {
-    setIsLoading(true);
-    try {
-      setProviders(getAvailableProviders().map((p) => ({
-        ...p,
-        models: getModelsForProvider(p.provider)
-      })));
-    } catch (error) {
-      console.error("Failed to load providers:", error);
-    } finally {
-      setIsLoading(false);
-    }
-  }, []);
-  React.useEffect(() => {
-    loadProviders();
-  }, [loadProviders]);
-  return {
-    providers,
-    availableProviders: React.useMemo(() => providers.filter((p) => p.available), [providers]),
-    isAvailable: React.useCallback((provider) => providers.some((p) => p.provider === provider && p.available), [providers]),
-    getModels: React.useCallback((provider) => providers.find((p) => p.provider === provider)?.models ?? [], [providers]),
-    isLoading,
-    refresh: loadProviders
-  };
-}
-
-//#endregion
-export { useProviders };
+"use client";import{getAvailableProviders as e,getModelsForProvider as t}from"@lssm/lib.ai-providers";import*as n from"react";function r(){let[r,i]=n.useState([]),[a,o]=n.useState(!0),s=n.useCallback(async()=>{o(!0);try{i(e().map(e=>({...e,models:t(e.provider)})))}catch(e){console.error(`Failed to load providers:`,e)}finally{o(!1)}},[]);return n.useEffect(()=>{s()},[s]),{providers:r,availableProviders:n.useMemo(()=>r.filter(e=>e.available),[r]),isAvailable:n.useCallback(e=>r.some(t=>t.provider===e&&t.available),[r]),getModels:n.useCallback(e=>r.find(t=>t.provider===e)?.models??[],[r]),isLoading:a,refresh:s}}export{r as useProviders};
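A matching sketch for the provider hook. The return shape (providers, availableProviders, getModels, isLoading, refresh) comes from the removed source above; the component and the assumption that each entry carries `provider`, `available`, and `models` fields follow the filters and mapping used in the hook.

  import * as React from "react";
  import { useProviders } from "@lssm/module.ai-chat/presentation/hooks";

  export function ProviderList() {
    const { availableProviders, getModels, isLoading, refresh } = useProviders();
    if (isLoading) return <p>Loading providers…</p>;
    return (
      <div>
        {availableProviders.map((p) => (
          // each entry exposes provider/available/models, per the mapping in the hook above
          <p key={p.provider}>{p.provider}: {getModels(p.provider).length} models</p>
        ))}
        <button onClick={refresh}>Refresh</button>
      </div>
    );
  }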
package/dist/presentation/index.js
CHANGED

@@ -1,12 +1 @@
-import { ChatContainer } from "./components/ChatContainer.js";
-import { CodePreview } from "./components/CodePreview.js";
-import { ChatMessage } from "./components/ChatMessage.js";
-import { ChatInput } from "./components/ChatInput.js";
-import { ModelPicker } from "./components/ModelPicker.js";
-import { ContextIndicator } from "./components/ContextIndicator.js";
-import "./components/index.js";
-import { useChat } from "./hooks/useChat.js";
-import { useProviders } from "./hooks/useProviders.js";
-import "./hooks/index.js";
-
-export { ChatContainer, ChatInput, ChatMessage, CodePreview, ContextIndicator, ModelPicker, useChat, useProviders };
+import{ChatContainer as e}from"./components/ChatContainer.js";import{CodePreview as t}from"./components/CodePreview.js";import{ChatMessage as n}from"./components/ChatMessage.js";import{ChatInput as r}from"./components/ChatInput.js";import{ModelPicker as i}from"./components/ModelPicker.js";import{ContextIndicator as a}from"./components/ContextIndicator.js";import"./components/index.js";import{useChat as o}from"./hooks/useChat.js";import{useProviders as s}from"./hooks/useProviders.js";import"./hooks/index.js";export{e as ChatContainer,r as ChatInput,n as ChatMessage,t as CodePreview,a as ContextIndicator,i as ModelPicker,o as useChat,s as useProviders};
package/dist/providers/chat-utilities.js
CHANGED

@@ -1,16 +1 @@
-
-/**
- * Check if a provider supports local mode
- */
-function supportsLocalMode(provider) {
-  return provider === "ollama";
-}
-/**
- * Check if a provider is available in Studio (cloud only)
- */
-function isStudioAvailable(provider) {
-  return provider !== "ollama";
-}
-
-//#endregion
-export { isStudioAvailable, supportsLocalMode };
+function e(e){return e===`ollama`}function t(e){return e!==`ollama`}export{t as isStudioAvailable,e as supportsLocalMode};
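The two helpers above are small but load-bearing for mode selection. A sketch of the intended use; the "local"/"byok" strings mirror the `mode` option seen in useChat and are otherwise an assumption.

  import { isStudioAvailable, supportsLocalMode } from "@lssm/module.ai-chat/providers";

  function pickMode(provider: string): "local" | "byok" {
    if (supportsLocalMode(provider)) return "local"; // currently only "ollama"
    if (isStudioAvailable(provider)) return "byok";  // every non-ollama (cloud) provider
    throw new Error(`Unsupported provider: ${provider}`);
  }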
package/dist/providers/index.js
CHANGED

@@ -1,7 +1 @@
-import { DEFAULT_MODELS, MODELS, getDefaultModel, getModelInfo, getModelsForProvider, getRecommendedModels } from "../libs/ai-providers/dist/models.js";
-import { createProvider, createProviderFromEnv, getAvailableProviders } from "../libs/ai-providers/dist/factory.js";
-import { getEnvVarName, hasCredentials, isOllamaRunning, listOllamaModels, validateProvider } from "../libs/ai-providers/dist/validation.js";
-import "../libs/ai-providers/dist/index.js";
-import { isStudioAvailable, supportsLocalMode } from "./chat-utilities.js";
-
-export { DEFAULT_MODELS, MODELS, createProvider, createProviderFromEnv, getAvailableProviders, getDefaultModel, getEnvVarName, getModelInfo, getModelsForProvider, getRecommendedModels, hasCredentials, isOllamaRunning, isStudioAvailable, listOllamaModels, supportsLocalMode, validateProvider };
+import{isStudioAvailable as e,supportsLocalMode as t}from"./chat-utilities.js";import{DEFAULT_MODELS as n,MODELS as r,createProvider as i,createProviderFromEnv as a,getAvailableProviders as o,getDefaultModel as s,getEnvVarName as c,getModelInfo as l,getModelsForProvider as u,getRecommendedModels as d,hasCredentials as f,isOllamaRunning as p,listOllamaModels as m,validateProvider as h}from"@lssm/lib.ai-providers";export{n as DEFAULT_MODELS,r as MODELS,i as createProvider,a as createProviderFromEnv,o as getAvailableProviders,s as getDefaultModel,c as getEnvVarName,l as getModelInfo,u as getModelsForProvider,d as getRecommendedModels,f as hasCredentials,p as isOllamaRunning,e as isStudioAvailable,m as listOllamaModels,t as supportsLocalMode,h as validateProvider};
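The barrel now pulls these helpers from @lssm/lib.ai-providers (declared as a workspace dependency below) rather than from the copy previously bundled under dist/libs/ai-providers; consumer-side imports through the module are unchanged. A small sketch, with the model id and API key as placeholders:

  import { createProvider, getAvailableProviders } from "@lssm/module.ai-chat/providers";

  // createProvider takes the same { provider, model, apiKey, proxyUrl } shape used inside useChat above.
  const provider = createProvider({ provider: "openai", model: "gpt-4o-mini", apiKey: "<your-api-key>" });
  console.log(getAvailableProviders());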
package/package.json
CHANGED

@@ -1,6 +1,6 @@
 {
   "name": "@lssm/module.ai-chat",
-  "version": "0.0.0-canary-20251217083314",
+  "version": "1.41.0",
   "type": "module",
   "main": "./dist/index.js",
   "module": "./dist/index.js",
@@ -11,7 +11,6 @@
   ],
   "scripts": {
     "publish:pkg": "bun publish --tolerate-republish --ignore-scripts --verbose",
-    "publish:pkg:canary": "bun publish:pkg --tag canary",
     "build": "bun build:bundle && bun build:types",
     "build:bundle": "tsdown",
     "build:types": "tsc --noEmit",
@@ -23,13 +22,13 @@
     "test": "bun test"
   },
   "dependencies": {
-    "@lssm/lib.ai-agent": "
-    "@lssm/lib.ai-providers": "
-    "@lssm/lib.contracts": "
-    "@lssm/lib.metering": "
-    "@lssm/lib.cost-tracking": "
-    "@lssm/lib.design-system": "
-    "@lssm/lib.ui-kit-web": "
+    "@lssm/lib.ai-agent": "workspace:*",
+    "@lssm/lib.ai-providers": "workspace:*",
+    "@lssm/lib.contracts": "workspace:*",
+    "@lssm/lib.metering": "workspace:*",
+    "@lssm/lib.cost-tracking": "workspace:*",
+    "@lssm/lib.design-system": "workspace:*",
+    "@lssm/lib.ui-kit-web": "workspace:*",
     "@ai-sdk/react": "beta",
     "ai": "beta",
     "lucide-react": "^0.535.0",
@@ -37,8 +36,8 @@
     "zod": "^4.1.13"
   },
   "devDependencies": {
-    "@lssm/tool.tsdown": "
-    "@lssm/tool.typescript": "
+    "@lssm/tool.tsdown": "workspace:*",
+    "@lssm/tool.typescript": "workspace:*",
     "@types/react": "^19.0.14",
     "tsdown": "^0.17.4",
     "typescript": "^5.9.3"
@@ -47,13 +46,13 @@
     "react": ">=18.0.0"
  },
   "exports": {
-    ".": "./
-    "./context": "./
-    "./core": "./
-    "./presentation": "./
-    "./presentation/components": "./
-    "./presentation/hooks": "./
-    "./providers": "./
+    ".": "./src/index.ts",
+    "./context": "./src/context/index.ts",
+    "./core": "./src/core/index.ts",
+    "./presentation": "./src/presentation/index.ts",
+    "./presentation/components": "./src/presentation/components/index.ts",
+    "./presentation/hooks": "./src/presentation/hooks/index.ts",
+    "./providers": "./src/providers/index.ts",
     "./*": "./*"
   },
   "publishConfig": {
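Note the exports map: every subpath now resolves to TypeScript sources under ./src instead of built files under ./dist, so consumers resolving through `exports` now get .ts entry points and need a TypeScript-aware bundler, even though `main`/`module` still point at ./dist/index.js. Per the map above, for example:

  import { useChat } from "@lssm/module.ai-chat/presentation/hooks";     // → ./src/presentation/hooks/index.ts
  import { createWorkspaceContext } from "@lssm/module.ai-chat/context"; // → ./src/context/index.ts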
package/dist/ai-chat.feature.d.ts
DELETED

@@ -1,11 +0,0 @@
-import { FeatureModuleSpec } from "@lssm/lib.contracts";
-
-//#region src/ai-chat.feature.d.ts
-
-/**
- * AI Chat feature module that bundles conversational AI assistance
- * for ContractSpec development across CLI, VSCode, and Studio.
- */
-declare const AiChatFeature: FeatureModuleSpec;
-//#endregion
-export { AiChatFeature };
package/dist/context/context-builder.d.ts
DELETED

@@ -1,56 +0,0 @@
-import { WorkspaceContext } from "./workspace-context.js";
-
-//#region src/context/context-builder.d.ts
-
-/**
- * Context entry for a file or spec
- */
-interface ContextEntry {
-  type: 'spec' | 'file' | 'reference';
-  path: string;
-  content?: string;
-  summary?: string;
-  relevance: number;
-}
-/**
- * Built context for LLM
- */
-interface BuiltContext {
-  entries: ContextEntry[];
-  summary: string;
-  totalTokensEstimate: number;
-}
-/**
- * Options for building context
- */
-interface ContextBuilderOptions {
-  /** Maximum estimated tokens for context */
-  maxTokens?: number;
-  /** Query to use for relevance scoring */
-  query?: string;
-  /** Specific files to include */
-  includeFiles?: string[];
-  /** Specific specs to include */
-  includeSpecs?: string[];
-}
-/**
- * Context builder for creating rich LLM context
- */
-declare class ContextBuilder {
-  private readonly context;
-  constructor(context: WorkspaceContext);
-  /**
-   * Build context for a chat message
-   */
-  build(options?: ContextBuilderOptions): BuiltContext;
-  /**
-   * Build a text summary of the context entries
-   */
-  private buildSummary;
-}
-/**
- * Create a context builder
- */
-declare function createContextBuilder(context: WorkspaceContext): ContextBuilder;
-//#endregion
-export { BuiltContext, ContextBuilder, ContextBuilderOptions, ContextEntry, createContextBuilder };
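For orientation, a sketch of how the removed ContextBuilder API fit together, using only names from the declarations above (the workspace path, query, and token budget are placeholders):

  import { createContextBuilder, createWorkspaceContext } from "@lssm/module.ai-chat/context";

  const workspace = await createWorkspaceContext("/path/to/workspace");
  const builder = createContextBuilder(workspace);
  const built = builder.build({ query: "checkout command", maxTokens: 4000 });
  // BuiltContext: entries scored by relevance, plus a text summary and a token estimate
  console.log(built.summary, built.totalTokensEstimate, built.entries.length);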
package/dist/context/file-operations.d.ts
DELETED

@@ -1,99 +0,0 @@
-//#region src/context/file-operations.d.ts
-/**
- * File operations for workspace context
- *
- * Provides read/write operations for files in the workspace.
- */
-/**
- * Result of a file read operation
- */
-interface FileReadResult {
-  success: boolean;
-  path: string;
-  content?: string;
-  error?: string;
-}
-/**
- * Result of a file write operation
- */
-interface FileWriteResult {
-  success: boolean;
-  path: string;
-  error?: string;
-}
-/**
- * File operation to perform
- */
-interface FileOperation {
-  type: 'read' | 'write' | 'create' | 'delete';
-  path: string;
-  content?: string;
-}
-/**
- * Result of a file operation
- */
-interface FileOperationResult {
-  operation: FileOperation;
-  success: boolean;
-  content?: string;
-  error?: string;
-}
-/**
- * Interface for file system operations
- */
-interface FileSystem {
-  /**
-   * Read a file's contents
-   */
-  readFile(path: string): Promise<string>;
-  /**
-   * Write content to a file
-   */
-  writeFile(path: string, content: string): Promise<void>;
-  /**
-   * Check if a file exists
-   */
-  exists(path: string): Promise<boolean>;
-  /**
-   * Delete a file
-   */
-  deleteFile(path: string): Promise<void>;
-  /**
-   * List files in a directory
-   */
-  listFiles(directory: string, options?: {
-    recursive?: boolean;
-    pattern?: string;
-  }): Promise<string[]>;
-}
-/**
- * File operations executor
- */
-declare class FileOperations {
-  private readonly fs;
-  private readonly workspacePath;
-  private readonly allowWrites;
-  constructor(fs: FileSystem, workspacePath: string, allowWrites?: boolean);
-  /**
-   * Read a file
-   */
-  read(relativePath: string): Promise<FileReadResult>;
-  /**
-   * Write to a file
-   */
-  write(relativePath: string, content: string): Promise<FileWriteResult>;
-  /**
-   * Execute multiple file operations
-   */
-  execute(operations: FileOperation[]): Promise<FileOperationResult[]>;
-  /**
-   * Resolve a relative path to an absolute path
-   */
-  private resolvePath;
-}
-/**
- * Create a file operations instance with Node.js fs
- */
-declare function createNodeFileOperations(workspacePath: string, allowWrites?: boolean): FileOperations;
-//#endregion
-export { FileOperation, FileOperationResult, FileOperations, FileReadResult, FileSystem, FileWriteResult, createNodeFileOperations };
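Likewise for the removed file-operations surface; a sketch grounded in the declarations above (the paths are placeholders, and the default for allowWrites is not shown in this diff):

  import { createNodeFileOperations } from "@lssm/module.ai-chat/context";

  const ops = createNodeFileOperations("/path/to/workspace", true); // second argument: allowWrites
  const read = await ops.read("specs/checkout.command.ts");
  if (read.success && read.content) console.log(read.content.length);

  const results = await ops.execute([
    { type: "read", path: "package.json" },
    { type: "write", path: "notes/ai-chat.md", content: "# Notes" },
  ]);
  for (const r of results) console.log(r.operation.type, r.operation.path, r.success, r.error ?? "");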
package/dist/context/index.d.ts
DELETED

@@ -1,4 +0,0 @@
-import { FileInfo, SpecInfo, WorkspaceContext, WorkspaceContextConfig, WorkspaceSummary, createWorkspaceContext } from "./workspace-context.js";
-import { BuiltContext, ContextBuilder, ContextBuilderOptions, ContextEntry, createContextBuilder } from "./context-builder.js";
-import { FileOperation, FileOperationResult, FileOperations, FileReadResult, FileSystem, FileWriteResult, createNodeFileOperations } from "./file-operations.js";
-export { BuiltContext, ContextBuilder, ContextBuilderOptions, ContextEntry, FileInfo, FileOperation, FileOperationResult, FileOperations, FileReadResult, FileSystem, FileWriteResult, SpecInfo, WorkspaceContext, WorkspaceContextConfig, WorkspaceSummary, createContextBuilder, createNodeFileOperations, createWorkspaceContext };
package/dist/context/workspace-context.d.ts
DELETED

@@ -1,116 +0,0 @@
-//#region src/context/workspace-context.d.ts
-/**
- * Workspace context management
- *
- * Provides access to specs, files, and codebase information
- * for context-aware AI chat assistance.
- */
-/**
- * Spec information for context
- */
-interface SpecInfo {
-  name: string;
-  version: number;
-  type: 'command' | 'query' | 'event' | 'presentation';
-  path: string;
-  description?: string;
-  tags?: string[];
-}
-/**
- * File information for context
- */
-interface FileInfo {
-  path: string;
-  relativePath: string;
-  name: string;
-  extension: string;
-  size: number;
-  isSpec: boolean;
-}
-/**
- * Workspace summary for context
- */
-interface WorkspaceSummary {
-  name: string;
-  path: string;
-  specs: {
-    total: number;
-    commands: number;
-    queries: number;
-    events: number;
-    presentations: number;
-  };
-  files: {
-    total: number;
-    typescript: number;
-    specFiles: number;
-  };
-}
-/**
- * Configuration for workspace context
- */
-interface WorkspaceContextConfig {
-  /** Root path of the workspace */
-  workspacePath: string;
-  /** File patterns to include */
-  includePatterns?: string[];
-  /** File patterns to exclude */
-  excludePatterns?: string[];
-  /** Maximum file size to read (bytes) */
-  maxFileSize?: number;
-  /** Whether to enable file writes */
-  allowWrites?: boolean;
-}
-/**
- * Workspace context for AI chat
- */
-declare class WorkspaceContext {
-  readonly workspacePath: string;
-  readonly allowWrites: boolean;
-  private specs;
-  private files;
-  private initialized;
-  constructor(config: WorkspaceContextConfig);
-  /**
-   * Initialize the workspace context by scanning files
-   */
-  initialize(): Promise<void>;
-  /**
-   * Get all discovered specs
-   */
-  getSpecs(): SpecInfo[];
-  /**
-   * Get all discovered files
-   */
-  getFiles(): FileInfo[];
-  /**
-   * Add specs to the context
-   */
-  addSpecs(specs: SpecInfo[]): void;
-  /**
-   * Add files to the context
-   */
-  addFiles(files: FileInfo[]): void;
-  /**
-   * Get a summary of the workspace for context
-   */
-  getSummary(): WorkspaceSummary;
-  /**
-   * Get a context summary for LLM prompts
-   */
-  getContextSummary(): string;
-  /**
-   * Find specs matching a query
-   */
-  findSpecs(query: string): SpecInfo[];
-  /**
-   * Find files matching a query
-   */
-  findFiles(query: string): FileInfo[];
-}
-/**
- * Create a workspace context from a path
- */
-declare function createWorkspaceContext(path: string, options?: Partial<WorkspaceContextConfig>): Promise<WorkspaceContext>;
-//#endregion
-export { FileInfo, SpecInfo, WorkspaceContext, WorkspaceContextConfig, WorkspaceSummary, createWorkspaceContext };
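And the removed WorkspaceContext surface, again using only names from the declarations above (the path, options, and query are placeholders):

  import { createWorkspaceContext } from "@lssm/module.ai-chat/context";

  const ctx = await createWorkspaceContext("/path/to/workspace", { allowWrites: false });
  const summary = ctx.getSummary();
  console.log(`${summary.specs.total} specs, ${summary.files.typescript} TypeScript files`);
  console.log(ctx.getContextSummary()); // text block intended for LLM prompts
  console.log(ctx.findSpecs("checkout")); // SpecInfo[] matching the query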