@react-frameui/loki-ai 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,71 @@
1
+ "use strict";
2
+ var __importDefault = (this && this.__importDefault) || function (mod) {
3
+ return (mod && mod.__esModule) ? mod : { "default": mod };
4
+ };
5
+ Object.defineProperty(exports, "__esModule", { value: true });
6
+ exports.LocalRAG = void 0;
7
+ const fs_1 = __importDefault(require("fs"));
8
+ const path_1 = __importDefault(require("path"));
9
+ // @ts-ignore
10
+ const ollama_1 = require("@langchain/community/embeddings/ollama");
11
+ // @ts-ignore
12
+ const memory_1 = require("langchain/vectorstores/memory");
13
+ // @ts-ignore
14
+ const text_splitter_1 = require("langchain/text_splitter");
15
+ const config_1 = require("../../utils/config");
16
class LocalRAG {
    constructor() {
        // Vector store is built lazily by indexRepo(); null until then.
        this.vectorStore = null;
        // @ts-ignore
        this.embeddings = new ollama_1.OllamaEmbeddings({
            model: config_1.CONFIG.OLLAMA_EMBED_MODEL,
            baseUrl: config_1.CONFIG.OLLAMA_HOST,
        });
    }
    /**
     * Indexes every supported source file under `cwd` into an in-memory
     * vector store, chunked (1000 chars, 200 overlap) for embedding.
     */
    async indexRepo(cwd) {
        console.log(`[LangChain RAG] Indexing ${cwd}...`);
        const docs = this.loadDocuments(cwd);
        // @ts-ignore
        const splitter = new text_splitter_1.RecursiveCharacterTextSplitter({
            chunkSize: 1000,
            chunkOverlap: 200,
        });
        const chunks = await splitter.createDocuments(docs.map((d) => d.pageContent), docs.map((d) => d.metadata));
        // @ts-ignore
        this.vectorStore = await memory_1.MemoryVectorStore.fromDocuments(chunks, this.embeddings);
        console.log(`[LangChain RAG] Indexed ${chunks.length} chunks.`);
    }
    /**
     * Returns the top-k chunks most similar to `question`, formatted as
     * "[source]: content" entries joined by blank lines.
     */
    async query(question, k = 4) {
        if (!this.vectorStore)
            return "RAG not initialized. Run index first.";
        const results = await this.vectorStore.similaritySearch(question, k);
        return results.map((r) => `[${r.metadata.source}]: ${r.pageContent}`).join('\n\n');
    }
    /**
     * Recursively collects {pageContent, metadata} documents for every
     * .ts/.js/.md/.json file under `dir`, skipping build/VCS directories.
     * Fix: `fileList` was declared but never used; it is now the shared
     * accumulator, so recursion no longer rebuilds and re-copies arrays at
     * every level. Unreadable directories are skipped (best-effort indexing).
     */
    loadDocuments(dir, fileList = []) {
        try {
            const entries = fs_1.default.readdirSync(dir, { withFileTypes: true });
            for (const entry of entries) {
                const fullPath = path_1.default.join(dir, entry.name);
                if (entry.isDirectory()) {
                    if (['node_modules', '.git', 'dist', 'out'].includes(entry.name))
                        continue;
                    this.loadDocuments(fullPath, fileList);
                }
                else if (['.ts', '.js', '.md', '.json'].includes(path_1.default.extname(entry.name))) {
                    fileList.push({
                        pageContent: fs_1.default.readFileSync(fullPath, 'utf-8'),
                        metadata: { source: path_1.default.relative(process.cwd(), fullPath) }
                    });
                }
            }
            return fileList;
        }
        catch {
            // Best-effort: return whatever was collected so far instead of aborting.
            return fileList;
        }
    }
}
exports.LocalRAG = LocalRAG;
@@ -0,0 +1,74 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.lokiTools = exports.gitStatusTool = exports.fsReadTool = exports.fsListTool = exports.mathTool = exports.dateTool = exports.timeTool = void 0;
4
+ const tools_1 = require("@langchain/core/tools");
5
+ const zod_1 = require("zod");
6
+ const toolRegistry_1 = require("../tools/toolRegistry");
7
+ // Helper to wrap LOKI tools
8
+ // We verify parameters manually for now since LOKI tools behave predictably
9
+ exports.timeTool = new tools_1.DynamicStructuredTool({
10
+ name: "get_time",
11
+ description: toolRegistry_1.TOOLS.get_time.description,
12
+ schema: zod_1.z.object({
13
+ timezone: zod_1.z.string().optional()
14
+ }),
15
+ func: async ({ timezone }) => {
16
+ return await toolRegistry_1.TOOLS.get_time.execute({ timezone });
17
+ }
18
+ });
19
+ exports.dateTool = new tools_1.DynamicStructuredTool({
20
+ name: "get_date",
21
+ description: toolRegistry_1.TOOLS.get_date.description,
22
+ schema: zod_1.z.object({
23
+ timezone: zod_1.z.string().optional()
24
+ }),
25
+ func: async ({ timezone }) => {
26
+ return await toolRegistry_1.TOOLS.get_date.execute({ timezone });
27
+ }
28
+ });
29
+ exports.mathTool = new tools_1.DynamicStructuredTool({
30
+ name: "calculate",
31
+ description: toolRegistry_1.TOOLS.calculate.description,
32
+ schema: zod_1.z.object({
33
+ expression: zod_1.z.string().describe("The math expression to evaluate")
34
+ }),
35
+ func: async ({ expression }) => {
36
+ return await toolRegistry_1.TOOLS.calculate.execute({ expression });
37
+ }
38
+ });
39
+ exports.fsListTool = new tools_1.DynamicStructuredTool({
40
+ name: "list_files",
41
+ description: toolRegistry_1.TOOLS.list_files.description,
42
+ schema: zod_1.z.object({
43
+ path: zod_1.z.string().optional().describe("Directory path to list")
44
+ }),
45
+ func: async ({ path }) => {
46
+ return await toolRegistry_1.TOOLS.list_files.execute({ path });
47
+ }
48
+ });
49
+ exports.fsReadTool = new tools_1.DynamicStructuredTool({
50
+ name: "read_file",
51
+ description: toolRegistry_1.TOOLS.read_file.description,
52
+ schema: zod_1.z.object({
53
+ path: zod_1.z.string().describe("File path to read")
54
+ }),
55
+ func: async ({ path }) => {
56
+ return await toolRegistry_1.TOOLS.read_file.execute({ path });
57
+ }
58
+ });
59
+ exports.gitStatusTool = new tools_1.DynamicStructuredTool({
60
+ name: "git_status",
61
+ description: toolRegistry_1.TOOLS.git_status.description,
62
+ schema: zod_1.z.object({}),
63
+ func: async () => {
64
+ return await toolRegistry_1.TOOLS.git_status.execute({});
65
+ }
66
+ });
67
+ exports.lokiTools = [
68
+ exports.timeTool,
69
+ exports.dateTool,
70
+ exports.mathTool,
71
+ exports.fsListTool,
72
+ exports.fsReadTool,
73
+ exports.gitStatusTool
74
+ ];
@@ -0,0 +1,49 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.refactorGraph = void 0;
4
+ const langgraph_1 = require("@langchain/langgraph");
5
+ const messages_1 = require("@langchain/core/messages");
6
+ const llmAdapter_1 = require("../langchain/llmAdapter");
7
// Agents execution
// Planner agent: asks the model to draft a plan from the conversation so far.
// `config` is accepted for LangGraph's node signature but not read here.
async function plannerNode(state, config) {
    const model = new llmAdapter_1.LokiChatModel();
    const conversation = [
        ...state.messages,
        new messages_1.HumanMessage("Create a plan for this request.")
    ];
    const result = await model.invoke(conversation);
    return { plan: result.content, messages: [result] };
}
16
// Analyzer agent: reviews the plan produced by the planner node.
async function analyzerNode(state) {
    const model = new llmAdapter_1.LokiChatModel();
    const analysisPrompt = new messages_1.HumanMessage(`Analyze the plan: ${state.plan}`);
    const result = await model.invoke([analysisPrompt]);
    return { analysis: result.content };
}
23
// Refactor agent: generates code from the accumulated plan and analysis.
async function refactorNode(state) {
    const model = new llmAdapter_1.LokiChatModel();
    const codePrompt = new messages_1.HumanMessage(`Generate code for plan: ${state.plan}. Analysis: ${state.analysis}`);
    const result = await model.invoke([codePrompt]);
    return { code: result.content };
}
30
// Build Graph
// State channels: `messages` accumulates across nodes via concat; the other
// channels are simple last-write-wins slots populated by the nodes above.
const builder = new langgraph_1.StateGraph({
    channels: {
        messages: { reducer: (a, b) => a.concat(b) },
        plan: null,
        analysis: null,
        code: null,
        approved: null
    }
});
// Register nodes
builder.addNode("planner", plannerNode);
builder.addNode("analyzer", analyzerNode);
builder.addNode("refactor", refactorNode);
// Define edges with strict casts to bypass TS checking "Start" node logic
// Linear pipeline: planner -> analyzer -> refactor -> END.
builder.addEdge("planner", "analyzer");
builder.addEdge("analyzer", "refactor");
builder.addEdge("refactor", langgraph_1.END);
builder.setEntryPoint("planner");
// Compiled graph, exported for the orchestrator to run.
exports.refactorGraph = builder.compile();
@@ -0,0 +1,46 @@
1
+ "use strict";
2
+ var __importDefault = (this && this.__importDefault) || function (mod) {
3
+ return (mod && mod.__esModule) ? mod : { "default": mod };
4
+ };
5
+ Object.defineProperty(exports, "__esModule", { value: true });
6
+ exports.GroqProvider = void 0;
7
+ const axios_1 = __importDefault(require("axios"));
8
+ const config_1 = require("../utils/config");
9
class GroqProvider {
    /**
     * Sends `prompt` to the Groq chat-completions endpoint and returns the
     * assistant's reply text.
     * @throws if GROQ_API_KEY is unset, the HTTP request fails, or the
     *         response payload does not contain a message.
     */
    async generate(prompt) {
        if (!config_1.CONFIG.GROQ_API_KEY) {
            throw new Error('GROQ_API_KEY is not set. Please set it in your environment to use Groq.');
        }
        try {
            const response = await axios_1.default.post(config_1.CONFIG.GROQ_API_URL, {
                model: config_1.CONFIG.GROQ_MODEL,
                messages: [
                    { role: 'user', content: prompt } // Groq expects chat format
                ]
            }, {
                headers: {
                    'Authorization': `Bearer ${config_1.CONFIG.GROQ_API_KEY}`,
                    'Content-Type': 'application/json'
                },
                timeout: config_1.CONFIG.TIMEOUT
            });
            // Fix: guard against an empty/unexpected payload instead of crashing
            // with a TypeError on `choices[0]`.
            const content = response.data?.choices?.[0]?.message?.content;
            if (typeof content !== 'string') {
                throw new Error('Groq API error: unexpected response shape (no message content)');
            }
            return content;
        }
        catch (error) {
            if (axios_1.default.isAxiosError(error)) {
                throw new Error(`Groq API error: ${error.response?.data?.error?.message || error.message}`);
            }
            throw error;
        }
    }
    /**
     * Cheap health probe: only verifies an API key is configured.
     * No network call is made — a real request would cost tokens.
     */
    async checkHealth() {
        if (!config_1.CONFIG.GROQ_API_KEY)
            return false;
        return true;
    }
    // Human-readable provider label shown in the CLI.
    getName() {
        return `Groq Cloud (${config_1.CONFIG.GROQ_MODEL})`;
    }
}
exports.GroqProvider = GroqProvider;
@@ -0,0 +1,112 @@
1
+ "use strict";
2
+ var __importDefault = (this && this.__importDefault) || function (mod) {
3
+ return (mod && mod.__esModule) ? mod : { "default": mod };
4
+ };
5
+ Object.defineProperty(exports, "__esModule", { value: true });
6
+ exports.OllamaProvider = void 0;
7
+ const axios_1 = __importDefault(require("axios"));
8
+ const config_1 = require("../utils/config");
9
class OllamaProvider {
    /**
     * Non-streaming generation: POSTs the prompt and returns the full
     * response text. Transport errors are normalized by handleError
     * (which always throws).
     */
    async generate(prompt, context, signal) {
        const url = `${config_1.CONFIG.OLLAMA_HOST}/api/generate`;
        try {
            const response = await axios_1.default.post(url, {
                model: config_1.CONFIG.OLLAMA_MODEL,
                prompt: prompt,
                stream: false,
            }, {
                timeout: config_1.CONFIG.TIMEOUT,
                signal: signal
            });
            if (response.data && response.data.response) {
                return response.data.response;
            }
            throw new Error('Invalid response format from Ollama');
        }
        catch (error) {
            this.handleError(error);
            return ''; // unreachable: handleError always throws; keeps the signature total
        }
    }
    /**
     * Streaming generation: invokes `onToken` for each token and resolves with
     * the concatenated text.
     * Fix: Ollama streams NDJSON, and a JSON line can be split across two
     * network chunks. The previous implementation parsed each chunk in
     * isolation and silently dropped tokens at chunk boundaries; we now keep
     * a carry-over buffer and only parse complete lines. Also guards against
     * settling the promise more than once (done + end / error).
     */
    async streamGenerate(prompt, onToken, signal) {
        const url = `${config_1.CONFIG.OLLAMA_HOST}/api/generate`;
        try {
            const response = await axios_1.default.post(url, {
                model: config_1.CONFIG.OLLAMA_MODEL,
                prompt: prompt,
                stream: true
            }, {
                responseType: 'stream',
                timeout: config_1.CONFIG.TIMEOUT,
                signal: signal
            });
            return new Promise((resolve, reject) => {
                let fullText = '';
                let buffered = ''; // partial NDJSON line carried between chunks
                let settled = false;
                const stream = response.data;
                const finish = (text) => {
                    if (!settled) {
                        settled = true;
                        resolve(text);
                    }
                };
                const consumeLine = (line) => {
                    if (!line)
                        return;
                    try {
                        const json = JSON.parse(line);
                        if (json.response) {
                            onToken(json.response);
                            fullText += json.response;
                        }
                        if (json.done) {
                            finish(fullText);
                        }
                    }
                    catch (e) {
                        // Malformed line: skip it rather than killing the stream.
                    }
                };
                stream.on('data', (chunk) => {
                    buffered += chunk.toString();
                    const lines = buffered.split('\n');
                    buffered = lines.pop() ?? ''; // last element may be an incomplete line
                    for (const line of lines) {
                        consumeLine(line);
                    }
                });
                stream.on('end', () => {
                    consumeLine(buffered); // flush a trailing line with no newline
                    finish(fullText);
                });
                stream.on('error', (err) => {
                    if (settled)
                        return;
                    settled = true;
                    // If aborted, axios destroys stream with error
                    if (signal?.aborted) {
                        reject(new Error('Aborted'));
                    }
                    else {
                        reject(err);
                    }
                });
            });
        }
        catch (error) {
            // Axios throws CanceledError if aborted
            if (axios_1.default.isCancel(error)) {
                throw new Error('Request cancelled by user');
            }
            this.handleError(error);
            return ''; // unreachable: handleError always throws
        }
    }
    // Quick liveness probe against the Ollama host root (2s timeout).
    async checkHealth() {
        try {
            await axios_1.default.get(config_1.CONFIG.OLLAMA_HOST, { timeout: 2000 });
            return true;
        }
        catch {
            return false;
        }
    }
    // Human-readable provider label shown in the CLI.
    getName() {
        return `Ollama (${config_1.CONFIG.OLLAMA_MODEL})`;
    }
    /**
     * Normalizes transport errors into user-facing errors. Always throws:
     * cancellations are rethrown as-is, connection refusals get a hint,
     * other axios failures are wrapped, anything else propagates.
     */
    handleError(error) {
        if (axios_1.default.isCancel(error)) {
            throw error;
        }
        if (axios_1.default.isAxiosError(error)) {
            if (error.code === 'ECONNREFUSED') {
                throw new Error(`Could not connect to Ollama at ${config_1.CONFIG.OLLAMA_HOST}. Is it running?`);
            }
            throw new Error(`Ollama API error: ${error.message}`);
        }
        throw error;
    }
}
exports.OllamaProvider = OllamaProvider;
@@ -0,0 +1,14 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.getProvider = getProvider;
4
+ const ollama_1 = require("./ollama");
5
+ const groq_1 = require("./groq");
6
+ const config_1 = require("../utils/config");
7
/**
 * Resolves the LLM provider to use. Falls back to CONFIG.DEFAULT_PROVIDER
 * when no provider is requested, and to Ollama for any unrecognized key.
 */
function getProvider(requestedProvider) {
    const selected = (requestedProvider || config_1.CONFIG.DEFAULT_PROVIDER).toLowerCase();
    switch (selected) {
        case 'groq':
            return new groq_1.GroqProvider();
        default:
            // Default to Ollama
            return new ollama_1.OllamaProvider();
    }
}
@@ -0,0 +1,2 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
@@ -0,0 +1,57 @@
1
+ "use strict";
2
+ var __importDefault = (this && this.__importDefault) || function (mod) {
3
+ return (mod && mod.__esModule) ? mod : { "default": mod };
4
+ };
5
+ Object.defineProperty(exports, "__esModule", { value: true });
6
+ exports.loadMemory = loadMemory;
7
+ exports.saveMemory = saveMemory;
8
+ exports.appendMemory = appendMemory;
9
+ const fs_1 = __importDefault(require("fs"));
10
+ const config_1 = require("../utils/config");
11
/**
 * Ensures the configuration directory exists, creating it
 * (including parents) on demand.
 */
function ensureConfigDir() {
    const dir = config_1.CONFIG.CONFIG_DIR;
    if (fs_1.default.existsSync(dir)) {
        return;
    }
    fs_1.default.mkdirSync(dir, { recursive: true });
}
19
/**
 * Loads the last N memory items from local storage.
 * Returns at most CONFIG.MAX_MEMORY_ITEMS entries (the newest ones).
 * A missing, unreadable, or corrupt memory file yields an empty array so the
 * CLI never crashes on bad persisted state.
 */
function loadMemory() {
    try {
        ensureConfigDir();
        if (!fs_1.default.existsSync(config_1.CONFIG.MEMORY_FILE)) {
            return [];
        }
        const data = fs_1.default.readFileSync(config_1.CONFIG.MEMORY_FILE, 'utf-8');
        const items = JSON.parse(data);
        // Return only the last MAX_MEMORY_ITEMS
        return items.slice(-config_1.CONFIG.MAX_MEMORY_ITEMS);
    }
    catch (error) {
        // If memory is corrupt or unreadable, return empty to prevent crash
        return [];
    }
}
38
/**
 * Saves the full list of memory items to local storage as pretty-printed
 * JSON. Write failures are logged and swallowed so the CLI keeps running.
 */
function saveMemory(memory) {
    try {
        ensureConfigDir();
        const serialized = JSON.stringify(memory, null, 2);
        fs_1.default.writeFileSync(config_1.CONFIG.MEMORY_FILE, serialized, 'utf-8');
    }
    catch (error) {
        console.error('Failed to save memory:', error);
    }
}
50
/**
 * Appends a single item to the persistent memory store
 * (read-modify-write of the whole memory file).
 */
function appendMemory(item) {
    const history = loadMemory();
    history.push(item);
    saveMemory(history);
}
@@ -0,0 +1,88 @@
1
+ "use strict";
2
+ var __importDefault = (this && this.__importDefault) || function (mod) {
3
+ return (mod && mod.__esModule) ? mod : { "default": mod };
4
+ };
5
+ Object.defineProperty(exports, "__esModule", { value: true });
6
+ exports.storeSemanticMemory = storeSemanticMemory;
7
+ exports.retrieveRelevantMemory = retrieveRelevantMemory;
8
+ const fs_1 = __importDefault(require("fs"));
9
+ const axios_1 = __importDefault(require("axios"));
10
+ const config_1 = require("../utils/config");
11
// Simple cosine similarity between two equal-length numeric vectors.
// Fix: previously a zero-magnitude vector produced NaN (division by zero),
// which poisons score sorting in retrieval; that degenerate case now
// returns 0.
function cosineSimilarity(vecA, vecB) {
    let dotProduct = 0;
    let normA = 0;
    let normB = 0;
    for (let i = 0; i < vecA.length; i++) {
        dotProduct += vecA[i] * vecB[i];
        normA += vecA[i] * vecA[i];
        normB += vecB[i] * vecB[i];
    }
    const denominator = Math.sqrt(normA) * Math.sqrt(normB);
    if (denominator === 0) {
        return 0;
    }
    return dotProduct / denominator;
}
23
/**
 * Fetches an embedding vector for `text` from the local Ollama server.
 * Returns [] on any failure so callers can treat embeddings as best-effort.
 */
async function getEmbedding(text) {
    const endpoint = `${config_1.CONFIG.OLLAMA_HOST}/api/embeddings`;
    try {
        const payload = {
            model: config_1.CONFIG.OLLAMA_EMBED_MODEL,
            prompt: text,
        };
        const response = await axios_1.default.post(endpoint, payload);
        return response.data.embedding;
    }
    catch (e) {
        // Embeddings are optional; degrade silently.
        return [];
    }
}
36
// Creates the semantic-memory directory (and parents) if it is missing.
function ensureDir() {
    const dir = config_1.CONFIG.SEMANTIC_MEMORY_DIR;
    if (fs_1.default.existsSync(dir)) {
        return;
    }
    fs_1.default.mkdirSync(dir, { recursive: true });
}
41
// Loads the semantic index from disk; any error (missing dir, unreadable
// file, corrupt JSON) yields an empty index.
function loadIndex() {
    try {
        ensureDir();
        if (!fs_1.default.existsSync(config_1.CONFIG.SEMANTIC_INDEX_FILE)) {
            return [];
        }
        const raw = fs_1.default.readFileSync(config_1.CONFIG.SEMANTIC_INDEX_FILE, 'utf-8');
        return JSON.parse(raw);
    }
    catch {
        return [];
    }
}
52
// Writes the full semantic index to disk as pretty-printed JSON.
function saveIndex(index) {
    ensureDir();
    const serialized = JSON.stringify(index, null, 2);
    fs_1.default.writeFileSync(config_1.CONFIG.SEMANTIC_INDEX_FILE, serialized);
}
56
/**
 * Embeds `content` and appends it to the persistent semantic index.
 * Very short strings (< 10 chars) are ignored as noise, and texts that fail
 * to embed (e.g. Ollama unavailable) are silently skipped.
 */
async function storeSemanticMemory(content, source = 'chat') {
    if (!content || content.length < 10)
        return; // Ignore noise
    // Check if Ollama is embedding-capable
    const embedding = await getEmbedding(content);
    if (!embedding || embedding.length === 0)
        return;
    const metadata = {
        source,
        timestamp: new Date().toISOString()
    };
    const entry = {
        id: Math.random().toString(36).substring(7),
        content,
        embedding,
        metadata
    };
    const index = loadIndex();
    index.push(entry);
    saveIndex(index);
}
76
/**
 * Returns up to `topK` stored memories ranked by cosine similarity to
 * `query`, formatted as "[Memory: <timestamp>] <content>" strings.
 * Returns [] when the query cannot be embedded.
 */
async function retrieveRelevantMemory(query, topK = 5) {
    const queryEmbedding = await getEmbedding(query);
    if (!queryEmbedding || queryEmbedding.length === 0)
        return [];
    const ranked = loadIndex()
        .map((entry) => ({
            entry,
            score: cosineSimilarity(queryEmbedding, entry.embedding)
        }))
        .sort((a, b) => b.score - a.score);
    return ranked
        .slice(0, topK)
        .map((s) => `[Memory: ${s.entry.metadata.timestamp}] ${s.entry.content}`);
}
@@ -0,0 +1,83 @@
1
+ "use strict";
2
+ var __importDefault = (this && this.__importDefault) || function (mod) {
3
+ return (mod && mod.__esModule) ? mod : { "default": mod };
4
+ };
5
+ Object.defineProperty(exports, "__esModule", { value: true });
6
+ exports.indexRepository = indexRepository;
7
+ exports.retrieveContext = retrieveContext;
8
+ const fs_1 = __importDefault(require("fs"));
9
+ const path_1 = __importDefault(require("path"));
10
+ const crypto_1 = __importDefault(require("crypto"));
11
+ const config_1 = require("../utils/config");
12
+ const semanticStore_1 = require("../memory/semanticStore");
13
+ // Local RAG Store Path
14
+ const RAG_DIR = path_1.default.join(config_1.CONFIG.CONFIG_DIR, 'rag');
15
// Stable per-repository key: md5 hex digest of the repo's absolute path.
function getRepoHash(cwd) {
    const hasher = crypto_1.default.createHash('md5');
    hasher.update(cwd);
    return hasher.digest('hex');
}
18
// Directory under the RAG store that serves as this repository's index marker.
function getRepoIndexDir(cwd) {
    const repoKey = getRepoHash(cwd);
    return path_1.default.join(RAG_DIR, repoKey);
}
21
// Simple heuristic to find source files: walk `dir` recursively, collecting
// .ts/.js/.md/.json paths into `fileList` while skipping dependency, VCS,
// and build-output directories.
function findSourceFiles(dir, fileList = []) {
    const skipDirs = ['node_modules', '.git', 'dist', 'out', 'build'];
    const keepExts = ['.ts', '.js', '.md', '.json'];
    for (const entry of fs_1.default.readdirSync(dir, { withFileTypes: true })) {
        const fullPath = path_1.default.join(dir, entry.name);
        if (entry.isDirectory()) {
            if (!skipDirs.includes(entry.name)) {
                findSourceFiles(fullPath, fileList);
            }
        }
        else if (keepExts.includes(path_1.default.extname(entry.name))) {
            fileList.push(fullPath);
        }
    }
    return fileList;
}
38
/**
 * One-shot repository indexing into the shared semantic store.
 * Embeds each source file (truncated to ~2k chars) via storeSemanticMemory,
 * then drops a marker directory so repeat runs are skipped.
 * NOTE(review): the marker is created even if embedding failed for every
 * file, so a failed run will not be retried automatically — confirm this is
 * intended.
 */
async function indexRepository(cwd = process.cwd()) {
    const indexDir = getRepoIndexDir(cwd);
    // Check if already indexed (simple existence check for now)
    // Real implementation would check timestamps
    if (fs_1.default.existsSync(indexDir)) {
        console.log(`[RAG] Repository already indexed: ${indexDir}`);
        return;
    }
    console.log(`[RAG] Indexing repository...`);
    const files = findSourceFiles(cwd);
    // We reuse semantic memory store logic but tag it with "RAG"
    // Ideally we'd use a separate collection, but for this MVP, shared store with metadata is fine
    // Or we use storeSemanticMemory but force source="rag"
    // Actually, 'semanticStore' uses a single file index.json.
    // For per-repo RAG, we need to namespace it.
    // Complexity warning: The current simple store is single-file.
    // Upgrading it for per-repo is needed.
    // Strategy: We will just index into the SAME store but with metadata: { repo: hash }
    let count = 0;
    for (const f of files) {
        try {
            const content = fs_1.default.readFileSync(f, 'utf-8');
            // Chunking would go here. For now, we take whole file if small, or truncate.
            // Limit to ~2k chars for embedding
            const chunk = content.substring(0, 2000);
            // Artificial metadata injection for retrieval filtering
            // Note: Our current retrieveRelevantMemory doesn't support metadata filtering yet.
            // We accepted this limitation for the MVP.
            // We just store it.
            await (0, semanticStore_1.storeSemanticMemory)(chunk, `file:${path_1.default.relative(cwd, f)}`);
            // Safety cap: stop after roughly 50 stored files (post-increment check).
            if (count++ > 50)
                break; // Safety limit
            process.stdout.write('.');
        }
        catch { }
    }
    console.log(`\n[RAG] Indexed ${count} files.`);
    // Mark as indexed
    fs_1.default.mkdirSync(indexDir, { recursive: true });
}
78
/**
 * Retrieves context for `query` from the shared semantic store.
 * `cwd` is currently unused: results are not filtered per repository yet,
 * so chat memories and file memories are returned together.
 */
async function retrieveContext(query, cwd = process.cwd()) {
    const hits = await (0, semanticStore_1.retrieveRelevantMemory)(query, 3);
    return hits.join('\n');
}
@@ -0,0 +1,35 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.engineCommand = engineCommand;
4
+ const orchestrator_1 = require("../agents/orchestrator");
5
+ const child_process_1 = require("child_process");
6
/**
 * CLI entry for the refactor engine: runs the multi-agent refactor workflow
 * for `instruction` and prints the proposed changes. Dry-run only — no files
 * are modified.
 */
async function engineCommand(instruction) {
    if (!instruction) {
        console.log("Please provide a refactor instruction e.g. 'convert to typescript'");
        return;
    }
    try {
        // Enforce safety: Check git status first
        try {
            const status = (0, child_process_1.execSync)('git status --porcelain').toString();
            if (status.trim().length > 0) {
                console.log("⚠️ Standard Safety Check: You have uncommitted changes.");
                console.log(" LOKI recommends a clean state before refactoring.");
                // In a real CLI we would ask prompt yes/no.
                // For now, proceed with warning.
            }
        }
        catch {
            // execSync throws when not inside a git work tree (or git is absent).
            console.log("⚠️ Not a git repository. Safety features limited.");
        }
        console.log(`🧠 LOKI Engine: Initializing Refactor Workflow...`);
        const result = await (0, orchestrator_1.runRefactorWorkflow)(instruction);
        console.log(result.details);
        console.log("\n---------------------------------------------------");
        console.log("To apply changes, this scaffold would prompt for 'Yes'.");
        console.log("Currently, changes are proposed ONLY. No files modified.");
    }
    catch (e) {
        console.error("Engine Error:", e.message);
    }
}