codexa 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,130 @@
1
+ "use strict";
2
+ var __importDefault = (this && this.__importDefault) || function (mod) {
3
+ return (mod && mod.__esModule) ? mod : { "default": mod };
4
+ };
5
+ Object.defineProperty(exports, "__esModule", { value: true });
6
+ exports.createLLMClient = createLLMClient;
7
+ const node_fetch_1 = __importDefault(require("node-fetch"));
8
+ const ai_1 = require("ai");
9
+ const groq_1 = require("@ai-sdk/groq");
10
class OllamaLLM {
    model;
    baseURL;
    /**
     * @param {string} model - Name of the Ollama model to run.
     * @param {string} baseURL - Base URL of the Ollama server (no trailing slash).
     */
    constructor(model, baseURL) {
        this.model = model;
        this.baseURL = baseURL;
    }
    /**
     * Generate a completion via the Ollama /api/generate endpoint.
     * Messages are flattened into a single "ROLE: content" prompt.
     * When options.stream is true, NDJSON response lines are parsed
     * incrementally, each token is forwarded to options.onToken, and the
     * concatenated text is returned; otherwise the full response is returned.
     */
    async generate(messages, options) {
        const prompt = messages
            .map((m) => `${m.role.toUpperCase()}: ${m.content}`)
            .join('\n\n');
        const payload = {
            model: this.model,
            prompt,
            stream: options?.stream ?? false,
        };
        const resp = await (0, node_fetch_1.default)(`${this.baseURL}/api/generate`, {
            method: 'POST',
            headers: { 'Content-Type': 'application/json' },
            body: JSON.stringify(payload),
        });
        if (!resp.ok) {
            const text = await resp.text();
            throw new Error(`Ollama request failed: ${resp.status} ${text}`);
        }
        if (!options?.stream) {
            const json = await resp.json();
            return json.response ?? json.output ?? '';
        }
        const body = resp.body;
        if (!body) {
            throw new Error('Streaming not supported on this response body.');
        }
        let full = '';
        let pending = '';
        // Parse one NDJSON line, appending its token and notifying the caller.
        // Blank lines and malformed fragments are skipped silently.
        const consume = (raw) => {
            const line = raw.trim();
            if (!line) {
                return;
            }
            try {
                const token = JSON.parse(line).response ?? '';
                if (token) {
                    full += token;
                    options.onToken?.(token);
                }
            }
            catch {
                // Incomplete or malformed JSON fragment — ignore it.
            }
        };
        for await (const chunk of body) {
            // Chunks may split JSON lines; keep the trailing partial in `pending`.
            pending += chunk.toString();
            const lines = pending.split('\n');
            pending = lines.pop() ?? '';
            lines.forEach(consume);
        }
        // Flush whatever remains after the stream ends.
        consume(pending);
        return full;
    }
}
83
class GroqLLM {
    model;
    apiKey;
    groq;
    /**
     * @param {string} model - Groq model identifier.
     * @param {string|undefined} apiKey - API key forwarded to the Groq SDK.
     */
    constructor(model, apiKey) {
        this.model = model;
        this.apiKey = apiKey;
        this.groq = (0, groq_1.createGroq)({ apiKey: this.apiKey });
    }
    /**
     * Generate a completion for the given chat messages.
     * When options.stream is true, tokens are forwarded to options.onToken
     * as they arrive and the concatenated text is returned; otherwise a
     * single blocking completion is returned.
     */
    async generate(messages, options) {
        const languageModel = this.groq(this.model);
        if (options?.stream) {
            const result = (0, ai_1.streamText)({
                model: languageModel,
                messages,
            });
            let collected = '';
            for await (const piece of result.textStream) {
                collected += piece;
                options.onToken?.(piece);
            }
            return collected;
        }
        const { text } = await (0, ai_1.generateText)({
            model: languageModel,
            messages,
        });
        return text;
    }
}
115
/**
 * Factory: selects an LLM client implementation based on config.modelProvider.
 *
 * @param {object} config - Agent configuration (modelProvider, model, localModelUrl).
 * @returns {OllamaLLM|GroqLLM} a client exposing generate(messages, options).
 * @throws {Error} when config.modelProvider is not 'local' or 'groq'.
 */
function createLLMClient(config) {
    if (config.modelProvider === 'local') {
        // Strip a trailing slash so URL joins in the client don't double up;
        // fall back to the default Ollama address when no URL is configured.
        const base = config.localModelUrl?.replace(/\/$/, '') || 'http://localhost:11434';
        if (process.env.AGENT_DEBUG) {
            console.error('Using Ollama client:', config.model, config.localModelUrl);
        }
        return new OllamaLLM(config.model, base);
    }
    if (config.modelProvider === 'groq') {
        if (process.env.AGENT_DEBUG) {
            console.error('Using Groq client:', config.model);
        }
        // NOTE(review): GROQ_API_KEY may be undefined here; the SDK would then
        // fail at request time — consider validating up front.
        return new GroqLLM(config.model, process.env.GROQ_API_KEY);
    }
    // Fix: the previous message ("Only local provider supported for now.")
    // was stale — groq is supported above. Name the offending provider instead.
    throw new Error(`Unsupported model provider: ${config.modelProvider}`);
}
@@ -0,0 +1,22 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.retrieveContext = retrieveContext;
4
+ exports.formatContext = formatContext;
5
+ const embeddings_1 = require("./embeddings");
6
+ const db_1 = require("./db");
7
/**
 * Embed the question, then pull the top-K nearest code chunks from the
 * vector store at config.dbPath.
 */
async function retrieveContext(question, config) {
    const embedder = await (0, embeddings_1.createEmbedder)(config);
    const vectors = await embedder.embed([question]);
    const queryVector = vectors[0];
    const store = new db_1.VectorStore(config.dbPath);
    store.init();
    return store.search(queryVector, config.topK);
}
14
/**
 * Render retrieved chunks as FILE/CODE_SNIPPET records separated by "---".
 * Uses the compressed form when present, otherwise the first 300 characters
 * of the raw content.
 */
function formatContext(results) {
    const sections = results.map((r) => {
        const snippet = r.compressed ?? r.content.slice(0, 300);
        return `FILE: ${r.filePath}:${r.startLine}-${r.endLine}\nCODE_SNIPPET: ${snippet}`;
    });
    return sections.join('\n\n---\n\n');
}
package/dist/types.js ADDED
@@ -0,0 +1,2 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
@@ -0,0 +1,13 @@
1
+ "use strict";
2
+ var __importDefault = (this && this.__importDefault) || function (mod) {
3
+ return (mod && mod.__esModule) ? mod : { "default": mod };
4
+ };
5
+ Object.defineProperty(exports, "__esModule", { value: true });
6
+ exports.log = void 0;
7
+ const chalk_1 = __importDefault(require("chalk"));
8
+ exports.log = {
9
+ info: (msg) => console.log(chalk_1.default.cyan('info'), msg),
10
+ success: (msg) => console.log(chalk_1.default.green('success'), msg),
11
+ warn: (msg) => console.log(chalk_1.default.yellow('warn'), msg),
12
+ error: (msg) => console.log(chalk_1.default.red('error'), msg),
13
+ };
package/package.json ADDED
@@ -0,0 +1,79 @@
1
+ {
2
+ "name": "codexa",
3
+ "version": "1.0.0",
4
+ "description": "CLI agent that indexes local repos and answers questions with hosted or local LLMs.",
5
+ "bin": {
6
+ "codexa": "bin/codexa.js"
7
+ },
8
+ "scripts": {
9
+ "build": "tsc -p tsconfig.json",
10
+ "clean": "rimraf dist",
11
+ "prepare": "npm run build",
12
+ "prepublishOnly": "npm run clean && npm run build",
13
+ "dev": "tsx src/cli.ts",
14
+ "smoke": "ts-node --transpile-only scripts/smoke.ts",
15
+ "lint": "eslint .",
16
+ "format": "prettier --write .",
17
+ "test": "vitest"
18
+ },
19
+ "type": "commonjs",
20
+ "keywords": [
21
+ "ai",
22
+ "cli",
23
+ "rag",
24
+ "codebase"
25
+ ],
26
+ "author": "",
27
+ "license": "MIT",
28
+ "repository": {
29
+ "type": "git",
30
+ "url": "https://github.com/sahitya-chandra/codexa.git"
31
+ },
32
+ "homepage": "https://github.com/sahitya-chandra/codexa#readme",
33
+ "bugs": {
34
+ "url": "https://github.com/sahitya-chandra/codexa/issues"
35
+ },
36
+ "files": [
37
+ "bin",
38
+ "dist",
39
+ "README.md",
40
+ "LICENSE"
41
+ ],
42
+ "engines": {
43
+ "node": ">=20.0.0"
44
+ },
45
+ "dependencies": {
46
+ "@ai-sdk/groq": "^2.0.32",
47
+ "@xenova/transformers": "^2.17.2",
48
+ "ai": "^5.0.105",
49
+ "better-sqlite3": "^9.6.0",
50
+ "chalk": "^5.3.0",
51
+ "commander": "^12.1.0",
52
+ "dotenv": "^16.4.5",
53
+ "fs-extra": "^11.2.0",
54
+ "globby": "^13.0.0",
55
+ "ignore": "^5.3.1",
56
+ "node-fetch": "^3.3.2",
57
+ "openai": "^4.73.1",
58
+ "ora": "^8.1.0"
59
+ },
60
+ "devDependencies": {
61
+ "@eslint/js": "^9.39.1",
62
+ "@types/better-sqlite3": "^7.6.9",
63
+ "@types/fs-extra": "^11.0.4",
64
+ "@types/node": "^22.19.1",
65
+ "@typescript-eslint/eslint-plugin": "^8.47.0",
66
+ "@typescript-eslint/parser": "^8.47.0",
67
+ "eslint": "^9.39.1",
68
+ "eslint-config-prettier": "^10.1.8",
69
+ "eslint-plugin-prettier": "^5.5.4",
70
+ "globals": "^16.5.0",
71
+ "prettier": "^3.6.2",
72
+ "rimraf": "^6.0.1",
73
+ "ts-node": "^10.9.2",
74
+ "tsx": "^4.20.6",
75
+ "typescript": "^5.6.3",
76
+ "typescript-eslint": "^8.47.0",
77
+ "vitest": "^4.0.14"
78
+ }
79
+ }