opencode-autognosis 2.0.0 → 2.0.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,17 +1,40 @@
- import { appendFileSync, writeFileSync } from "node:fs";
- import { homedir } from "node:os";
- import { join } from "node:path";
- const LOG_FILE = join(homedir(), ".opencode-autognosis.log");
- // Start a fresh session block
- writeFileSync(LOG_FILE, `
- --- Autognosis Session started: ${new Date().toISOString()} ---
- `, { flag: "a" });
- export function log(message, data) {
-     const timestamp = new Date().toISOString();
-     const line = data
-         ? `[${timestamp}] ${message}: ${JSON.stringify(data)}
- `
-         : `[${timestamp}] ${message}
- `;
-     appendFileSync(LOG_FILE, line);
+ import * as fs from "node:fs";
+ import * as path from "node:path";
+ const PROJECT_ROOT = process.cwd();
+ const LOG_DIR = path.join(PROJECT_ROOT, ".opencode", "logs");
+ const LOG_FILE = path.join(LOG_DIR, "autognosis.log");
+ // Ensure log directory exists
+ try {
+     if (!fs.existsSync(LOG_DIR)) {
+         fs.mkdirSync(LOG_DIR, { recursive: true });
+     }
+ }
+ catch (e) {
+     // Ignore error if we can't create directory (e.g. read-only fs)
+ }
+ export class Logger {
+     static formatMessage(module, message, data) {
+         const timestamp = new Date().toISOString();
+         let dataStr = "";
+         if (data) {
+             try {
+                 dataStr = typeof data === "string" ? data : JSON.stringify(data);
+             }
+             catch {
+                 dataStr = "[Circular/Unserializable]";
+             }
+         }
+         return `[${timestamp}] [${module}] ${message} ${dataStr}\n`;
+     }
+     static log(module, message, data) {
+         const line = this.formatMessage(module, message, data);
+         try {
+             // Append to log file synchronously to ensure write
+             fs.appendFileSync(LOG_FILE, line);
+         }
+         catch (e) {
+             // Fallback: strictly avoid console.log/error to prevent TUI breakage.
+             // We essentially swallow the log if file write fails.
+         }
+     }
+ }
  }
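For orientation, a minimal usage sketch of the new Logger (the module label and payload below are illustrative, not taken from the package). Each call appends one line to .opencode/logs/autognosis.log under the current working directory; nothing is ever written to the console.

    import { Logger } from "./services/logger.js";

    // Hypothetical call site; any module label works.
    Logger.log("Indexer", "Scanned repository", { files: 42 });
    // Appends a line shaped like:
    // [2026-01-01T00:00:00.000Z] [Indexer] Scanned repository {"files":42}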
@@ -0,0 +1,11 @@
+ export declare const DEFAULT_EMBEDDING_MODEL = "nomic-embed-text";
+ export declare const OLLAMA_BASE_URL = "http://127.0.0.1:11434";
+ export declare class OllamaService {
+     isInstalled(): Promise<boolean>;
+     isRunning(): Promise<boolean>;
+     install(): Promise<string>;
+     startServer(): Promise<void>;
+     pullModel(model?: string): Promise<void>;
+     getEmbedding(text: string, model?: string): Promise<number[]>;
+ }
+ export declare const ollama: OllamaService;
@@ -0,0 +1,132 @@
+ import { exec, spawn } from "node:child_process";
+ import { promisify } from "node:util";
+ import * as fs from "node:fs";
+ import * as path from "node:path";
+ import { Logger } from "./logger.js";
+ const execAsync = promisify(exec);
+ export const DEFAULT_EMBEDDING_MODEL = "nomic-embed-text";
+ export const OLLAMA_BASE_URL = "http://127.0.0.1:11434";
+ export class OllamaService {
+     async isInstalled() {
+         try {
+             await execAsync("which ollama");
+             return true;
+         }
+         catch {
+             return false;
+         }
+     }
+     async isRunning() {
+         try {
+             const controller = new AbortController();
+             const timeoutId = setTimeout(() => controller.abort(), 1000);
+             const res = await fetch(`${OLLAMA_BASE_URL}/api/version`, { signal: controller.signal });
+             clearTimeout(timeoutId);
+             return res.ok;
+         }
+         catch {
+             return false;
+         }
+     }
+     async install() {
+         const platform = process.platform;
+         try {
+             if (platform === "darwin") {
+                 // Try Homebrew first
+                 try {
+                     await execAsync("which brew");
+                     await execAsync("brew install ollama");
+                     return "Installed via Homebrew";
+                 }
+                 catch {
+                     // Fallback to script
+                     await execAsync("curl -fsSL https://ollama.com/install.sh | sh");
+                     return "Installed via official script";
+                 }
+             }
+             else if (platform === "linux") {
+                 await execAsync("curl -fsSL https://ollama.com/install.sh | sh");
+                 return "Installed via official script";
+             }
+             else {
+                 throw new Error("Automatic installation only supported on macOS and Linux. Please install Ollama manually.");
+             }
+         }
+         catch (error) {
+             throw new Error(`Installation failed: ${error.message}`);
+         }
+     }
+     async startServer() {
+         if (await this.isRunning())
+             return;
+         // Start in background
+         const logFile = fs.openSync(path.join(process.cwd(), ".opencode", "ollama.log"), "a");
+         const child = spawn("ollama", ["serve"], {
+             detached: true,
+             stdio: ["ignore", logFile, logFile]
+         });
+         child.unref();
+         // Wait for it to come up
+         let attempts = 0;
+         while (attempts < 10) {
+             await new Promise(r => setTimeout(r, 1000));
+             if (await this.isRunning())
+                 return;
+             attempts++;
+         }
+         throw new Error("Ollama server failed to start within 10 seconds");
+     }
+     async pullModel(model = DEFAULT_EMBEDDING_MODEL) {
+         // Check if exists
+         try {
+             const res = await fetch(`${OLLAMA_BASE_URL}/api/tags`);
+             const data = await res.json();
+             const models = data.models || [];
+             if (models.some((m) => m.name.includes(model))) {
+                 return; // Already exists
+             }
+         }
+         catch { }
+         // Pull model (this blocks, usually handled via CLI)
+         // We'll use the API to pull so we can await it
+         const res = await fetch(`${OLLAMA_BASE_URL}/api/pull`, {
+             method: "POST",
+             body: JSON.stringify({ name: model }),
+         });
+         if (!res.ok)
+             throw new Error(`Failed to pull model ${model}`);
+         // Read stream to completion to ensure it's done
+         const reader = res.body?.getReader();
+         if (reader) {
+             while (true) {
+                 const { done } = await reader.read();
+                 if (done)
+                     break;
+             }
+         }
+     }
+     async getEmbedding(text, model = DEFAULT_EMBEDDING_MODEL) {
+         if (!text || !text.trim())
+             return [];
+         try {
+             const res = await fetch(`${OLLAMA_BASE_URL}/api/embeddings`, {
+                 method: "POST",
+                 body: JSON.stringify({
+                     model,
+                     prompt: text
+                 })
+             });
+             if (!res.ok) {
+                 const errText = await res.text();
+                 throw new Error(`Ollama API error: ${res.status} ${errText}`);
+             }
+             const data = await res.json();
+             return data.embedding;
+         }
+         catch (error) {
+             Logger.log("Ollama", "Embedding failed", error);
+             return [];
+         }
+     }
+ }
+ export const ollama = new OllamaService();
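A hedged sketch of how the exported ollama singleton might be driven end to end. The import path assumes this module sits next to logger.js under the services directory, and the bootstrap order is an assumption; the package may orchestrate these calls differently.

    import { ollama, DEFAULT_EMBEDDING_MODEL } from "./services/ollama.js";

    // Assumed bootstrap sequence: install if missing, start the server,
    // ensure the embedding model is present, then embed some text.
    if (!(await ollama.isInstalled())) {
        await ollama.install();
    }
    if (!(await ollama.isRunning())) {
        await ollama.startServer();
    }
    await ollama.pullModel(DEFAULT_EMBEDDING_MODEL);
    const vector = await ollama.getEmbedding("export class Logger {}");
    // vector is number[]; getEmbedding returns [] and logs via Logger on failure.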
@@ -5,14 +5,14 @@ import * as fsSync from "node:fs";
  import * as path from "node:path";
  import { promisify } from "node:util";
  import * as crypto from "node:crypto";
+ import { Logger } from "./services/logger.js";
  const execAsync = promisify(exec);
  const PROJECT_ROOT = process.cwd();
  const OPENCODE_DIR = path.join(PROJECT_ROOT, ".opencode");
  const CACHE_DIR = path.join(OPENCODE_DIR, "cache");
  // Internal logging
  function log(message, data) {
-     // console.error is used here for internal debugging as standard console.log may interfere with tool outputs
-     console.error(`[Autognosis] ${message}`, data || '');
+     Logger.log("Autognosis", message, data);
  }
  // =============================================================================
  // HELPERS
@@ -5,6 +5,7 @@ import * as fsSync from "node:fs";
  import * as path from "node:path";
  import { promisify } from "node:util";
  import * as crypto from "node:crypto";
+ import { Logger } from "./services/logger.js";
  const execAsync = promisify(exec);
  const PROJECT_ROOT = process.cwd();
  const OPENCODE_DIR = path.join(PROJECT_ROOT, ".opencode");
@@ -12,7 +13,7 @@ const TEST_DIR = path.join(OPENCODE_DIR, "tests");
  const BENCHMARK_DIR = path.join(OPENCODE_DIR, "benchmarks");
  // Internal logging
  function log(message, data) {
-     console.error(`[Testing] ${message}`, data || '');
+     Logger.log("Testing", message, data);
  }
  // =============================================================================
  // HELPERS
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "opencode-autognosis",
-   "version": "2.0.0",
+   "version": "2.0.2",
    "description": "Advanced RAG-powered codebase awareness for OpenCode agents. Features Chunk Cards synthesis, hierarchical reasoning, ActiveSet working memory, and performance optimization for enterprise-scale repositories.",
    "type": "module",
    "main": "dist/index.js",
@@ -46,8 +46,12 @@
    "devDependencies": {
      "@opencode-ai/plugin": "^1.0.162",
      "@opencode-ai/sdk": "^1.1.40",
+     "@types/better-sqlite3": "^7.6.13",
      "@types/node": "^20.0.0",
      "typescript": "^5.0.0",
      "zod": "^4.3.6"
+   },
+   "dependencies": {
+     "better-sqlite3": "^12.6.2"
    }
  }
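better-sqlite3 becomes a runtime dependency in 2.0.2 (with its types added to devDependencies), but none of the diffed files above reference it, so the snippet below only illustrates the library's standard synchronous API; the database path and schema are hypothetical, not from the package.

    import Database from "better-sqlite3";

    // Hypothetical local cache; table name and columns are illustrative only.
    const db = new Database(".opencode/cache/autognosis.db");
    db.prepare("CREATE TABLE IF NOT EXISTS embeddings (hash TEXT PRIMARY KEY, vector TEXT)").run();
    db.prepare("INSERT OR REPLACE INTO embeddings (hash, vector) VALUES (?, ?)")
      .run("abc123", JSON.stringify([0.1, 0.2, 0.3]));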