@iceinvein/code-intelligence-mcp 1.0.1 → 1.0.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/bin/run.js +35 -4
  2. package/package.json +1 -1
package/bin/run.js CHANGED
@@ -44,6 +44,19 @@ if (os.platform() === 'darwin' && !env.EMBEDDINGS_DEVICE) {
44
44
  env.EMBEDDINGS_DEVICE = 'cpu';
45
45
  }
46
46
 
47
+ // 6. Limit CPU threads for embedding model (helps reduce CPU usage)
48
+ // For example, set to 50% of available cores: EMBEDDINGS_MAX_THREADS=4
49
+ // Default is 0 (auto, use all available CPUs)
50
+ if (!env.EMBEDDINGS_MAX_THREADS) {
51
+ // Set a sensible default based on CPU count to avoid 100% CPU usage
52
+ const cpuCount = os.cpus().length;
53
+ // Use 50% of available CPUs, minimum 2, maximum 8
54
+ const defaultThreads = Math.max(2, Math.min(8, Math.floor(cpuCount * 0.5)));
55
+ env.EMBEDDINGS_MAX_THREADS = defaultThreads.toString();
56
+ console.error(`[code-intelligence-mcp] Setting EMBEDDINGS_MAX_THREADS=${defaultThreads} (${cpuCount} CPUs detected)`);
57
+ console.error('[code-intelligence-mcp] Set EMBEDDINGS_MAX_THREADS=0 to use all CPUs or customize as needed');
58
+ }
59
+
47
60
  // 5. Set persistence paths to be inside the project (BASE_DIR/.cimcp)
48
61
  // if not explicitly overridden. This keeps indexes local to the project.
49
62
  const cimcpDir = path.join(env.BASE_DIR, '.cimcp');
@@ -62,15 +75,33 @@ if (!env.DB_PATH) env.DB_PATH = path.join(cimcpDir, 'code-intelligence.db');
62
75
  if (!env.VECTOR_DB_PATH) env.VECTOR_DB_PATH = path.join(cimcpDir, 'vectors');
63
76
  if (!env.TANTIVY_INDEX_PATH) env.TANTIVY_INDEX_PATH = path.join(cimcpDir, 'tantivy-index');
64
77
 
65
- // Also set model dir to local project cache if not set globally
78
+ // Also set model dir - use GLOBAL cache to avoid downloading models for every project
79
+ // Models are shared across projects, but indexes remain local
66
80
  if (!env.EMBEDDINGS_MODEL_DIR) {
67
- env.EMBEDDINGS_MODEL_DIR = path.join(cimcpDir, 'embeddings-model');
68
- // Ensure model dir exists, otherwise the Rust server might complain
81
+ // Use platform-appropriate global cache location
82
+ if (os.platform() === 'darwin') {
83
+ // macOS: ~/Library/Application Support/cimcp/embeddings-cache
84
+ env.EMBEDDINGS_MODEL_DIR = path.join(os.homedir(), 'Library', 'Application Support', 'cimcp', 'embeddings-cache');
85
+ } else if (os.platform() === 'linux') {
86
+ // Linux: ~/.local/share/cimcp/embeddings-cache
87
+ const xdgDataHome = process.env.XDG_DATA_HOME || path.join(os.homedir(), '.local', 'share');
88
+ env.EMBEDDINGS_MODEL_DIR = path.join(xdgDataHome, 'cimcp', 'embeddings-cache');
89
+ } else if (os.platform() === 'win32') {
90
+ // Windows: %APPDATA%/cimcp/embeddings-cache
91
+ env.EMBEDDINGS_MODEL_DIR = path.join(process.env.APPDATA || path.join(os.homedir(), 'AppData', 'Roaming'), 'cimcp', 'embeddings-cache');
92
+ } else {
93
+ // Fallback to ~/.cimcp/embeddings-cache
94
+ env.EMBEDDINGS_MODEL_DIR = path.join(os.homedir(), '.cimcp', 'embeddings-cache');
95
+ }
96
+
97
+ // Ensure global model cache directory exists
69
98
  if (!fs.existsSync(env.EMBEDDINGS_MODEL_DIR)) {
70
99
  try {
71
100
  fs.mkdirSync(env.EMBEDDINGS_MODEL_DIR, { recursive: true });
72
101
  } catch (e) {
73
- console.error(`Failed to create embeddings directory at ${env.EMBEDDINGS_MODEL_DIR}:`, e.message);
102
+ console.error(`Failed to create global embeddings cache at ${env.EMBEDDINGS_MODEL_DIR}:`, e.message);
103
+ console.warn('Falling back to local project cache for this session');
104
+ env.EMBEDDINGS_MODEL_DIR = path.join(cimcpDir, 'embeddings-model');
74
105
  }
75
106
  }
76
107
  }
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@iceinvein/code-intelligence-mcp",
3
- "version": "1.0.1",
3
+ "version": "1.0.2",
4
4
  "description": "Code Intelligence MCP Server - Smart context for your LLM coding agent",
5
5
  "bin": {
6
6
  "code-intelligence-mcp": "bin/run.js"