archicore 0.3.8 → 0.4.0

This diff shows the content of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only.
@@ -600,7 +600,7 @@ async function handleIndexCommand() {
  const codeIndex = new CodeIndex(state.projectPath);
  // Check whether the project is bundled (i.e. contains source maps)
  const isBundled = await codeIndex.isBundledProject();
- let asts;
+ let asts = new Map();
  let virtualFileContents = [];
  if (isBundled) {
  // Extract the sources from the source maps
@@ -608,17 +608,31 @@ async function handleIndexCommand() {
  const extractionResult = await codeIndex.extractFromSourceMaps();
  if (extractionResult.files.length > 0) {
  indexSpinner.update(`Extracted ${extractionResult.files.length} files from source maps, parsing...`);
- // Parse the virtual files
+ // Parse the virtual files from the source maps
  asts = codeIndex.parseVirtualFiles(extractionResult.files);
  // Save the virtual file contents for upload
  virtualFileContents = extractionResult.files.map(f => [f.path, f.content]);
- indexSpinner.update(`Parsed ${asts.size} files from source maps, extracting symbols...`);
+ indexSpinner.update(`Parsed ${asts.size} virtual files from source maps`);
  }
- else {
- // Fallback: parse the regular files
- indexSpinner.update('No extractable sources in source maps, parsing regular files...');
- asts = await codeIndex.parseProject();
+ // IMPORTANT: also parse PHP/Python/other backend files (not the JS bundles)
+ indexSpinner.update('Also parsing backend files (PHP, Python, etc.)...');
+ const backendAsts = await codeIndex.parseProject();
+ // Merge: source maps + backend files (excluding bundled JS)
+ let addedBackendFiles = 0;
+ for (const [path, ast] of backendAsts) {
+ // Skip bundled JS files - they were already extracted from the source maps
+ const isBundledJs = path.match(/\.(min\.js|bundle\.js)$/) ||
+ path.match(/\/(dist|build|vendor)\//i) ||
+ path.match(/\.(js|ts)$/) && path.match(/\d+\.[a-f0-9]+\.(js|ts)$/i);
+ if (!isBundledJs && !asts.has(path)) {
+ asts.set(path, ast);
+ addedBackendFiles++;
+ }
  }
+ if (addedBackendFiles > 0) {
+ indexSpinner.update(`Added ${addedBackendFiles} backend files (PHP, Python, etc.)`);
+ }
+ indexSpinner.update(`Total: ${asts.size} files (source maps + backend)`);
  }
  else {
  // Regular project - parse the files directly
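The bundled-JS heuristic in the hunk above is easiest to read in isolation. A minimal sketch (the regexes are taken verbatim from the hunk; the sample paths are illustrative, not from the package):

    // Sketch of the bundled-JS check above; sample paths are illustrative.
    function isBundledJs(path) {
      return Boolean(path.match(/\.(min\.js|bundle\.js)$/) ||
        path.match(/\/(dist|build|vendor)\//i) ||
        (path.match(/\.(js|ts)$/) && path.match(/\d+\.[a-f0-9]+\.(js|ts)$/i)));
    }
    isBundledJs('app.min.js');        // true  (minified bundle)
    isBundledJs('src/dist/main.js');  // true  (build output)
    isBundledJs('main.123.abc9.js');  // true  (content-hashed chunk)
    isBundledJs('api/server.php');    // false (backend file, kept)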
@@ -635,25 +649,38 @@ async function handleIndexCommand() {
  const isLargeProject = symbols.size > 50000 || asts.size > 1000;
  // Read the file contents (with an optimization for large projects)
  let fileContents = [];
- // If there are virtual files from source maps, use them
+ const virtualFilePaths = new Set(virtualFileContents.map(([path]) => path));
+ // Add the virtual files from the source maps
  if (virtualFileContents.length > 0) {
- indexSpinner.update('Using extracted source files...');
- fileContents = virtualFileContents;
+ indexSpinner.update('Using extracted source files from source maps...');
+ fileContents = [...virtualFileContents];
  }
- else if (!isLargeProject) {
- indexSpinner.update('Reading file contents...');
+ // Also read backend files (PHP, Python, etc.) that are not in the source maps
+ if (!isLargeProject) {
+ indexSpinner.update('Reading backend file contents (PHP, Python, etc.)...');
+ let backendFilesRead = 0;
  for (const [filePath] of asts) {
+ // Skip files already provided by the source maps
+ if (virtualFilePaths.has(filePath))
+ continue;
+ // Skip bundled JS files
+ if (filePath.match(/\d+\.[a-f0-9]+\.(js|ts)$/i))
+ continue;
  try {
  const fullPath = pathModule.default.isAbsolute(filePath)
  ? filePath
  : pathModule.default.join(state.projectPath, filePath);
  const content = await fs.readFile(fullPath, 'utf-8');
  fileContents.push([filePath, content]);
+ backendFilesRead++;
  }
  catch {
  // Ignore read errors for individual files
  }
  }
+ if (backendFilesRead > 0) {
+ indexSpinner.update(`Read ${backendFilesRead} additional backend files`);
+ }
  }
  else {
  indexSpinner.update('Large project detected, skipping file contents for faster upload...');
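The dedup in this hunk hinges on building a Set of virtual file paths once, then doing O(1) membership checks inside the read loop. In miniature (paths are illustrative):

    const virtualFileContents = [['src/app.js', '...'], ['src/util.js', '...']];
    const virtualFilePaths = new Set(virtualFileContents.map(([path]) => path));
    for (const filePath of ['src/app.js', 'api/index.php']) {
      if (virtualFilePaths.has(filePath)) continue; // already extracted from source maps
      console.log('read from disk:', filePath);     // prints api/index.php only
    }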
@@ -683,7 +710,9 @@ async function handleIndexCommand() {
  // Detailed error handling
  indexSpinner.fail('Indexing failed');
  // Debug output to see what's in the result
- console.log(colors.dim(` [DEBUG] uploadResult: ${JSON.stringify(uploadResult, null, 2)}`));
+ if (process.env.DEBUG) {
+ console.log(colors.dim(` [DEBUG] uploadResult: ${JSON.stringify(uploadResult, null, 2)}`));
+ }
  if (uploadResult.errorDetails) {
  const { code, message, suggestion, technicalDetails } = uploadResult.errorDetails;
  console.log();
@@ -868,7 +897,7 @@ async function handleAnalyzeCommand(args) {
  async function handleSearchCommand(query) {
  if (!state.projectId) {
  printError('No project selected');
- printInfo('Use /projects select <id> first');
+ printInfo('Use /index first to register the project');
  return;
  }
  if (!query) {
@@ -915,7 +944,7 @@ async function handleSearchCommand(query) {
  async function handleQuery(query) {
  if (!state.projectId) {
  printError('No project selected');
- printInfo('Use /projects to list and select a project first');
+ printInfo('Use /index first to register the project');
  return;
  }
  // Save user message to history
@@ -7,6 +7,13 @@
  * - Detailed error handling
  */
  import { loadConfig } from './config.js';
+ // Debug logging (only when the DEBUG env var is set)
+ const DEBUG = process.env.DEBUG === 'true' || process.env.DEBUG === '1';
+ function debugLog(message) {
+ if (DEBUG) {
+ console.log(`[DEBUG]${message}`);
+ }
+ }
  // Limits for chunked upload (tuned for very large projects and unstable connections)
  const MAX_PAYLOAD_SIZE = 3 * 1024 * 1024; // 3MB per chunk (reduced for reliability on slow connections)
  const MAX_SYMBOLS_PER_CHUNK = 1500; // Fewer symbols per chunk for stability
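A usage sketch for the env-gated logger introduced here (the shell invocation and output are illustrative):

    // DEBUG=1 npx archicore index   <- enables debug output for one run
    debugLog(' fetchWithRetry: waiting 2s before attempt 2...');
    // With DEBUG unset: prints nothing.
    // With DEBUG=1: prints "[DEBUG] fetchWithRetry: waiting 2s before attempt 2..."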
@@ -190,7 +197,7 @@ async function fetchWithRetry(url, options, timeout = UPLOAD_TIMEOUT, maxRetries
  if (attempt < maxRetries) {
  // 2s, 4s, 8s, 16s, 32s, up to 60s max
  const delay = Math.min(2000 * Math.pow(2, attempt - 1), 60000);
- console.log(`[DEBUG] fetchWithRetry: waiting ${delay / 1000}s before attempt ${attempt + 1}...`);
+ debugLog(` fetchWithRetry: waiting ${delay / 1000}s before attempt ${attempt + 1}...`);
  await new Promise(resolve => setTimeout(resolve, delay));
  }
  }
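The retry delays are fully determined by the formula in this hunk; evaluated for the first few attempts:

    // Math.min(2000 * Math.pow(2, attempt - 1), 60000) yields:
    // attempt 1 -> 2000 ms, 2 -> 4000 ms, 3 -> 8000 ms, 4 -> 16000 ms,
    // 5 -> 32000 ms, 6 and above -> capped at 60000 ms
    for (let attempt = 1; attempt <= 6; attempt++) {
      console.log(attempt, Math.min(2000 * Math.pow(2, attempt - 1), 60000));
    }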
@@ -215,11 +222,11 @@ export async function uploadIndexData(projectId, data, onProgress) {
  });
  // For small projects - a regular single-request upload
  if (!isLargeProject) {
- console.log(`[DEBUG] Using single request upload for ${data.symbols.length} symbols`);
+ debugLog(` Using single request upload for ${data.symbols.length} symbols`);
  return uploadSingleRequest(url, projectId, data, config.accessToken || '', onProgress);
  }
  // For large projects - chunked upload
- console.log(`[DEBUG] Using chunked upload for large project (${data.symbols.length} symbols)`);
+ debugLog(` Using chunked upload for large project (${data.symbols.length} symbols)`);
  return uploadChunked(url, projectId, data, config.accessToken || '', onProgress);
  }
  /**
@@ -243,11 +250,11 @@ async function uploadSingleRequest(url, projectId, data, accessToken, onProgress
  });
  if (!response.ok) {
  const errorBody = await response.json().catch(() => ({}));
- console.log(`[DEBUG] HTTP error ${response.status}: ${JSON.stringify(errorBody)}`);
+ debugLog(` HTTP error ${response.status}: ${JSON.stringify(errorBody)}`);
  const errorDetails = analyzeHttpError(response.status, errorBody);
  // If the payload is too large - try chunked upload
  if (response.status === 413) {
- console.log(`[DEBUG] Payload too large, trying chunked upload`);
+ debugLog(` Payload too large, trying chunked upload`);
  return uploadChunked(url, projectId, data, accessToken, onProgress);
  }
  return {
@@ -269,13 +276,13 @@ async function uploadSingleRequest(url, projectId, data, accessToken, onProgress
  };
  }
  catch (error) {
- console.log(`[DEBUG] uploadSingleRequest caught error: ${error}`);
+ debugLog(` uploadSingleRequest caught error: ${error}`);
  const errorDetails = analyzeNetworkError(error);
- console.log(`[DEBUG] Analyzed error: ${JSON.stringify(errorDetails)}`);
+ debugLog(` Analyzed error: ${JSON.stringify(errorDetails)}`);
  // If the error is size-related - try chunked upload
  if (errorDetails.code === 'CONNECTION_RESET' ||
  errorDetails.code === 'PAYLOAD_TOO_LARGE') {
- console.log(`[DEBUG] Trying chunked upload due to ${errorDetails.code}`);
+ debugLog(` Trying chunked upload due to ${errorDetails.code}`);
  return uploadChunked(url, projectId, data, accessToken, onProgress);
  }
  return {
@@ -293,7 +300,7 @@ async function uploadChunked(baseUrl, projectId, data, accessToken, onProgress)
  // For very large projects, skip fileContents (saves traffic and time)
  const isVeryLargeProject = data.symbols.length > VERY_LARGE_PROJECT_SYMBOLS;
  if (isVeryLargeProject) {
- console.log(`[DEBUG] Very large project (${data.symbols.length} symbols), skipping fileContents upload`);
+ debugLog(` Very large project (${data.symbols.length} symbols), skipping fileContents upload`);
  }
  // Split the data into chunks
  const symbolChunks = chunkArray(data.symbols, MAX_SYMBOLS_PER_CHUNK);
@@ -301,7 +308,7 @@ async function uploadChunked(baseUrl, projectId, data, accessToken, onProgress)
  const fileChunks = isVeryLargeProject ? [] : chunkArray(data.fileContents, MAX_FILES_PER_CHUNK);
  const totalChunks = symbolChunks.length + astChunks.length + fileChunks.length + 1; // +1 for graph
  let completedChunks = 0;
- console.log(`[DEBUG] Chunked upload: ${symbolChunks.length} symbol chunks, ${astChunks.length} AST chunks, ${fileChunks.length} file chunks`);
+ debugLog(` Chunked upload: ${symbolChunks.length} symbol chunks, ${astChunks.length} AST chunks, ${fileChunks.length} file chunks`);
  onProgress?.({
  phase: 'uploading',
  current: 0,
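chunkArray's body is not part of this diff; a plausible minimal implementation, for following the chunk counts above (an assumption, not the package's actual code):

    // Assumed shape of chunkArray; the real implementation is not shown in this diff.
    function chunkArray(items, size) {
      const chunks = [];
      for (let i = 0; i < items.length; i += size) {
        chunks.push(items.slice(i, i + size));
      }
      return chunks;
    }
    // e.g. 4000 symbols with MAX_SYMBOLS_PER_CHUNK = 1500 -> chunks of 1500, 1500, 1000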
@@ -328,13 +335,13 @@ async function uploadChunked(baseUrl, projectId, data, accessToken, onProgress)
  }
  catch (initError) {
  // The server doesn't support chunked upload or is unreachable
- console.log(`[DEBUG] Chunked upload not supported, falling back to minimal data`);
+ debugLog(` Chunked upload not supported, falling back to minimal data`);
  return uploadMinimalData(baseUrl, projectId, data, accessToken, onProgress);
  }
  if (!initResponse.ok) {
  // Fallback: if the server doesn't support chunked upload,
  // send only the minimal data
- console.log(`[DEBUG] Init returned ${initResponse.status}, falling back to minimal data`);
+ debugLog(` Init returned ${initResponse.status}, falling back to minimal data`);
  return uploadMinimalData(baseUrl, projectId, data, accessToken, onProgress);
  }
  const initResult = await initResponse.json();
@@ -343,13 +350,13 @@ async function uploadChunked(baseUrl, projectId, data, accessToken, onProgress)
  const PARALLEL_UPLOADS = 3;
  // Helper for parallel upload with per-chunk retry
  async function uploadChunksParallel(chunks, chunkType, label) {
- console.log(`[DEBUG] Starting parallel upload of ${chunks.length} ${chunkType} chunks`);
+ debugLog(` Starting parallel upload of ${chunks.length} ${chunkType} chunks`);
  const failedChunks = [];
  const MAX_CHUNK_RETRIES = 5; // Increased for unstable connections (Debian etc.)
  for (let batch = 0; batch < chunks.length; batch += PARALLEL_UPLOADS) {
  const batchChunks = chunks.slice(batch, batch + PARALLEL_UPLOADS);
  const batchNum = Math.floor(batch / PARALLEL_UPLOADS) + 1;
- console.log(`[DEBUG] Uploading batch ${batchNum} (${batchChunks.length} chunks)`);
+ debugLog(` Uploading batch ${batchNum} (${batchChunks.length} chunks)`);
  // Upload each chunk with individual retry logic
  const results = await Promise.allSettled(batchChunks.map(async (chunk, idx) => {
  const chunkIndex = batch + idx;
@@ -357,17 +364,17 @@ async function uploadChunked(baseUrl, projectId, data, accessToken, onProgress)
  for (let retry = 0; retry < MAX_CHUNK_RETRIES; retry++) {
  try {
  await uploadChunk(config.serverUrl, projectId, uploadId, chunkType, chunkIndex, chunk, accessToken);
- console.log(`[DEBUG] Chunk ${chunkType}[${chunkIndex}] uploaded`);
+ debugLog(` Chunk ${chunkType}[${chunkIndex}] uploaded`);
  return { success: true, chunkIndex };
  }
  catch (error) {
  lastError = error instanceof Error ? error : new Error(String(error));
- console.log(`[DEBUG] Chunk ${chunkType}[${chunkIndex}] failed (attempt ${retry + 1}/${MAX_CHUNK_RETRIES}): ${lastError.message}`);
+ debugLog(` Chunk ${chunkType}[${chunkIndex}] failed (attempt ${retry + 1}/${MAX_CHUNK_RETRIES}): ${lastError.message}`);
  // Exponential backoff before retry (increased for unstable connections)
  if (retry < MAX_CHUNK_RETRIES - 1) {
  // More aggressive backoff: 2s, 4s, 8s, 16s, up to 30s max
  const delay = Math.min(2000 * Math.pow(2, retry), 30000);
- console.log(`[DEBUG] Waiting ${delay / 1000}s before retry ${retry + 2}...`);
+ debugLog(` Waiting ${delay / 1000}s before retry ${retry + 2}...`);
  await new Promise(resolve => setTimeout(resolve, delay));
  }
  }
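The surrounding hunks implement fixed-size batches with Promise.allSettled, so one failed chunk does not abort its batch. Reduced to the bare pattern (names here are illustrative, not the package's API):

    // Illustrative reduction of the batching pattern; uploadOne is a stand-in.
    async function uploadAll(chunks, uploadOne, parallel = 3) {
      const failed = [];
      for (let batch = 0; batch < chunks.length; batch += parallel) {
        const slice = chunks.slice(batch, batch + parallel);
        const results = await Promise.allSettled(slice.map((c, i) => uploadOne(c, batch + i)));
        results.forEach((r, i) => {
          if (r.status === 'rejected') failed.push(batch + i);
        });
      }
      return failed; // indices of chunks that never uploaded
    }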
@@ -385,7 +392,7 @@ async function uploadChunked(baseUrl, projectId, data, accessToken, onProgress)
  else {
  const chunkIndex = batch + i;
  failedChunks.push(chunkIndex);
- console.log(`[DEBUG] Chunk ${chunkType}[${chunkIndex}] failed permanently: ${result.reason}`);
+ debugLog(` Chunk ${chunkType}[${chunkIndex}] failed permanently: ${result.reason}`);
  }
  }
  completedChunks += batchSuccesses;
@@ -400,10 +407,10 @@ async function uploadChunked(baseUrl, projectId, data, accessToken, onProgress)
  throw new Error(`Too many chunks failed (${failedChunks.length}/${chunks.length}). Network may be unstable. Try again or check your connection.`);
  }
  }
- console.log(`[DEBUG] Finished uploading ${chunks.length} ${chunkType} chunks (${failedChunks.length} failed)`);
+ debugLog(` Finished uploading ${chunks.length} ${chunkType} chunks (${failedChunks.length} failed)`);
  // If any chunks failed, warn but continue if under threshold
  if (failedChunks.length > 0) {
- console.log(`[DEBUG] Warning: ${failedChunks.length} ${chunkType} chunks failed to upload`);
+ debugLog(` Warning: ${failedChunks.length} ${chunkType} chunks failed to upload`);
  }
  }
  // 2. Upload the ASTs in parallel
@@ -475,7 +482,7 @@ async function uploadChunked(baseUrl, projectId, data, accessToken, onProgress)
  async function uploadMinimalData(url, _projectId, data, accessToken, onProgress) {
  const symbolCount = data.symbols.length;
  const fileCount = data.asts.length;
- console.log(`[DEBUG] uploadMinimalData: ${symbolCount} symbols, ${fileCount} files`);
+ debugLog(` uploadMinimalData: ${symbolCount} symbols, ${fileCount} files`);
  onProgress?.({
  phase: 'uploading',
  current: 50,
@@ -491,9 +498,9 @@ async function uploadMinimalData(url, _projectId, data, accessToken, onProgress)
  statistics: data.statistics,
  // Without fileContents - that is the largest part
  };
- console.log(`[DEBUG] Minimal payload: ${minimalData.asts.length} ASTs, ${minimalData.symbols.length} symbols`);
+ debugLog(` Minimal payload: ${minimalData.asts.length} ASTs, ${minimalData.symbols.length} symbols`);
  try {
- console.log(`[DEBUG] Sending minimal data to ${url}`);
+ debugLog(` Sending minimal data to ${url}`);
  const response = await fetchWithRetry(url, {
  method: 'POST',
  headers: {
@@ -502,10 +509,10 @@ async function uploadMinimalData(url, _projectId, data, accessToken, onProgress)
  },
  body: JSON.stringify(minimalData),
  }, 180000, 2); // 3 minutes, 2 attempts
- console.log(`[DEBUG] Response status: ${response.status}`);
+ debugLog(` Response status: ${response.status}`);
  if (!response.ok) {
  const errorBody = await response.json().catch(() => ({}));
- console.log(`[DEBUG] Error body: ${JSON.stringify(errorBody)}`);
+ debugLog(` Error body: ${JSON.stringify(errorBody)}`);
  const errorDetails = analyzeHttpError(response.status, errorBody);
  return {
  success: false,
@@ -529,7 +536,7 @@ async function uploadMinimalData(url, _projectId, data, accessToken, onProgress)
  };
  }
  catch (error) {
- console.log(`[DEBUG] uploadMinimalData error: ${error}`);
+ debugLog(` uploadMinimalData error: ${error}`);
  const errorDetails = analyzeNetworkError(error);
  return {
  success: false,
package/dist/cli.js CHANGED
@@ -56,7 +56,7 @@ program
  provider: 'deepseek',
  model: 'deepseek-chat',
  temperature: 0.1,
- maxTokens: 4096
+ maxTokens: 8192
  },
  vectorStore: {
  url: process.env.QDRANT_URL || 'http://localhost:6333',
@@ -107,7 +107,7 @@ program
  provider: 'deepseek',
  model: 'deepseek-chat',
  temperature: 0.1,
- maxTokens: 4096
+ maxTokens: 8192
  },
  vectorStore: {
  url: process.env.QDRANT_URL || 'http://localhost:6333',
@@ -192,7 +192,7 @@ program
  provider: 'deepseek',
  model: 'deepseek-chat',
  temperature: 0.1,
- maxTokens: 4096
+ maxTokens: 8192
  },
  vectorStore: {
  url: process.env.QDRANT_URL || 'http://localhost:6333',
@@ -230,7 +230,7 @@ program
  provider: 'deepseek',
  model: 'deepseek-chat',
  temperature: 0.3,
- maxTokens: 4096
+ maxTokens: 8192
  },
  vectorStore: {
  url: process.env.QDRANT_URL || 'http://localhost:6333',
@@ -136,7 +136,12 @@ router.get('/verify/:userCode', (req, res) => {
  * Authorize device (called from web after user logs in)
  */
  router.post('/authorize', async (req, res) => {
- const { userCode, userId, accessToken, action } = req.body;
+ const { userCode, userId, action } = req.body;
+ // Get the access token from the cookie or the Authorization header (not from the body!)
+ // The web frontend sends credentials: 'include', which sends the httpOnly cookie
+ const accessToken = req.cookies?.archicore_token ||
+ req.headers.authorization?.substring(7) ||
+ req.body.accessToken; // Fallback to body for backwards compatibility
  if (!userCode) {
  res.status(400).json({ error: 'invalid_request', message: 'Missing user_code' });
  return;
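The precedence is cookie first, then Authorization header, then body. Note that substring(7) assumes a "Bearer " prefix; a sketch with that assumption made explicit (the startsWith guard is an illustration, not in the diff):

    // Mirrors the precedence above; the explicit Bearer check is added for clarity.
    function extractAccessToken(req) {
      if (req.cookies?.archicore_token) return req.cookies.archicore_token;
      const auth = req.headers.authorization;
      if (auth?.startsWith('Bearer ')) return auth.substring(7); // strip "Bearer "
      return req.body.accessToken; // legacy fallback
    }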
@@ -178,7 +178,7 @@ export class ProjectService {
  provider: 'deepseek',
  model: process.env.DEEPSEEK_MODEL || 'deepseek-chat',
  temperature: 0.1,
- maxTokens: 4096,
+ maxTokens: 8192,
  baseURL: 'https://api.deepseek.com'
  };
  // SemanticMemory - supports Jina (free) or OpenAI
@@ -80,7 +80,10 @@ export class FileUtils {
  const resolvedPath = resolve(rootDir);
  // Normalize for glob (use forward slashes on all platforms)
  const absoluteRootDir = resolvedPath.replace(/\\/g, '/');
- Logger.info(`getAllFiles: rootDir=${rootDir}, resolvedPath=${resolvedPath}, absoluteRootDir=${absoluteRootDir}`);
+ // Verbose output only in DEBUG mode
+ if (process.env.DEBUG) {
+ Logger.info(`getAllFiles: rootDir=${rootDir}, resolvedPath=${resolvedPath}`);
+ }
  // Check if directory exists and is accessible
  try {
  const dirStats = await stat(resolvedPath);
@@ -88,7 +91,7 @@ export class FileUtils {
  Logger.error(`getAllFiles: ${resolvedPath} is not a directory`);
  return [];
  }
- Logger.info(`getAllFiles: Directory exists and is accessible`);
+ // Directory exists - no need to log in production
  }
  catch (err) {
  Logger.error(`getAllFiles: Cannot access directory ${resolvedPath}: ${err}`);
@@ -107,6 +110,9 @@ export class FileUtils {
  'bower_components/**',
  '**/bower_components/**',
  'jspm_packages/**',
+ // PHP Composer dependencies
+ 'vendor/**',
+ '**/vendor/**',
  // Build/Generated output
  'dist/**',
  '**/dist/**',
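With this change, Composer's vendor/ directory is excluded the same way node_modules/ already is. A sketch of the effect, assuming the npm glob package's ignore option (the exact call-site options are not shown in this diff):

    // Sketch only: how vendor/** participates in the ignore list.
    import { glob } from 'glob';
    const files = await glob('**/*.{js,ts,php,py}', {
      cwd: '/path/to/project', // illustrative path
      ignore: ['node_modules/**', '**/node_modules/**',
               'vendor/**', '**/vendor/**'], // PHP Composer deps now skipped
    });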
@@ -168,7 +174,10 @@ export class FileUtils {
  });
  files.push(...matches);
  }
- Logger.info(`getAllFiles: glob found ${files.length} raw matches`);
+ // Verbose logging only in DEBUG mode
+ if (process.env.DEBUG) {
+ Logger.info(`getAllFiles: found ${files.length} files`);
+ }
  // If no files found, list directory contents for debugging
  if (files.length === 0) {
  try {
@@ -207,12 +216,9 @@ export class FileUtils {
  // Skip files we can't stat
  }
  }
- Logger.info(`getAllFiles: returning ${filteredFiles.length} files after filtering`);
- if (filteredFiles.length > 0 && filteredFiles.length <= 20) {
- Logger.info(`Files found: ${filteredFiles.join(', ')}`);
- }
- else if (filteredFiles.length > 20) {
- Logger.info(`First 20 files: ${filteredFiles.slice(0, 20).join(', ')}`);
+ // Only log the file list in DEBUG mode
+ if (process.env.DEBUG && filteredFiles.length > 0) {
+ Logger.info(`getAllFiles: returning ${filteredFiles.length} files`);
  }
  return filteredFiles;
  }
@@ -7,7 +7,8 @@ export var LogLevel;
  LogLevel[LogLevel["ERROR"] = 3] = "ERROR";
  })(LogLevel || (LogLevel = {}));
  export class Logger {
- static level = LogLevel.INFO;
+ // Default to WARN in production; INFO/DEBUG only when the DEBUG env var is set
+ static level = process.env.DEBUG ? LogLevel.DEBUG : LogLevel.WARN;
  static setLevel(level) {
  this.level = level;
  }
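Given the ordering implied by ERROR = 3 above (DEBUG < INFO < WARN < ERROR), the new default suppresses info-level output unless DEBUG is set:

    // With DEBUG unset: level = WARN, so Logger.info(...) is silent.
    // With DEBUG=1 at startup: level = DEBUG, so everything prints.
    Logger.info('getAllFiles: found 42 files'); // suppressed by default now
    Logger.setLevel(LogLevel.INFO);             // opt back in programmatically
    Logger.info('getAllFiles: found 42 files'); // printed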
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "archicore",
- "version": "0.3.8",
+ "version": "0.4.0",
  "description": "AI Software Architect - code analysis, impact prediction, semantic search",
  "main": "dist/index.js",
  "type": "module",