archicore 0.3.9 → 0.4.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -710,7 +710,9 @@ async function handleIndexCommand() {
  // Detailed error handling
  indexSpinner.fail('Indexing failed');
  // Debug output to see what's in the result
- console.log(colors.dim(` [DEBUG] uploadResult: ${JSON.stringify(uploadResult, null, 2)}`));
+ if (process.env.DEBUG) {
+ console.log(colors.dim(` [DEBUG] uploadResult: ${JSON.stringify(uploadResult, null, 2)}`));
+ }
  if (uploadResult.errorDetails) {
  const { code, message, suggestion, technicalDetails } = uploadResult.errorDetails;
  console.log();
@@ -7,6 +7,13 @@
  * - Detailed error handling
  */
  import { loadConfig } from './config.js';
+ // Debug logging (only when DEBUG env var is set)
+ const DEBUG = process.env.DEBUG === 'true' || process.env.DEBUG === '1';
+ function debugLog(message) {
+ if (DEBUG) {
+ console.log(`[DEBUG]${message}`);
+ }
+ }
  // Limits for chunked upload (tuned for very large projects and unstable connections)
  const MAX_PAYLOAD_SIZE = 3 * 1024 * 1024; // 3MB per chunk (reduced for reliability on slow connections)
  const MAX_SYMBOLS_PER_CHUNK = 1500; // Fewer symbols per chunk for stability
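
The practical effect of the new helper above is that every former [DEBUG] console.log becomes a no-op unless the process is started with the DEBUG environment variable. A minimal standalone sketch of the gate (the call at the bottom is made up for illustration):

const DEBUG = process.env.DEBUG === 'true' || process.env.DEBUG === '1';

function debugLog(message) {
  // Silent by default; prints only when DEBUG=1 or DEBUG=true is exported
  if (DEBUG) {
    console.log(`[DEBUG]${message}`);
  }
}

debugLog(' indexing 1200 files'); // no output normally, "[DEBUG] indexing 1200 files" under DEBUG=1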
@@ -190,7 +197,7 @@ async function fetchWithRetry(url, options, timeout = UPLOAD_TIMEOUT, maxRetries
  if (attempt < maxRetries) {
  // 2s, 4s, 8s, 16s, 32s, up to 60s max
  const delay = Math.min(2000 * Math.pow(2, attempt - 1), 60000);
- console.log(`[DEBUG] fetchWithRetry: waiting ${delay / 1000}s before attempt ${attempt + 1}...`);
+ debugLog(` fetchWithRetry: waiting ${delay / 1000}s before attempt ${attempt + 1}...`);
  await new Promise(resolve => setTimeout(resolve, delay));
  }
  }
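
The backoff schedule in this hunk doubles from 2s and caps at 60s. A self-contained sketch of the same retry loop, assuming it wraps the global fetch; fetchWithBackoff is a hypothetical name for the sketch, not the package's fetchWithRetry, whose real signature also takes timeout and maxRetries per the hunk header:

async function fetchWithBackoff(url, options = {}, maxRetries = 5) {
  let lastError;
  for (let attempt = 1; attempt <= maxRetries; attempt++) {
    try {
      return await fetch(url, options);
    } catch (error) {
      lastError = error;
      if (attempt < maxRetries) {
        // 2s, 4s, 8s, 16s, 32s, capped at 60s - the schedule shown above
        const delay = Math.min(2000 * Math.pow(2, attempt - 1), 60000);
        await new Promise(resolve => setTimeout(resolve, delay));
      }
    }
  }
  throw lastError;
}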
@@ -215,11 +222,11 @@ export async function uploadIndexData(projectId, data, onProgress) {
  });
  // For small projects - a regular single upload
  if (!isLargeProject) {
- console.log(`[DEBUG] Using single request upload for ${data.symbols.length} symbols`);
+ debugLog(` Using single request upload for ${data.symbols.length} symbols`);
  return uploadSingleRequest(url, projectId, data, config.accessToken || '', onProgress);
  }
  // For large projects - chunked upload
- console.log(`[DEBUG] Using chunked upload for large project (${data.symbols.length} symbols)`);
+ debugLog(` Using chunked upload for large project (${data.symbols.length} symbols)`);
  return uploadChunked(url, projectId, data, config.accessToken || '', onProgress);
  }
  /**
@@ -243,11 +250,11 @@ async function uploadSingleRequest(url, projectId, data, accessToken, onProgress
  });
  if (!response.ok) {
  const errorBody = await response.json().catch(() => ({}));
- console.log(`[DEBUG] HTTP error ${response.status}: ${JSON.stringify(errorBody)}`);
+ debugLog(` HTTP error ${response.status}: ${JSON.stringify(errorBody)}`);
  const errorDetails = analyzeHttpError(response.status, errorBody);
  // If the payload is too large - try chunked upload
  if (response.status === 413) {
- console.log(`[DEBUG] Payload too large, trying chunked upload`);
+ debugLog(` Payload too large, trying chunked upload`);
  return uploadChunked(url, projectId, data, accessToken, onProgress);
  }
  return {
@@ -269,13 +276,13 @@ async function uploadSingleRequest(url, projectId, data, accessToken, onProgress
  };
  }
  catch (error) {
- console.log(`[DEBUG] uploadSingleRequest caught error: ${error}`);
+ debugLog(` uploadSingleRequest caught error: ${error}`);
  const errorDetails = analyzeNetworkError(error);
- console.log(`[DEBUG] Analyzed error: ${JSON.stringify(errorDetails)}`);
+ debugLog(` Analyzed error: ${JSON.stringify(errorDetails)}`);
  // If the error is size-related - try chunked upload
  if (errorDetails.code === 'CONNECTION_RESET' ||
  errorDetails.code === 'PAYLOAD_TOO_LARGE') {
- console.log(`[DEBUG] Trying chunked upload due to ${errorDetails.code}`);
+ debugLog(` Trying chunked upload due to ${errorDetails.code}`);
  return uploadChunked(url, projectId, data, accessToken, onProgress);
  }
  return {
@@ -293,7 +300,7 @@ async function uploadChunked(baseUrl, projectId, data, accessToken, onProgress)
  // For very large projects we skip fileContents (saves traffic and time)
  const isVeryLargeProject = data.symbols.length > VERY_LARGE_PROJECT_SYMBOLS;
  if (isVeryLargeProject) {
- console.log(`[DEBUG] Very large project (${data.symbols.length} symbols), skipping fileContents upload`);
+ debugLog(` Very large project (${data.symbols.length} symbols), skipping fileContents upload`);
  }
  // Split the data into chunks
  const symbolChunks = chunkArray(data.symbols, MAX_SYMBOLS_PER_CHUNK);
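
chunkArray is called here and below but its body is not part of this diff; a minimal implementation consistent with these call sites might look like the following (an inferred sketch, not the package's code):

// Hypothetical helper, inferred from its call sites in the diff
function chunkArray(items, chunkSize) {
  const chunks = [];
  for (let i = 0; i < items.length; i += chunkSize) {
    chunks.push(items.slice(i, i + chunkSize));
  }
  return chunks;
}
// e.g. 4000 symbols with MAX_SYMBOLS_PER_CHUNK = 1500 yields chunks of 1500, 1500 and 1000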
@@ -301,7 +308,7 @@ async function uploadChunked(baseUrl, projectId, data, accessToken, onProgress)
  const fileChunks = isVeryLargeProject ? [] : chunkArray(data.fileContents, MAX_FILES_PER_CHUNK);
  const totalChunks = symbolChunks.length + astChunks.length + fileChunks.length + 1; // +1 for graph
  let completedChunks = 0;
- console.log(`[DEBUG] Chunked upload: ${symbolChunks.length} symbol chunks, ${astChunks.length} AST chunks, ${fileChunks.length} file chunks`);
+ debugLog(` Chunked upload: ${symbolChunks.length} symbol chunks, ${astChunks.length} AST chunks, ${fileChunks.length} file chunks`);
  onProgress?.({
  phase: 'uploading',
  current: 0,
@@ -328,13 +335,13 @@ async function uploadChunked(baseUrl, projectId, data, accessToken, onProgress)
  }
  catch (initError) {
  // The server doesn't support chunked upload or is unavailable
- console.log(`[DEBUG] Chunked upload not supported, falling back to minimal data`);
+ debugLog(` Chunked upload not supported, falling back to minimal data`);
  return uploadMinimalData(baseUrl, projectId, data, accessToken, onProgress);
  }
  if (!initResponse.ok) {
  // Fallback: if the server doesn't support chunked upload,
  // send only the minimal data
- console.log(`[DEBUG] Init returned ${initResponse.status}, falling back to minimal data`);
+ debugLog(` Init returned ${initResponse.status}, falling back to minimal data`);
  return uploadMinimalData(baseUrl, projectId, data, accessToken, onProgress);
  }
  const initResult = await initResponse.json();
@@ -343,13 +350,13 @@ async function uploadChunked(baseUrl, projectId, data, accessToken, onProgress)
  const PARALLEL_UPLOADS = 3;
  // Helper for parallel upload with per-chunk retry
  async function uploadChunksParallel(chunks, chunkType, label) {
- console.log(`[DEBUG] Starting parallel upload of ${chunks.length} ${chunkType} chunks`);
+ debugLog(` Starting parallel upload of ${chunks.length} ${chunkType} chunks`);
  const failedChunks = [];
  const MAX_CHUNK_RETRIES = 5; // Increased for unstable connections (Debian etc.)
  for (let batch = 0; batch < chunks.length; batch += PARALLEL_UPLOADS) {
  const batchChunks = chunks.slice(batch, batch + PARALLEL_UPLOADS);
  const batchNum = Math.floor(batch / PARALLEL_UPLOADS) + 1;
- console.log(`[DEBUG] Uploading batch ${batchNum} (${batchChunks.length} chunks)`);
+ debugLog(` Uploading batch ${batchNum} (${batchChunks.length} chunks)`);
  // Upload each chunk with individual retry logic
  const results = await Promise.allSettled(batchChunks.map(async (chunk, idx) => {
  const chunkIndex = batch + idx;
@@ -357,17 +364,17 @@ async function uploadChunked(baseUrl, projectId, data, accessToken, onProgress)
  for (let retry = 0; retry < MAX_CHUNK_RETRIES; retry++) {
  try {
  await uploadChunk(config.serverUrl, projectId, uploadId, chunkType, chunkIndex, chunk, accessToken);
- console.log(`[DEBUG] Chunk ${chunkType}[${chunkIndex}] uploaded`);
+ debugLog(` Chunk ${chunkType}[${chunkIndex}] uploaded`);
  return { success: true, chunkIndex };
  }
  catch (error) {
  lastError = error instanceof Error ? error : new Error(String(error));
- console.log(`[DEBUG] Chunk ${chunkType}[${chunkIndex}] failed (attempt ${retry + 1}/${MAX_CHUNK_RETRIES}): ${lastError.message}`);
+ debugLog(` Chunk ${chunkType}[${chunkIndex}] failed (attempt ${retry + 1}/${MAX_CHUNK_RETRIES}): ${lastError.message}`);
  // Exponential backoff before retry (increased for unstable connections)
  if (retry < MAX_CHUNK_RETRIES - 1) {
  // More aggressive backoff: 2s, 4s, 8s, 16s, up to 30s max
  const delay = Math.min(2000 * Math.pow(2, retry), 30000);
- console.log(`[DEBUG] Waiting ${delay / 1000}s before retry ${retry + 2}...`);
+ debugLog(` Waiting ${delay / 1000}s before retry ${retry + 2}...`);
  await new Promise(resolve => setTimeout(resolve, delay));
  }
  }
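
Across these hunks the upload loop sends PARALLEL_UPLOADS chunks at a time via Promise.allSettled, each with its own exponential-backoff retry, and records the indices that never succeed. A condensed sketch of that batching pattern (uploadOne is a hypothetical stand-in for the package's uploadChunk):

async function uploadInBatches(chunks, uploadOne, parallel = 3) {
  const failed = [];
  for (let batch = 0; batch < chunks.length; batch += parallel) {
    const slice = chunks.slice(batch, batch + parallel);
    // allSettled lets one failed chunk surface without aborting its batch
    const results = await Promise.allSettled(
      slice.map((chunk, idx) => uploadOne(chunk, batch + idx))
    );
    results.forEach((result, idx) => {
      if (result.status === 'rejected') failed.push(batch + idx);
    });
  }
  return failed; // indices of chunks that never made it
}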
@@ -385,7 +392,7 @@ async function uploadChunked(baseUrl, projectId, data, accessToken, onProgress)
  else {
  const chunkIndex = batch + i;
  failedChunks.push(chunkIndex);
- console.log(`[DEBUG] Chunk ${chunkType}[${chunkIndex}] failed permanently: ${result.reason}`);
+ debugLog(` Chunk ${chunkType}[${chunkIndex}] failed permanently: ${result.reason}`);
  }
  }
  completedChunks += batchSuccesses;
@@ -400,10 +407,10 @@ async function uploadChunked(baseUrl, projectId, data, accessToken, onProgress)
  throw new Error(`Too many chunks failed (${failedChunks.length}/${chunks.length}). Network may be unstable. Try again or check your connection.`);
  }
  }
- console.log(`[DEBUG] Finished uploading ${chunks.length} ${chunkType} chunks (${failedChunks.length} failed)`);
+ debugLog(` Finished uploading ${chunks.length} ${chunkType} chunks (${failedChunks.length} failed)`);
  // If any chunks failed, warn but continue if under threshold
  if (failedChunks.length > 0) {
- console.log(`[DEBUG] Warning: ${failedChunks.length} ${chunkType} chunks failed to upload`);
+ debugLog(` Warning: ${failedChunks.length} ${chunkType} chunks failed to upload`);
  }
  }
  // 2. Upload the ASTs in parallel
@@ -475,7 +482,7 @@ async function uploadChunked(baseUrl, projectId, data, accessToken, onProgress)
  async function uploadMinimalData(url, _projectId, data, accessToken, onProgress) {
  const symbolCount = data.symbols.length;
  const fileCount = data.asts.length;
- console.log(`[DEBUG] uploadMinimalData: ${symbolCount} symbols, ${fileCount} files`);
+ debugLog(` uploadMinimalData: ${symbolCount} symbols, ${fileCount} files`);
  onProgress?.({
  phase: 'uploading',
  current: 50,
@@ -491,9 +498,9 @@ async function uploadMinimalData(url, _projectId, data, accessToken, onProgress)
  statistics: data.statistics,
  // Without fileContents - it is the largest part of the payload
  };
- console.log(`[DEBUG] Minimal payload: ${minimalData.asts.length} ASTs, ${minimalData.symbols.length} symbols`);
+ debugLog(` Minimal payload: ${minimalData.asts.length} ASTs, ${minimalData.symbols.length} symbols`);
  try {
- console.log(`[DEBUG] Sending minimal data to ${url}`);
+ debugLog(` Sending minimal data to ${url}`);
  const response = await fetchWithRetry(url, {
  method: 'POST',
  headers: {
@@ -502,10 +509,10 @@ async function uploadMinimalData(url, _projectId, data, accessToken, onProgress)
  },
  body: JSON.stringify(minimalData),
  }, 180000, 2); // 3 minutes, 2 attempts
- console.log(`[DEBUG] Response status: ${response.status}`);
+ debugLog(` Response status: ${response.status}`);
  if (!response.ok) {
  const errorBody = await response.json().catch(() => ({}));
- console.log(`[DEBUG] Error body: ${JSON.stringify(errorBody)}`);
+ debugLog(` Error body: ${JSON.stringify(errorBody)}`);
  const errorDetails = analyzeHttpError(response.status, errorBody);
  return {
  success: false,
@@ -529,7 +536,7 @@ async function uploadMinimalData(url, _projectId, data, accessToken, onProgress)
  };
  }
  catch (error) {
- console.log(`[DEBUG] uploadMinimalData error: ${error}`);
+ debugLog(` uploadMinimalData error: ${error}`);
  const errorDetails = analyzeNetworkError(error);
  return {
  success: false,
@@ -80,7 +80,10 @@ export class FileUtils {
  const resolvedPath = resolve(rootDir);
  // Normalize for glob (use forward slashes on all platforms)
  const absoluteRootDir = resolvedPath.replace(/\\/g, '/');
- Logger.info(`getAllFiles: rootDir=${rootDir}, resolvedPath=${resolvedPath}, absoluteRootDir=${absoluteRootDir}`);
+ // Logger.debug for verbose output (only in DEBUG mode)
+ if (process.env.DEBUG) {
+ Logger.info(`getAllFiles: rootDir=${rootDir}, resolvedPath=${resolvedPath}`);
+ }
  // Check if directory exists and is accessible
  try {
  const dirStats = await stat(resolvedPath);
@@ -88,7 +91,7 @@ export class FileUtils {
  Logger.error(`getAllFiles: ${resolvedPath} is not a directory`);
  return [];
  }
- Logger.info(`getAllFiles: Directory exists and is accessible`);
+ // Directory exists - no need to log in production
  }
  catch (err) {
  Logger.error(`getAllFiles: Cannot access directory ${resolvedPath}: ${err}`);
@@ -107,6 +110,9 @@ export class FileUtils {
  'bower_components/**',
  '**/bower_components/**',
  'jspm_packages/**',
+ // PHP Composer dependencies
+ 'vendor/**',
+ '**/vendor/**',
  // Build/Generated output
  'dist/**',
  '**/dist/**',
@@ -168,7 +174,10 @@ export class FileUtils {
  });
  files.push(...matches);
  }
- Logger.info(`getAllFiles: glob found ${files.length} raw matches`);
+ // Verbose logging only in DEBUG mode
+ if (process.env.DEBUG) {
+ Logger.info(`getAllFiles: found ${files.length} files`);
+ }
  // If no files found, list directory contents for debugging
  if (files.length === 0) {
  try {
@@ -207,12 +216,9 @@ export class FileUtils {
  // Skip files we can't stat
  }
  }
- Logger.info(`getAllFiles: returning ${filteredFiles.length} files after filtering`);
- if (filteredFiles.length > 0 && filteredFiles.length <= 20) {
- Logger.info(`Files found: ${filteredFiles.join(', ')}`);
- }
- else if (filteredFiles.length > 20) {
- Logger.info(`First 20 files: ${filteredFiles.slice(0, 20).join(', ')}`);
+ // Only log file list in DEBUG mode
+ if (process.env.DEBUG && filteredFiles.length > 0) {
+ Logger.info(`getAllFiles: returning ${filteredFiles.length} files`);
  }
  return filteredFiles;
  }
@@ -7,7 +7,8 @@ export var LogLevel;
  LogLevel[LogLevel["ERROR"] = 3] = "ERROR";
  })(LogLevel || (LogLevel = {}));
  export class Logger {
- static level = LogLevel.INFO;
+ // Default to WARN in production, INFO/DEBUG only when DEBUG env var is set
+ static level = process.env.DEBUG ? LogLevel.DEBUG : LogLevel.WARN;
  static setLevel(level) {
  this.level = level;
  }
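
With this change the logger stays at WARN unless DEBUG is set, so routine runs only show warnings and errors. Assuming the enum ordering implied above (DEBUG = 0 through ERROR = 3), the old verbosity can be restored either via the environment variable or programmatically; the import path is assumed for illustration:

// Option 1: start the CLI with the env var set
//   DEBUG=1 node dist/index.js
// Option 2: raise the level in code (import path assumed)
import { Logger, LogLevel } from './logger.js';

Logger.setLevel(LogLevel.INFO); // restores the pre-0.4.0 default verbosity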
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "archicore",
- "version": "0.3.9",
+ "version": "0.4.0",
  "description": "AI Software Architect - code analysis, impact prediction, semantic search",
  "main": "dist/index.js",
  "type": "module",