archicore 0.2.4 → 0.2.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,521 @@
/**
 * ArchiCore CLI - Upload Utilities
 *
 * Utilities for uploading large projects:
 * - Chunked uploads for large files
 * - Retry logic
 * - Detailed error handling
 */
import { loadConfig } from './config.js';
// Limits for chunked upload
const MAX_PAYLOAD_SIZE = 10 * 1024 * 1024; // 10MB per chunk
const MAX_SYMBOLS_PER_CHUNK = 5000;
const MAX_FILES_PER_CHUNK = 100;
const UPLOAD_TIMEOUT = 120000; // 120 seconds per chunk (increased for large projects)
const MAX_RETRIES = 3;
// Limits for the minimal (fallback) upload
const MINIMAL_MAX_SYMBOLS = 10000;
const MINIMAL_MAX_FILES = 500;
/**
 * Estimate the size, in bytes, of a value's JSON representation.
 * Uses Blob so multi-byte UTF-8 characters are counted correctly.
 */
function estimateJsonSize(data) {
    const serialized = JSON.stringify(data);
    return new Blob([serialized]).size;
}
/**
 * Classify a fetch/network failure into a structured, user-facing error
 * description with a machine-readable code, a suggestion, and technical detail.
 * Matching is order-sensitive: the first matcher whose pattern appears in the
 * stringified error wins; anything unmatched falls through to NETWORK_ERROR.
 */
export function analyzeNetworkError(error) {
    const errorStr = String(error);
    const errorName = error instanceof Error ? error.name : 'Unknown';
    const errorMessage = error instanceof Error ? error.message : errorStr;
    // Ordered table of known failure signatures.
    const matchers = [
        {
            patterns: ['fetch failed', 'ECONNREFUSED'],
            code: 'CONNECTION_FAILED',
            message: 'Cannot connect to ArchiCore server',
            suggestion: 'Check that the server is running and accessible. Verify your internet connection.',
            prefix: 'Network error',
        },
        {
            patterns: ['ETIMEDOUT', 'timeout'],
            code: 'TIMEOUT',
            message: 'Request timed out',
            suggestion: 'The project may be too large. Try indexing a smaller subset first.',
            prefix: 'Timeout after request',
        },
        {
            patterns: ['ECONNRESET', 'socket hang up'],
            code: 'CONNECTION_RESET',
            message: 'Connection was reset by server',
            suggestion: 'The payload may be too large. ArchiCore will try chunked upload.',
            prefix: 'Connection reset',
        },
        {
            patterns: ['PayloadTooLargeError', 'request entity too large'],
            code: 'PAYLOAD_TOO_LARGE',
            message: 'Data too large for single upload',
            suggestion: 'Using chunked upload mode for large projects.',
            prefix: 'Payload size exceeded',
        },
        {
            patterns: ['CERT', 'SSL', 'certificate'],
            code: 'SSL_ERROR',
            message: 'SSL/TLS certificate error',
            suggestion: 'Check server SSL configuration or try HTTP instead of HTTPS.',
            prefix: 'SSL error',
        },
        {
            patterns: ['ENOTFOUND', 'getaddrinfo'],
            code: 'DNS_ERROR',
            message: 'Cannot resolve server hostname',
            suggestion: 'Check your internet connection and server URL configuration.',
            prefix: 'DNS resolution failed',
        },
    ];
    for (const { patterns, code, message, suggestion, prefix } of matchers) {
        if (patterns.some((pattern) => errorStr.includes(pattern))) {
            return {
                code,
                message,
                suggestion,
                technicalDetails: `${prefix}: ${errorMessage}`,
            };
        }
    }
    // Generic network error
    return {
        code: 'NETWORK_ERROR',
        message: 'Network request failed',
        suggestion: 'Check your internet connection and try again.',
        technicalDetails: `${errorName}: ${errorMessage}`,
    };
}
/**
 * Translate an HTTP failure status (plus an optional parsed response body)
 * into a structured error description. The server-provided `error`/`message`
 * field, when present, takes precedence as the technical detail.
 */
export function analyzeHttpError(status, responseBody) {
    const body = responseBody || {};
    const serverError = body.error || body.message || '';
    // 502/503/504 all map to the same "temporarily unavailable" entry.
    const UNAVAILABLE = {
        code: 'SERVER_UNAVAILABLE',
        message: 'Server is temporarily unavailable',
        suggestion: 'Wait a few minutes and try again. The server may be overloaded.',
    };
    const byStatus = {
        400: {
            code: 'BAD_REQUEST',
            message: 'Invalid request data',
            suggestion: 'The index data format may be corrupted. Try re-indexing.',
            fallback: 'Server rejected the request as invalid',
        },
        401: {
            code: 'UNAUTHORIZED',
            message: 'Authentication required',
            suggestion: 'Your session may have expired. Run /logout and log in again.',
            fallback: 'Access token is invalid or expired',
        },
        403: {
            code: 'FORBIDDEN',
            message: 'Access denied to this project',
            suggestion: 'You may not have permission to this project. Try re-initializing with /index.',
            fallback: 'Server denied access to the requested resource',
        },
        404: {
            code: 'NOT_FOUND',
            message: 'Project not found on server',
            suggestion: 'The project may have been deleted. Try running /index again.',
            fallback: 'Project ID does not exist on server',
        },
        413: {
            code: 'PAYLOAD_TOO_LARGE',
            message: 'Project data too large for single upload',
            suggestion: 'ArchiCore will automatically use chunked upload.',
            fallback: 'Request payload exceeded server limit',
        },
        429: {
            code: 'RATE_LIMITED',
            message: 'Too many requests',
            suggestion: 'Wait a few minutes and try again.',
            fallback: 'Rate limit exceeded',
        },
        500: {
            code: 'SERVER_ERROR',
            message: 'Server internal error',
            suggestion: 'The server encountered an error processing your project. This may be due to project size.',
            fallback: 'Internal server error - check server logs for details',
        },
        502: UNAVAILABLE,
        503: UNAVAILABLE,
        504: UNAVAILABLE,
    };
    const entry = byStatus[status];
    if (entry) {
        // The unavailable entry has no static fallback; it embeds the status.
        const fallback = entry.fallback ?? `Server returned ${status}`;
        return {
            code: entry.code,
            message: entry.message,
            suggestion: entry.suggestion,
            technicalDetails: serverError || fallback,
        };
    }
    return {
        code: 'HTTP_ERROR',
        message: `Request failed with status ${status}`,
        suggestion: 'Check server status and try again.',
        technicalDetails: serverError || `HTTP ${status} error`,
    };
}
/**
 * Perform fetch with a per-attempt timeout and exponential-backoff retry.
 *
 * Only transport-level failures are retried; an HTTP error response is
 * returned as-is for the caller to inspect via `response.ok`. Abort, DNS
 * (ENOTFOUND) and certificate (CERT) failures are thrown immediately since
 * retrying cannot fix them.
 *
 * @param {string} url - Request URL.
 * @param {object} options - fetch options; an AbortSignal is injected per attempt.
 * @param {number} [timeout] - Per-attempt timeout in milliseconds.
 * @param {number} [maxRetries] - Maximum number of attempts.
 * @returns {Promise<Response>} The first successful fetch Response.
 * @throws The last network error after all attempts are exhausted.
 */
async function fetchWithRetry(url, options, timeout = UPLOAD_TIMEOUT, maxRetries = MAX_RETRIES) {
    let lastError = null;
    for (let attempt = 1; attempt <= maxRetries; attempt++) {
        const controller = new AbortController();
        const timeoutId = setTimeout(() => controller.abort(), timeout);
        try {
            return await fetch(url, {
                ...options,
                signal: controller.signal,
            });
        }
        catch (error) {
            lastError = error instanceof Error ? error : new Error(String(error));
            // Do not retry errors that will not resolve on their own.
            const errorStr = String(error);
            if (errorStr.includes('AbortError') ||
                errorStr.includes('ENOTFOUND') ||
                errorStr.includes('CERT')) {
                throw error;
            }
        }
        finally {
            // BUGFIX: previously the timer was cleared only on success, so every
            // failed attempt leaked a pending abort timer that kept the event
            // loop alive for up to `timeout` ms and fired on a dead controller.
            clearTimeout(timeoutId);
        }
        // Exponential backoff before the next attempt: 1s, 2s, 4s, ... capped at 10s.
        if (attempt < maxRetries) {
            const delay = Math.min(1000 * Math.pow(2, attempt - 1), 10000);
            await new Promise(resolve => setTimeout(resolve, delay));
        }
    }
    throw lastError || new Error('Request failed after retries');
}
/**
 * Upload index data for a project, automatically selecting the strategy:
 * small payloads go up in a single request, large ones via chunked upload.
 * A project counts as "large" when the serialized payload exceeds
 * MAX_PAYLOAD_SIZE or it has more than twice MAX_SYMBOLS_PER_CHUNK symbols.
 */
export async function uploadIndexData(projectId, data, onProgress) {
    const config = await loadConfig();
    const url = `${config.serverUrl}/api/projects/${projectId}/upload-index`;
    // Estimate the payload size to pick an upload strategy.
    const estimatedSize = estimateJsonSize(data);
    const tooManySymbols = data.symbols.length > MAX_SYMBOLS_PER_CHUNK * 2;
    const isLargeProject = estimatedSize > MAX_PAYLOAD_SIZE || tooManySymbols;
    const sizeMb = (estimatedSize / 1024 / 1024).toFixed(1);
    onProgress?.({
        phase: 'preparing',
        current: 0,
        total: 100,
        message: `Preparing upload (${sizeMb} MB)...`,
    });
    const token = config.accessToken || '';
    if (isLargeProject) {
        // Large projects: chunked upload.
        console.log(`[DEBUG] Using chunked upload for large project (${data.symbols.length} symbols)`);
        return uploadChunked(url, projectId, data, token, onProgress);
    }
    // Small projects: one plain request.
    console.log(`[DEBUG] Using single request upload for ${data.symbols.length} symbols`);
    return uploadSingleRequest(url, projectId, data, token, onProgress);
}
/**
 * Upload the whole index in one POST request.
 * Falls back to chunked upload when the server reports 413 or the connection
 * is reset / the payload is rejected as too large at the transport level.
 */
async function uploadSingleRequest(url, projectId, data, accessToken, onProgress) {
    onProgress?.({
        phase: 'uploading',
        current: 50,
        total: 100,
        message: 'Uploading index data...',
    });
    try {
        const response = await fetchWithRetry(url, {
            method: 'POST',
            headers: {
                'Content-Type': 'application/json',
                'Authorization': `Bearer ${accessToken}`,
            },
            body: JSON.stringify(data),
        });
        if (response.ok) {
            const result = await response.json();
            onProgress?.({
                phase: 'done',
                current: 100,
                total: 100,
                message: 'Upload complete!',
            });
            return { success: true, statistics: result.statistics };
        }
        const errorBody = await response.json().catch(() => ({}));
        console.log(`[DEBUG] HTTP error ${response.status}: ${JSON.stringify(errorBody)}`);
        const errorDetails = analyzeHttpError(response.status, errorBody);
        // Payload too large for one request -> switch to chunked upload.
        if (response.status === 413) {
            console.log(`[DEBUG] Payload too large, trying chunked upload`);
            return uploadChunked(url, projectId, data, accessToken, onProgress);
        }
        return { success: false, error: errorDetails.message, errorDetails };
    }
    catch (error) {
        console.log(`[DEBUG] uploadSingleRequest caught error: ${error}`);
        const errorDetails = analyzeNetworkError(error);
        console.log(`[DEBUG] Analyzed error: ${JSON.stringify(errorDetails)}`);
        // Size-related transport failures -> switch to chunked upload.
        const sizeRelated = errorDetails.code === 'CONNECTION_RESET' ||
            errorDetails.code === 'PAYLOAD_TOO_LARGE';
        if (sizeRelated) {
            console.log(`[DEBUG] Trying chunked upload due to ${errorDetails.code}`);
            return uploadChunked(url, projectId, data, accessToken, onProgress);
        }
        return { success: false, error: errorDetails.message, errorDetails };
    }
}
/**
 * Chunked upload for large projects.
 *
 * Protocol (order matters — the server buffers chunks per uploadId):
 *   init -> asts* -> symbols* -> graph -> fileContents* -> finalize.
 * If the init call fails or returns a non-OK status, the server presumably
 * lacks the chunked endpoints and we fall back to uploadMinimalData.
 * Reports progress via onProgress with one tick per uploaded chunk.
 */
async function uploadChunked(baseUrl, projectId, data, accessToken, onProgress) {
    const config = await loadConfig();
    // Split the payload into chunks.
    const symbolChunks = chunkArray(data.symbols, MAX_SYMBOLS_PER_CHUNK);
    const astChunks = chunkArray(data.asts, MAX_FILES_PER_CHUNK);
    const fileChunks = chunkArray(data.fileContents, MAX_FILES_PER_CHUNK);
    const totalChunks = symbolChunks.length + astChunks.length + fileChunks.length + 1; // +1 for graph
    let completedChunks = 0;
    onProgress?.({
        phase: 'uploading',
        current: 0,
        total: totalChunks,
        message: `Uploading in ${totalChunks} chunks...`,
    });
    try {
        // 1. Initialize the chunked upload session.
        let initResponse;
        try {
            initResponse = await fetchWithRetry(`${config.serverUrl}/api/projects/${projectId}/upload-index/init`, {
                method: 'POST',
                headers: {
                    'Content-Type': 'application/json',
                    'Authorization': `Bearer ${accessToken}`,
                },
                body: JSON.stringify({
                    statistics: data.statistics,
                    totalChunks,
                }),
            }, 15000, // 15 seconds - short timeout to probe for chunked-upload support
            1 // Single attempt only
            );
        }
        catch (initError) {
            // Server does not support chunked upload, or is unreachable.
            console.log(`[DEBUG] Chunked upload not supported, falling back to minimal data`);
            return uploadMinimalData(baseUrl, projectId, data, accessToken, onProgress);
        }
        if (!initResponse.ok) {
            // Fallback: if the server does not support chunked upload,
            // send only a minimal data set instead.
            console.log(`[DEBUG] Init returned ${initResponse.status}, falling back to minimal data`);
            return uploadMinimalData(baseUrl, projectId, data, accessToken, onProgress);
        }
        const initResult = await initResponse.json();
        const uploadId = initResult.uploadId;
        // 2. Upload ASTs chunk by chunk.
        for (let i = 0; i < astChunks.length; i++) {
            await uploadChunk(config.serverUrl, projectId, uploadId, 'asts', i, astChunks[i], accessToken);
            completedChunks++;
            onProgress?.({
                phase: 'uploading',
                current: completedChunks,
                total: totalChunks,
                message: `Uploading ASTs (${i + 1}/${astChunks.length})...`,
            });
        }
        // 3. Upload symbols chunk by chunk.
        for (let i = 0; i < symbolChunks.length; i++) {
            await uploadChunk(config.serverUrl, projectId, uploadId, 'symbols', i, symbolChunks[i], accessToken);
            completedChunks++;
            onProgress?.({
                phase: 'uploading',
                current: completedChunks,
                total: totalChunks,
                message: `Uploading symbols (${i + 1}/${symbolChunks.length})...`,
            });
        }
        // 4. Upload the dependency graph (a single chunk).
        await uploadChunk(config.serverUrl, projectId, uploadId, 'graph', 0, data.graph, accessToken);
        completedChunks++;
        onProgress?.({
            phase: 'uploading',
            current: completedChunks,
            total: totalChunks,
            message: 'Uploading dependency graph...',
        });
        // 5. Upload file contents (optional payload).
        for (let i = 0; i < fileChunks.length; i++) {
            await uploadChunk(config.serverUrl, projectId, uploadId, 'fileContents', i, fileChunks[i], accessToken);
            completedChunks++;
            onProgress?.({
                phase: 'uploading',
                current: completedChunks,
                total: totalChunks,
                message: `Uploading file contents (${i + 1}/${fileChunks.length})...`,
            });
        }
        // 6. Finalize the upload (server assembles and indexes the chunks).
        onProgress?.({
            phase: 'processing',
            current: totalChunks,
            total: totalChunks,
            message: 'Finalizing index...',
        });
        const finalizeResponse = await fetchWithRetry(`${config.serverUrl}/api/projects/${projectId}/upload-index/finalize`, {
            method: 'POST',
            headers: {
                'Content-Type': 'application/json',
                'Authorization': `Bearer ${accessToken}`,
            },
            body: JSON.stringify({ uploadId }),
        }, 120000 // 2 minutes for finalization
        );
        if (!finalizeResponse.ok) {
            const errorBody = await finalizeResponse.json().catch(() => ({}));
            const errorDetails = analyzeHttpError(finalizeResponse.status, errorBody);
            return {
                success: false,
                error: errorDetails.message,
                errorDetails,
            };
        }
        const result = await finalizeResponse.json();
        onProgress?.({
            phase: 'done',
            current: totalChunks,
            total: totalChunks,
            message: 'Upload complete!',
        });
        return {
            success: true,
            statistics: result.statistics,
        };
    }
    catch (error) {
        const errorDetails = analyzeNetworkError(error);
        return {
            success: false,
            error: errorDetails.message,
            errorDetails,
        };
    }
}
/**
 * Fallback upload that sends a trimmed index in a single request.
 * Used when the server lacks the chunked-upload endpoints. ASTs and symbols
 * are truncated to MINIMAL_MAX_FILES / MINIMAL_MAX_SYMBOLS, file contents
 * (the largest part) are omitted entirely, and the graph is sent in full.
 */
async function uploadMinimalData(url, _projectId, data, accessToken, onProgress) {
    const totalSymbols = data.symbols.length;
    const totalFiles = data.asts.length;
    console.log(`[DEBUG] uploadMinimalData: ${totalSymbols} symbols, ${totalFiles} files`);
    const keptSymbols = Math.min(totalSymbols, MINIMAL_MAX_SYMBOLS);
    onProgress?.({
        phase: 'uploading',
        current: 50,
        total: 100,
        message: `Uploading minimal index (${keptSymbols} of ${totalSymbols} symbols)...`,
    });
    // Trimmed payload for very large projects: full graph, truncated ASTs and
    // symbols, no file contents.
    const minimalData = {
        asts: data.asts.slice(0, MINIMAL_MAX_FILES),
        symbols: data.symbols.slice(0, MINIMAL_MAX_SYMBOLS),
        graph: data.graph,
        statistics: data.statistics,
    };
    console.log(`[DEBUG] Minimal payload: ${minimalData.asts.length} ASTs, ${minimalData.symbols.length} symbols`);
    try {
        console.log(`[DEBUG] Sending minimal data to ${url}`);
        const response = await fetchWithRetry(url, {
            method: 'POST',
            headers: {
                'Content-Type': 'application/json',
                'Authorization': `Bearer ${accessToken}`,
            },
            body: JSON.stringify(minimalData),
        }, 180000, 2); // 3 minutes, 2 attempts
        console.log(`[DEBUG] Response status: ${response.status}`);
        if (!response.ok) {
            const errorBody = await response.json().catch(() => ({}));
            console.log(`[DEBUG] Error body: ${JSON.stringify(errorBody)}`);
            const errorDetails = analyzeHttpError(response.status, errorBody);
            return { success: false, error: errorDetails.message, errorDetails };
        }
        const result = await response.json();
        onProgress?.({
            phase: 'done',
            current: 100,
            total: 100,
            message: 'Upload complete (minimal mode)!',
        });
        // If the server returned no statistics, report what we actually sent.
        const fallbackStats = {
            filesCount: minimalData.asts.length,
            symbolsCount: minimalData.symbols.length,
        };
        return { success: true, statistics: result.statistics || fallbackStats };
    }
    catch (error) {
        console.log(`[DEBUG] uploadMinimalData error: ${error}`);
        const errorDetails = analyzeNetworkError(error);
        return { success: false, error: errorDetails.message, errorDetails };
    }
}
/**
 * Upload a single chunk of a chunked upload session.
 *
 * @param {string} serverUrl - Base server URL.
 * @param {string} projectId - Target project id.
 * @param {string} uploadId - Session id returned by the init endpoint.
 * @param {string} chunkType - One of 'asts' | 'symbols' | 'graph' | 'fileContents'.
 * @param {number} chunkIndex - Zero-based index of this chunk within its type.
 * @param {*} chunkData - The chunk payload.
 * @param {string} accessToken - Bearer token for the Authorization header.
 * @throws {Error} When the server responds with a non-OK status.
 */
async function uploadChunk(serverUrl, projectId, uploadId, chunkType, chunkIndex, chunkData, accessToken) {
    const response = await fetchWithRetry(`${serverUrl}/api/projects/${projectId}/upload-index/chunk`, {
        method: 'POST',
        headers: {
            'Content-Type': 'application/json',
            'Authorization': `Bearer ${accessToken}`,
        },
        body: JSON.stringify({
            uploadId,
            chunkType,
            chunkIndex,
            data: chunkData,
        }),
    });
    if (!response.ok) {
        // FIX: include the HTTP status, which was previously discarded, so the
        // caller's error analysis (analyzeNetworkError on the thrown error)
        // and debug logs can tell transport problems from server rejections.
        throw new Error(`Failed to upload chunk ${chunkType}[${chunkIndex}]: HTTP ${response.status}`);
    }
}
/**
 * Split an array into consecutive chunks of at most `chunkSize` elements.
 * Always yields at least one chunk: an empty input produces [[]] so callers
 * can rely on a non-empty result.
 */
function chunkArray(array, chunkSize) {
    if (array.length === 0) {
        return [[]];
    }
    const chunkCount = Math.ceil(array.length / chunkSize);
    return Array.from({ length: chunkCount }, (_, index) => array.slice(index * chunkSize, (index + 1) * chunkSize));
}
//# sourceMappingURL=upload-utils.js.map
@@ -165,7 +165,146 @@ apiRouter.post('/projects/:id/upload-index', authMiddleware, checkProjectAccess,
165
165
  }
166
166
  catch (error) {
167
167
  Logger.error('Failed to upload indexed data:', error);
168
- res.status(500).json({ error: 'Failed to upload indexed data' });
168
+ // Provide more specific error messages
169
+ const errorStr = String(error);
170
+ let errorMessage = 'Failed to upload indexed data';
171
+ let errorCode = 'UPLOAD_FAILED';
172
+ if (errorStr.includes('ENOMEM') || errorStr.includes('heap')) {
173
+ errorMessage = 'Server ran out of memory processing the project';
174
+ errorCode = 'OUT_OF_MEMORY';
175
+ }
176
+ else if (errorStr.includes('ENOSPC')) {
177
+ errorMessage = 'Server ran out of disk space';
178
+ errorCode = 'DISK_FULL';
179
+ }
180
+ else if (errorStr.includes('timeout')) {
181
+ errorMessage = 'Processing timed out for large project';
182
+ errorCode = 'TIMEOUT';
183
+ }
184
+ res.status(500).json({
185
+ error: errorMessage,
186
+ code: errorCode,
187
+ details: process.env.NODE_ENV === 'development' ? errorStr : undefined
188
+ });
189
+ }
190
+ });
// In-memory store of in-progress chunked upload sessions, keyed by uploadId.
const chunkedUploads = new Map();
// Evict upload sessions older than one hour so abandoned uploads do not leak memory.
const uploadCleanupTimer = setInterval(() => {
    const oneHourAgo = Date.now() - 60 * 60 * 1000;
    for (const [uploadId, data] of chunkedUploads.entries()) {
        if (data.createdAt < oneHourAgo) {
            chunkedUploads.delete(uploadId);
        }
    }
}, 5 * 60 * 1000); // Sweep every 5 minutes
// FIX: unref the housekeeping timer (Node.js) so it cannot keep the process
// alive by itself and block a graceful shutdown; previously it was never
// unref'd or cleared.
uploadCleanupTimer.unref?.();
/**
 * POST /api/projects/:id/upload-index/init
 * Start a chunked upload session and return its uploadId.
 */
apiRouter.post('/projects/:id/upload-index/init', authMiddleware, checkProjectAccess, async (req, res) => {
    try {
        const { id } = req.params;
        const { statistics, totalChunks } = req.body;
        const randomSuffix = Math.random().toString(36).substring(2, 8);
        const uploadId = `upload_${id}_${Date.now()}_${randomSuffix}`;
        // Empty buffers that the chunk endpoint will fill in.
        const session = {
            projectId: id,
            statistics: statistics || { totalFiles: 0, totalSymbols: 0 },
            asts: [],
            symbols: [],
            graph: null,
            fileContents: [],
            createdAt: Date.now(),
        };
        chunkedUploads.set(uploadId, session);
        Logger.info(`Chunked upload initialized: ${uploadId} (${totalChunks} chunks expected)`);
        res.json({ uploadId, message: 'Chunked upload initialized' });
    }
    catch (error) {
        Logger.error('Failed to init chunked upload:', error);
        res.status(500).json({ error: 'Failed to initialize chunked upload' });
    }
});
/**
 * POST /api/projects/:id/upload-index/chunk
 * Receive one chunk of a chunked upload and append it to the session buffers.
 */
apiRouter.post('/projects/:id/upload-index/chunk', authMiddleware, checkProjectAccess, async (req, res) => {
    try {
        const { id } = req.params;
        const { uploadId, chunkType, chunkIndex, data } = req.body;
        const upload = chunkedUploads.get(uploadId);
        if (!upload) {
            res.status(404).json({ error: 'Upload session not found or expired' });
            return;
        }
        // FIX: verify the session belongs to the project in the URL, matching
        // the finalize endpoint. Without this check, a caller authorized for
        // one project could append data to another project's upload session
        // by supplying its uploadId.
        if (upload.projectId !== id) {
            res.status(403).json({ error: 'Upload session does not match project' });
            return;
        }
        // Append the chunk data to the matching buffer.
        switch (chunkType) {
            case 'asts':
                upload.asts.push(...data);
                break;
            case 'symbols':
                upload.symbols.push(...data);
                break;
            case 'graph':
                upload.graph = data;
                break;
            case 'fileContents':
                upload.fileContents.push(...data);
                break;
            default:
                res.status(400).json({ error: `Unknown chunk type: ${chunkType}` });
                return;
        }
        Logger.debug(`Chunk received: ${chunkType}[${chunkIndex}] for upload ${uploadId}`);
        res.json({ success: true, received: chunkType, index: chunkIndex });
    }
    catch (error) {
        Logger.error('Failed to process chunk:', error);
        res.status(500).json({ error: 'Failed to process chunk' });
    }
});
/**
 * POST /api/projects/:id/upload-index/finalize
 * Assemble all buffered chunks of a session and hand the complete index to
 * projectService. The session is removed only after a successful import, so
 * a failed finalize can be retried until the hourly cleanup evicts it.
 */
apiRouter.post('/projects/:id/upload-index/finalize', authMiddleware, checkProjectAccess, async (req, res) => {
    try {
        const { id } = req.params;
        const { uploadId } = req.body;
        const session = chunkedUploads.get(uploadId);
        if (!session) {
            res.status(404).json({ error: 'Upload session not found or expired' });
            return;
        }
        if (session.projectId !== id) {
            res.status(403).json({ error: 'Upload session does not match project' });
            return;
        }
        Logger.info(`Finalizing chunked upload ${uploadId}: ${session.asts.length} ASTs, ${session.symbols.length} symbols`);
        // Statistics fall back to the observed chunk counts when the client
        // sent zeros at init time.
        const totalFiles = session.statistics.totalFiles || session.asts.length;
        const totalSymbols = session.statistics.totalSymbols || session.symbols.length;
        // Chunked data arrives as untyped JSON; pass the assembled payload on.
        const result = await projectService.uploadIndexedData(id, {
            asts: session.asts,
            symbols: session.symbols,
            graph: (session.graph || { nodes: [], edges: [] }),
            fileContents: session.fileContents,
            statistics: {
                totalFiles,
                totalSymbols,
            },
        });
        // Drop the session now that its data has been imported.
        chunkedUploads.delete(uploadId);
        res.json(result);
    }
    catch (error) {
        Logger.error('Failed to finalize chunked upload:', error);
        const errorStr = String(error);
        let errorMessage = 'Failed to finalize upload';
        if (errorStr.includes('ENOMEM') || errorStr.includes('heap')) {
            errorMessage = 'Server ran out of memory processing the project';
        }
        res.status(500).json({ error: errorMessage });
    }
});
171
310
  /**
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "archicore",
3
- "version": "0.2.4",
3
+ "version": "0.2.6",
4
4
  "description": "AI Software Architect - code analysis, impact prediction, semantic search",
5
5
  "main": "dist/index.js",
6
6
  "type": "module",