archicore 0.2.4 → 0.2.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli/commands/interactive.js +336 -89
- package/dist/cli/utils/conversation-history.d.ts +84 -0
- package/dist/cli/utils/conversation-history.js +301 -0
- package/dist/cli/utils/index.d.ts +2 -0
- package/dist/cli/utils/index.js +2 -0
- package/dist/cli/utils/upload-utils.d.ts +66 -0
- package/dist/cli/utils/upload-utils.js +495 -0
- package/dist/server/routes/api.js +140 -1
- package/package.json +1 -1
package/dist/cli/utils/upload-utils.js
@@ -0,0 +1,495 @@
+/**
+ * ArchiCore CLI - Upload Utilities
+ *
+ * Utilities for uploading large projects:
+ * - Chunked uploads for large files
+ * - Retry logic
+ * - Detailed error handling
+ */
+import { loadConfig } from './config.js';
+// Limits for chunked upload
+const MAX_PAYLOAD_SIZE = 10 * 1024 * 1024; // 10MB per chunk
+const MAX_SYMBOLS_PER_CHUNK = 5000;
+const MAX_FILES_PER_CHUNK = 100;
+const UPLOAD_TIMEOUT = 60000; // 60 seconds per chunk
+const MAX_RETRIES = 3;
+/**
+ * Estimate the size of a JSON payload in bytes
+ */
+function estimateJsonSize(data) {
+    return new Blob([JSON.stringify(data)]).size;
+}
+/**
+ * Detailed analysis of a fetch error
+ */
+export function analyzeNetworkError(error) {
+    const errorStr = String(error);
+    const errorName = error instanceof Error ? error.name : 'Unknown';
+    const errorMessage = error instanceof Error ? error.message : errorStr;
+    // Analyze the different error types
+    if (errorStr.includes('fetch failed') || errorStr.includes('ECONNREFUSED')) {
+        return {
+            code: 'CONNECTION_FAILED',
+            message: 'Cannot connect to ArchiCore server',
+            suggestion: 'Check that the server is running and accessible. Verify your internet connection.',
+            technicalDetails: `Network error: ${errorMessage}`,
+        };
+    }
+    if (errorStr.includes('ETIMEDOUT') || errorStr.includes('timeout')) {
+        return {
+            code: 'TIMEOUT',
+            message: 'Request timed out',
+            suggestion: 'The project may be too large. Try indexing a smaller subset first.',
+            technicalDetails: `Timeout after request: ${errorMessage}`,
+        };
+    }
+    if (errorStr.includes('ECONNRESET') || errorStr.includes('socket hang up')) {
+        return {
+            code: 'CONNECTION_RESET',
+            message: 'Connection was reset by server',
+            suggestion: 'The payload may be too large. ArchiCore will try chunked upload.',
+            technicalDetails: `Connection reset: ${errorMessage}`,
+        };
+    }
+    if (errorStr.includes('PayloadTooLargeError') || errorStr.includes('request entity too large')) {
+        return {
+            code: 'PAYLOAD_TOO_LARGE',
+            message: 'Data too large for single upload',
+            suggestion: 'Using chunked upload mode for large projects.',
+            technicalDetails: `Payload size exceeded: ${errorMessage}`,
+        };
+    }
+    if (errorStr.includes('CERT') || errorStr.includes('SSL') || errorStr.includes('certificate')) {
+        return {
+            code: 'SSL_ERROR',
+            message: 'SSL/TLS certificate error',
+            suggestion: 'Check server SSL configuration or try HTTP instead of HTTPS.',
+            technicalDetails: `SSL error: ${errorMessage}`,
+        };
+    }
+    if (errorStr.includes('ENOTFOUND') || errorStr.includes('getaddrinfo')) {
+        return {
+            code: 'DNS_ERROR',
+            message: 'Cannot resolve server hostname',
+            suggestion: 'Check your internet connection and server URL configuration.',
+            technicalDetails: `DNS resolution failed: ${errorMessage}`,
+        };
+    }
+    // Generic network error
+    return {
+        code: 'NETWORK_ERROR',
+        message: 'Network request failed',
+        suggestion: 'Check your internet connection and try again.',
+        technicalDetails: `${errorName}: ${errorMessage}`,
+    };
+}
+/**
+ * Detailed analysis of an HTTP error
+ */
+export function analyzeHttpError(status, responseBody) {
+    const body = responseBody || {};
+    const serverError = body.error || body.message || '';
+    switch (status) {
+        case 400:
+            return {
+                code: 'BAD_REQUEST',
+                message: 'Invalid request data',
+                suggestion: 'The index data format may be corrupted. Try re-indexing.',
+                technicalDetails: serverError || 'Server rejected the request as invalid',
+            };
+        case 401:
+            return {
+                code: 'UNAUTHORIZED',
+                message: 'Authentication required',
+                suggestion: 'Your session may have expired. Run /logout and log in again.',
+                technicalDetails: serverError || 'Access token is invalid or expired',
+            };
+        case 403:
+            return {
+                code: 'FORBIDDEN',
+                message: 'Access denied to this project',
+                suggestion: 'You may not have permission to access this project. Try re-initializing with /index.',
+                technicalDetails: serverError || 'Server denied access to the requested resource',
+            };
+        case 404:
+            return {
+                code: 'NOT_FOUND',
+                message: 'Project not found on server',
+                suggestion: 'The project may have been deleted. Try running /index again.',
+                technicalDetails: serverError || 'Project ID does not exist on server',
+            };
+        case 413:
+            return {
+                code: 'PAYLOAD_TOO_LARGE',
+                message: 'Project data too large for single upload',
+                suggestion: 'ArchiCore will automatically use chunked upload.',
+                technicalDetails: serverError || 'Request payload exceeded server limit',
+            };
+        case 429:
+            return {
+                code: 'RATE_LIMITED',
+                message: 'Too many requests',
+                suggestion: 'Wait a few minutes and try again.',
+                technicalDetails: serverError || 'Rate limit exceeded',
+            };
+        case 500:
+            return {
+                code: 'SERVER_ERROR',
+                message: 'Server internal error',
+                suggestion: 'The server encountered an error processing your project. This may be due to project size.',
+                technicalDetails: serverError || 'Internal server error - check server logs for details',
+            };
+        case 502:
+        case 503:
+        case 504:
+            return {
+                code: 'SERVER_UNAVAILABLE',
+                message: 'Server is temporarily unavailable',
+                suggestion: 'Wait a few minutes and try again. The server may be overloaded.',
+                technicalDetails: serverError || `Server returned ${status}`,
+            };
+        default:
+            return {
+                code: 'HTTP_ERROR',
+                message: `Request failed with status ${status}`,
+                suggestion: 'Check server status and try again.',
+                technicalDetails: serverError || `HTTP ${status} error`,
+            };
+    }
+}
+/**
+ * Perform a fetch with timeout and retry
+ */
+async function fetchWithRetry(url, options, timeout = UPLOAD_TIMEOUT, maxRetries = MAX_RETRIES) {
+    let lastError = null;
+    for (let attempt = 1; attempt <= maxRetries; attempt++) {
+        try {
+            const controller = new AbortController();
+            const timeoutId = setTimeout(() => controller.abort(), timeout);
+            const response = await fetch(url, {
+                ...options,
+                signal: controller.signal,
+            });
+            clearTimeout(timeoutId);
+            return response;
+        }
+        catch (error) {
+            lastError = error instanceof Error ? error : new Error(String(error));
+            // Do not retry certain errors
+            const errorStr = String(error);
+            if (errorStr.includes('AbortError') ||
+                errorStr.includes('ENOTFOUND') ||
+                errorStr.includes('CERT')) {
+                throw error;
+            }
+            // Exponential backoff before retrying
+            if (attempt < maxRetries) {
+                const delay = Math.min(1000 * Math.pow(2, attempt - 1), 10000);
+                await new Promise(resolve => setTimeout(resolve, delay));
+            }
+        }
+    }
+    throw lastError || new Error('Request failed after retries');
+}
+/**
+ * Upload the index, with automatic chunked upload for large projects
+ */
+export async function uploadIndexData(projectId, data, onProgress) {
+    const config = await loadConfig();
+    const url = `${config.serverUrl}/api/projects/${projectId}/upload-index`;
+    // Estimate the data size
+    const estimatedSize = estimateJsonSize(data);
+    const isLargeProject = estimatedSize > MAX_PAYLOAD_SIZE ||
+        data.symbols.length > MAX_SYMBOLS_PER_CHUNK * 2;
+    onProgress?.({
+        phase: 'preparing',
+        current: 0,
+        total: 100,
+        message: `Preparing upload (${(estimatedSize / 1024 / 1024).toFixed(1)} MB)...`,
+    });
+    // Small projects: regular single-request upload
+    if (!isLargeProject) {
+        console.log(`[DEBUG] Using single request upload for ${data.symbols.length} symbols`);
+        return uploadSingleRequest(url, projectId, data, config.accessToken || '', onProgress);
+    }
+    // Large projects: chunked upload
+    console.log(`[DEBUG] Using chunked upload for large project (${data.symbols.length} symbols)`);
+    return uploadChunked(url, projectId, data, config.accessToken || '', onProgress);
+}
+/**
+ * Regular upload in a single request
+ */
+async function uploadSingleRequest(url, projectId, data, accessToken, onProgress) {
+    onProgress?.({
+        phase: 'uploading',
+        current: 50,
+        total: 100,
+        message: 'Uploading index data...',
+    });
+    try {
+        const response = await fetchWithRetry(url, {
+            method: 'POST',
+            headers: {
+                'Content-Type': 'application/json',
+                'Authorization': `Bearer ${accessToken}`,
+            },
+            body: JSON.stringify(data),
+        });
+        if (!response.ok) {
+            const errorBody = await response.json().catch(() => ({}));
+            console.log(`[DEBUG] HTTP error ${response.status}: ${JSON.stringify(errorBody)}`);
+            const errorDetails = analyzeHttpError(response.status, errorBody);
+            // If the payload is too large, try chunked upload
+            if (response.status === 413) {
+                console.log(`[DEBUG] Payload too large, trying chunked upload`);
+                return uploadChunked(url, projectId, data, accessToken, onProgress);
+            }
+            return {
+                success: false,
+                error: errorDetails.message,
+                errorDetails,
+            };
+        }
+        const result = await response.json();
+        onProgress?.({
+            phase: 'done',
+            current: 100,
+            total: 100,
+            message: 'Upload complete!',
+        });
+        return {
+            success: true,
+            statistics: result.statistics,
+        };
+    }
+    catch (error) {
+        console.log(`[DEBUG] uploadSingleRequest caught error: ${error}`);
+        const errorDetails = analyzeNetworkError(error);
+        console.log(`[DEBUG] Analyzed error: ${JSON.stringify(errorDetails)}`);
+        // If the error is size-related, try chunked upload
+        if (errorDetails.code === 'CONNECTION_RESET' ||
+            errorDetails.code === 'PAYLOAD_TOO_LARGE') {
+            console.log(`[DEBUG] Trying chunked upload due to ${errorDetails.code}`);
+            return uploadChunked(url, projectId, data, accessToken, onProgress);
+        }
+        return {
+            success: false,
+            error: errorDetails.message,
+            errorDetails,
+        };
+    }
+}
+/**
+ * Chunked upload for large projects
+ */
+async function uploadChunked(baseUrl, projectId, data, accessToken, onProgress) {
+    const config = await loadConfig();
+    // Split the data into chunks
+    const symbolChunks = chunkArray(data.symbols, MAX_SYMBOLS_PER_CHUNK);
+    const astChunks = chunkArray(data.asts, MAX_FILES_PER_CHUNK);
+    const fileChunks = chunkArray(data.fileContents, MAX_FILES_PER_CHUNK);
+    const totalChunks = symbolChunks.length + astChunks.length + fileChunks.length + 1; // +1 for graph
+    let completedChunks = 0;
+    onProgress?.({
+        phase: 'uploading',
+        current: 0,
+        total: totalChunks,
+        message: `Uploading in ${totalChunks} chunks...`,
+    });
+    try {
+        // 1. Initialize the chunked upload
+        const initResponse = await fetchWithRetry(`${config.serverUrl}/api/projects/${projectId}/upload-index/init`, {
+            method: 'POST',
+            headers: {
+                'Content-Type': 'application/json',
+                'Authorization': `Bearer ${accessToken}`,
+            },
+            body: JSON.stringify({
+                statistics: data.statistics,
+                totalChunks,
+            }),
+        });
+        if (!initResponse.ok) {
+            // Fallback: if the server does not support chunked upload,
+            // send only minimal data
+            return uploadMinimalData(baseUrl, projectId, data, accessToken, onProgress);
+        }
+        const initResult = await initResponse.json();
+        const uploadId = initResult.uploadId;
+        // 2. Upload ASTs in chunks
+        for (let i = 0; i < astChunks.length; i++) {
+            await uploadChunk(config.serverUrl, projectId, uploadId, 'asts', i, astChunks[i], accessToken);
+            completedChunks++;
+            onProgress?.({
+                phase: 'uploading',
+                current: completedChunks,
+                total: totalChunks,
+                message: `Uploading ASTs (${i + 1}/${astChunks.length})...`,
+            });
+        }
+        // 3. Upload symbols in chunks
+        for (let i = 0; i < symbolChunks.length; i++) {
+            await uploadChunk(config.serverUrl, projectId, uploadId, 'symbols', i, symbolChunks[i], accessToken);
+            completedChunks++;
+            onProgress?.({
+                phase: 'uploading',
+                current: completedChunks,
+                total: totalChunks,
+                message: `Uploading symbols (${i + 1}/${symbolChunks.length})...`,
+            });
+        }
+        // 4. Upload the graph
+        await uploadChunk(config.serverUrl, projectId, uploadId, 'graph', 0, data.graph, accessToken);
+        completedChunks++;
+        onProgress?.({
+            phase: 'uploading',
+            current: completedChunks,
+            total: totalChunks,
+            message: 'Uploading dependency graph...',
+        });
+        // 5. Upload file contents (optional)
+        for (let i = 0; i < fileChunks.length; i++) {
+            await uploadChunk(config.serverUrl, projectId, uploadId, 'fileContents', i, fileChunks[i], accessToken);
+            completedChunks++;
+            onProgress?.({
+                phase: 'uploading',
+                current: completedChunks,
+                total: totalChunks,
+                message: `Uploading file contents (${i + 1}/${fileChunks.length})...`,
+            });
+        }
+        // 6. Finalize the upload
+        onProgress?.({
+            phase: 'processing',
+            current: totalChunks,
+            total: totalChunks,
+            message: 'Finalizing index...',
+        });
+        const finalizeResponse = await fetchWithRetry(`${config.serverUrl}/api/projects/${projectId}/upload-index/finalize`, {
+            method: 'POST',
+            headers: {
+                'Content-Type': 'application/json',
+                'Authorization': `Bearer ${accessToken}`,
+            },
+            body: JSON.stringify({ uploadId }),
+        }, 120000 // 2 minutes for finalization
+        );
+        if (!finalizeResponse.ok) {
+            const errorBody = await finalizeResponse.json().catch(() => ({}));
+            const errorDetails = analyzeHttpError(finalizeResponse.status, errorBody);
+            return {
+                success: false,
+                error: errorDetails.message,
+                errorDetails,
+            };
+        }
+        const result = await finalizeResponse.json();
+        onProgress?.({
+            phase: 'done',
+            current: totalChunks,
+            total: totalChunks,
+            message: 'Upload complete!',
+        });
+        return {
+            success: true,
+            statistics: result.statistics,
+        };
+    }
+    catch (error) {
+        const errorDetails = analyzeNetworkError(error);
+        return {
+            success: false,
+            error: errorDetails.message,
+            errorDetails,
+        };
+    }
+}
+/**
+ * Upload minimal data (fallback for servers without chunked upload support)
+ */
+async function uploadMinimalData(url, _projectId, data, accessToken, onProgress) {
+    onProgress?.({
+        phase: 'uploading',
+        current: 50,
+        total: 100,
+        message: 'Uploading minimal index (large project mode)...',
+    });
+    // Send only the graph, statistics, and a truncated slice of ASTs and symbols (no file contents)
+    const minimalData = {
+        asts: data.asts.slice(0, MAX_FILES_PER_CHUNK), // Only the first N files
+        symbols: data.symbols.slice(0, MAX_SYMBOLS_PER_CHUNK), // Only the first N symbols
+        graph: data.graph,
+        statistics: data.statistics,
+        // No fileContents - it is the largest part
+    };
+    try {
+        const response = await fetchWithRetry(url, {
+            method: 'POST',
+            headers: {
+                'Content-Type': 'application/json',
+                'Authorization': `Bearer ${accessToken}`,
+            },
+            body: JSON.stringify(minimalData),
+        }, 120000); // 2 minutes
+        if (!response.ok) {
+            const errorBody = await response.json().catch(() => ({}));
+            const errorDetails = analyzeHttpError(response.status, errorBody);
+            return {
+                success: false,
+                error: errorDetails.message,
+                errorDetails,
+            };
+        }
+        const result = await response.json();
+        onProgress?.({
+            phase: 'done',
+            current: 100,
+            total: 100,
+            message: 'Upload complete (minimal mode)!',
+        });
+        return {
+            success: true,
+            statistics: result.statistics,
+        };
+    }
+    catch (error) {
+        const errorDetails = analyzeNetworkError(error);
+        return {
+            success: false,
+            error: errorDetails.message,
+            errorDetails,
+        };
+    }
+}
+/**
+ * Upload a single chunk
+ */
+async function uploadChunk(serverUrl, projectId, uploadId, chunkType, chunkIndex, chunkData, accessToken) {
+    const response = await fetchWithRetry(`${serverUrl}/api/projects/${projectId}/upload-index/chunk`, {
+        method: 'POST',
+        headers: {
+            'Content-Type': 'application/json',
+            'Authorization': `Bearer ${accessToken}`,
+        },
+        body: JSON.stringify({
+            uploadId,
+            chunkType,
+            chunkIndex,
+            data: chunkData,
+        }),
+    });
+    if (!response.ok) {
+        throw new Error(`Failed to upload chunk ${chunkType}[${chunkIndex}]`);
+    }
+}
+/**
+ * Split an array into chunks
+ */
+function chunkArray(array, chunkSize) {
+    const chunks = [];
+    for (let i = 0; i < array.length; i += chunkSize) {
+        chunks.push(array.slice(i, i + chunkSize));
+    }
+    return chunks.length > 0 ? chunks : [[]];
+}
+//# sourceMappingURL=upload-utils.js.map
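
The new `upload-utils.js` module above chooses between a single-request upload and a chunked upload: a project is treated as large when its estimated JSON payload exceeds 10 MB or it has more than 10,000 symbols, and `fetchWithRetry` spaces its three attempts with exponential backoff (1 s, then 2 s, capped at 10 s). For context, here is a minimal sketch of how CLI code might call the exported `uploadIndexData`. The payload and callback shapes are taken from the code above; `collectIndexData` and the project id are hypothetical placeholders.

```js
import { uploadIndexData } from './upload-utils.js';

// Hypothetical helper that gathers the index payload; the field names
// mirror what uploadChunked() later splits into chunks.
const data = await collectIndexData(); // { asts, symbols, graph, fileContents, statistics }

const result = await uploadIndexData('proj_123', data, (p) => {
    // The progress callback receives { phase, current, total, message }
    console.log(`[${p.phase}] ${p.current}/${p.total} ${p.message}`);
});

if (!result.success) {
    // On failure, errorDetails carries { code, message, suggestion, technicalDetails }
    console.error(`${result.error}: ${result.errorDetails?.suggestion}`);
}
```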
package/dist/server/routes/api.js
@@ -165,7 +165,146 @@ apiRouter.post('/projects/:id/upload-index', authMiddleware, checkProjectAccess,
     }
     catch (error) {
         Logger.error('Failed to upload indexed data:', error);
-
+        // Provide more specific error messages
+        const errorStr = String(error);
+        let errorMessage = 'Failed to upload indexed data';
+        let errorCode = 'UPLOAD_FAILED';
+        if (errorStr.includes('ENOMEM') || errorStr.includes('heap')) {
+            errorMessage = 'Server ran out of memory processing the project';
+            errorCode = 'OUT_OF_MEMORY';
+        }
+        else if (errorStr.includes('ENOSPC')) {
+            errorMessage = 'Server ran out of disk space';
+            errorCode = 'DISK_FULL';
+        }
+        else if (errorStr.includes('timeout')) {
+            errorMessage = 'Processing timed out for large project';
+            errorCode = 'TIMEOUT';
+        }
+        res.status(500).json({
+            error: errorMessage,
+            code: errorCode,
+            details: process.env.NODE_ENV === 'development' ? errorStr : undefined
+        });
+    }
+});
+// Storage for chunked uploads
+const chunkedUploads = new Map();
+// Clean up stale uploads (older than 1 hour)
+setInterval(() => {
+    const oneHourAgo = Date.now() - 60 * 60 * 1000;
+    for (const [uploadId, data] of chunkedUploads.entries()) {
+        if (data.createdAt < oneHourAgo) {
+            chunkedUploads.delete(uploadId);
+        }
+    }
+}, 5 * 60 * 1000); // Check every 5 minutes
+/**
+ * POST /api/projects/:id/upload-index/init
+ * Initialize a chunked upload
+ */
+apiRouter.post('/projects/:id/upload-index/init', authMiddleware, checkProjectAccess, async (req, res) => {
+    try {
+        const { id } = req.params;
+        const { statistics, totalChunks } = req.body;
+        const uploadId = `upload_${id}_${Date.now()}_${Math.random().toString(36).substring(2, 8)}`;
+        chunkedUploads.set(uploadId, {
+            projectId: id,
+            statistics: statistics || { totalFiles: 0, totalSymbols: 0 },
+            asts: [],
+            symbols: [],
+            graph: null,
+            fileContents: [],
+            createdAt: Date.now(),
+        });
+        Logger.info(`Chunked upload initialized: ${uploadId} (${totalChunks} chunks expected)`);
+        res.json({ uploadId, message: 'Chunked upload initialized' });
+    }
+    catch (error) {
+        Logger.error('Failed to init chunked upload:', error);
+        res.status(500).json({ error: 'Failed to initialize chunked upload' });
+    }
+});
+/**
+ * POST /api/projects/:id/upload-index/chunk
+ * Upload a single chunk
+ */
+apiRouter.post('/projects/:id/upload-index/chunk', authMiddleware, checkProjectAccess, async (req, res) => {
+    try {
+        const { uploadId, chunkType, chunkIndex, data } = req.body;
+        const upload = chunkedUploads.get(uploadId);
+        if (!upload) {
+            res.status(404).json({ error: 'Upload session not found or expired' });
+            return;
+        }
+        // Append the data to the matching array
+        switch (chunkType) {
+            case 'asts':
+                upload.asts.push(...data);
+                break;
+            case 'symbols':
+                upload.symbols.push(...data);
+                break;
+            case 'graph':
+                upload.graph = data;
+                break;
+            case 'fileContents':
+                upload.fileContents.push(...data);
+                break;
+            default:
+                res.status(400).json({ error: `Unknown chunk type: ${chunkType}` });
+                return;
+        }
+        Logger.debug(`Chunk received: ${chunkType}[${chunkIndex}] for upload ${uploadId}`);
+        res.json({ success: true, received: chunkType, index: chunkIndex });
+    }
+    catch (error) {
+        Logger.error('Failed to process chunk:', error);
+        res.status(500).json({ error: 'Failed to process chunk' });
+    }
+});
+/**
+ * POST /api/projects/:id/upload-index/finalize
+ * Finalize a chunked upload
+ */
+apiRouter.post('/projects/:id/upload-index/finalize', authMiddleware, checkProjectAccess, async (req, res) => {
+    try {
+        const { id } = req.params;
+        const { uploadId } = req.body;
+        const upload = chunkedUploads.get(uploadId);
+        if (!upload) {
+            res.status(404).json({ error: 'Upload session not found or expired' });
+            return;
+        }
+        if (upload.projectId !== id) {
+            res.status(403).json({ error: 'Upload session does not match project' });
+            return;
+        }
+        Logger.info(`Finalizing chunked upload ${uploadId}: ${upload.asts.length} ASTs, ${upload.symbols.length} symbols`);
+        // Assemble all the data and hand it to projectService
+        // Type cast because chunked data comes as unknown from JSON
+        const result = await projectService.uploadIndexedData(id, {
+            asts: upload.asts,
+            symbols: upload.symbols,
+            graph: (upload.graph || { nodes: [], edges: [] }),
+            fileContents: upload.fileContents,
+            statistics: {
+                totalFiles: upload.statistics.totalFiles || upload.asts.length,
+                totalSymbols: upload.statistics.totalSymbols || upload.symbols.length,
+            },
+        });
+        // Delete the upload session
+        chunkedUploads.delete(uploadId);
+        res.json(result);
+    }
+    catch (error) {
+        Logger.error('Failed to finalize chunked upload:', error);
+        const errorStr = String(error);
+        let errorMessage = 'Failed to finalize upload';
+        if (errorStr.includes('ENOMEM') || errorStr.includes('heap')) {
+            errorMessage = 'Server ran out of memory processing the project';
+        }
+        res.status(500).json({ error: errorMessage });
     }
 });
 /**