@arela/uploader 0.2.2 → 0.2.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/package.json +1 -1
  2. package/src/index.js +40 -26
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@arela/uploader",
3
- "version": "0.2.2",
3
+ "version": "0.2.3",
4
4
  "description": "CLI to upload files/directories to Arela",
5
5
  "bin": {
6
6
  "arela": "./src/index.js"
package/src/index.js CHANGED
@@ -158,8 +158,8 @@ const checkCredentials = async (forceSupabase = false) => {
158
158
  if (!supabaseUrl || !supabaseKey || !bucket) {
159
159
  console.error(
160
160
  '⚠️ Missing credentials. Please set either:\n' +
161
- ' - ARELA_API_URL and ARELA_API_TOKEN for API mode, or\n' +
162
- ' - SUPABASE_URL, SUPABASE_KEY, and SUPABASE_BUCKET for direct mode',
161
+ ' - ARELA_API_URL and ARELA_API_TOKEN for API mode, or\n' +
162
+ ' - SUPABASE_URL, SUPABASE_KEY, and SUPABASE_BUCKET for direct mode',
163
163
  );
164
164
  process.exit(1);
165
165
  }
@@ -1360,38 +1360,52 @@ const uploadFilesByRfc = async (options = {}) => {
1360
1360
  );
1361
1361
 
1362
1362
  // Step 3: Get ALL files that have these arela_paths (including supporting documents)
1363
- // Use pagination to ensure we get all files, regardless of count
1363
+ // Process arela_paths in smaller chunks to avoid URI length limits
1364
1364
  let allRelatedFiles = [];
1365
- let hasMore = true;
1366
- let offset = 0;
1365
+ const arelaPathChunkSize = 50; // Process 50 arela_paths at a time to avoid URI limits
1367
1366
  const queryBatchSize = 1000;
1368
1367
 
1369
- console.log('📥 Fetching all related files (with pagination)...');
1368
+ console.log('📥 Fetching all related files (processing arela_paths in chunks to avoid URI limits)...');
1370
1369
 
1371
- while (hasMore) {
1372
- const { data: batch, error: queryError } = await supabase
1373
- .from('uploader')
1374
- .select('id, original_path, arela_path, filename, rfc, document_type')
1375
- .in('arela_path', uniqueArelaPaths)
1376
- .not('original_path', 'is', null)
1377
- .range(offset, offset + queryBatchSize - 1);
1370
+ // Process arela_paths in chunks
1371
+ for (let i = 0; i < uniqueArelaPaths.length; i += arelaPathChunkSize) {
1372
+ const arelaPathChunk = uniqueArelaPaths.slice(i, i + arelaPathChunkSize);
1373
+ console.log(` Processing arela_path chunk ${Math.floor(i / arelaPathChunkSize) + 1}/${Math.ceil(uniqueArelaPaths.length / arelaPathChunkSize)} (${arelaPathChunk.length} paths)`);
1378
1374
 
1379
- if (queryError) {
1380
- console.error('❌ Error fetching related files:', queryError.message);
1381
- return { processedCount: 0, uploadedCount: 0, errorCount: 1 };
1382
- }
1375
+ // For each chunk of arela_paths, use pagination to get all related files
1376
+ let hasMore = true;
1377
+ let offset = 0;
1383
1378
 
1384
- if (!batch || batch.length === 0) {
1385
- hasMore = false;
1386
- } else {
1387
- allRelatedFiles = allRelatedFiles.concat(batch);
1388
- offset += queryBatchSize;
1379
+ while (hasMore) {
1380
+ const { data: batch, error: queryError } = await supabase
1381
+ .from('uploader')
1382
+ .select('id, original_path, arela_path, filename, rfc, document_type')
1383
+ .in('arela_path', arelaPathChunk)
1384
+ .not('original_path', 'is', null)
1385
+ .range(offset, offset + queryBatchSize - 1);
1386
+
1387
+ if (queryError) {
1388
+ console.error(`❌ Error fetching related files for chunk ${Math.floor(i / arelaPathChunkSize) + 1}:`, queryError.message);
1389
+ return { processedCount: 0, uploadedCount: 0, errorCount: 1 };
1390
+ }
1389
1391
 
1390
- // If we got less than queryBatchSize, we've reached the end
1391
- if (batch.length < queryBatchSize) {
1392
+ if (!batch || batch.length === 0) {
1392
1393
  hasMore = false;
1394
+ } else {
1395
+ allRelatedFiles = allRelatedFiles.concat(batch);
1396
+ offset += queryBatchSize;
1397
+
1398
+ // If we got less than queryBatchSize, we've reached the end for this chunk
1399
+ if (batch.length < queryBatchSize) {
1400
+ hasMore = false;
1401
+ }
1393
1402
  }
1394
1403
  }
1404
+
1405
+ // Small delay between chunks to avoid overwhelming the database
1406
+ if (i + arelaPathChunkSize < uniqueArelaPaths.length) {
1407
+ await new Promise((resolve) => setTimeout(resolve, 100));
1408
+ }
1395
1409
  }
1396
1410
 
1397
1411
  if (!allRelatedFiles || allRelatedFiles.length === 0) {
@@ -1560,8 +1574,8 @@ const uploadFilesByRfc = async (options = {}) => {
1560
1574
  // Set folder structure for this group - concatenate custom prefix with arela_path
1561
1575
  const folderStructure = options.folderStructure
1562
1576
  ? `${options.folderStructure}/${arelaPath}`
1563
- .replace(/\/+/g, '/')
1564
- .replace(/\/$/, '')
1577
+ .replace(/\/+/g, '/')
1578
+ .replace(/\/$/, '')
1565
1579
  : arelaPath;
1566
1580
  pathFormData.append('folderStructure', folderStructure);
1567
1581
  pathFormData.append('autoDetect', 'true');