@pioneer-platform/pioneer-discovery 8.11.11 → 8.11.15

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,179 @@
1
+ /**
2
+ * Download icons from ShapeShift icon mapping
3
+ */
4
+
5
+ const fs = require('fs');
6
+ const path = require('path');
7
+ const https = require('https');
8
+ const http = require('http');
9
+
10
+ const TAG = ' | download-shapeshift-icons | ';
11
+
12
+ // Paths
13
+ const MAPPING_PATH = path.join(__dirname, 'shapeshift-icon-mapping.json');
14
+ const DOWNLOAD_DIR = path.join(__dirname, '../../../../../services/pioneer-server/public/coins');
15
+ const PROGRESS_FILE = path.join(__dirname, '.shapeshift-download-progress.json');
16
+ const FAILED_FILE = path.join(__dirname, '.shapeshift-download-failed.json');
17
+
18
+ // Helper: Encode CAIP ID to base64 (matches upload-to-s3.js pattern)
19
function encodeAssetId(assetId) {
  // Base64 of the raw CAIP string — used as the on-disk icon filename.
  const raw = Buffer.from(assetId, 'utf8');
  return raw.toString('base64');
}
22
+
23
// Configuration
const BATCH_SIZE = 10; // Download 10 icons at a time
const DELAY_BETWEEN_BATCHES = 1000; // 1 second delay between batches

// Make sure the icon target directory exists before any downloads start
if (!fs.existsSync(DOWNLOAD_DIR)) {
  fs.mkdirSync(DOWNLOAD_DIR, { recursive: true });
}

// Read the mapping produced by extract-shapeshift-icons.js
console.log(TAG, 'Loading ShapeShift icon mapping...');
const mapping = JSON.parse(fs.readFileSync(MAPPING_PATH, 'utf8'));
const { matches, stats } = mapping;

console.log(TAG, `Total matches: ${matches.length}`);
console.log(TAG, `NEW icons to download: ${stats.newIconsAvailable}`);

// Resume support: start fresh, then overlay any state left by a previous run
let progress = { processed: new Set(), successful: 0, failed: [] };
if (fs.existsSync(PROGRESS_FILE)) {
  const saved = JSON.parse(fs.readFileSync(PROGRESS_FILE, 'utf8'));
  progress = {
    processed: new Set(saved.processed || []),
    successful: saved.successful || 0,
    failed: saved.failed || [],
  };
  console.log(TAG, `Resuming from previous run: ${progress.processed.size} already processed`);
}
49
+
50
// Download function
/**
 * Download `url` to `filePath`, following up to `redirectsLeft` redirects.
 *
 * Fixes over the original:
 * - Redirect depth is capped (the old code recursed forever on a redirect loop).
 * - Redirect/error responses are drained with `response.resume()` so the
 *   socket is released instead of leaking.
 * - A redirect with no Location header is rejected with a clear message
 *   (previously it recursed with `undefined` as the URL).
 * - Relative Location headers are resolved against the current URL.
 * - 307/308 redirects are followed in addition to 301/302.
 *
 * @param {string} url          - http(s) URL of the icon
 * @param {string} filePath     - destination path on disk
 * @param {number} redirectsLeft - remaining redirects allowed (default 5)
 * @returns {Promise<boolean>} resolves true on success
 */
function downloadIcon(url, filePath, redirectsLeft = 5) {
  return new Promise((resolve, reject) => {
    const protocol = url.startsWith('https') ? https : http;

    const request = protocol.get(url, (response) => {
      const status = response.statusCode;

      if (status === 200) {
        const fileStream = fs.createWriteStream(filePath);
        response.pipe(fileStream);

        fileStream.on('finish', () => {
          fileStream.close();
          resolve(true);
        });

        fileStream.on('error', (err) => {
          fs.unlink(filePath, () => {}); // Delete partial file
          reject(err);
        });
      } else if (status === 301 || status === 302 || status === 307 || status === 308) {
        response.resume(); // drain so the connection can be reused/closed

        const location = response.headers.location;
        if (!location) {
          reject(new Error(`Redirect (HTTP ${status}) with no Location header: ${url}`));
        } else if (redirectsLeft <= 0) {
          reject(new Error(`Too many redirects: ${url}`));
        } else {
          // Resolve relative redirect targets against the current URL
          const nextUrl = new URL(location, url).toString();
          resolve(downloadIcon(nextUrl, filePath, redirectsLeft - 1));
        }
      } else {
        response.resume(); // discard body so the socket is not held open
        reject(new Error(`HTTP ${status}: ${url}`));
      }
    });

    request.on('error', reject);
    request.setTimeout(10000, () => {
      request.destroy();
      reject(new Error('Timeout'));
    });
  });
}
85
+
86
// Save progress
/**
 * Persist the current run state: the full progress snapshot to PROGRESS_FILE
 * and the failed-download list on its own to FAILED_FILE.
 */
function saveProgress() {
  const snapshot = {
    processed: Array.from(progress.processed),
    successful: progress.successful,
    failed: progress.failed,
    timestamp: Date.now(),
  };

  fs.writeFileSync(PROGRESS_FILE, JSON.stringify(snapshot, null, 2));
  fs.writeFileSync(FAILED_FILE, JSON.stringify(progress.failed, null, 2));
}
97
+
98
// Process matches in batches
/**
 * Download every matched icon that has not yet been processed.
 *
 * Icons are fetched BATCH_SIZE at a time with a pause between batches, and
 * progress is persisted after every batch so an interrupted run can resume.
 * Files already on disk are skipped. Prints a final summary.
 *
 * Fix: the success-rate line previously printed "NaN%" when nothing had
 * been processed (division by zero); it now guards the division.
 */
async function processMatches() {
  const toProcess = matches.filter(m => !progress.processed.has(m.caipId));

  console.log(TAG, `\nProcessing ${toProcess.length} icons in batches of ${BATCH_SIZE}...\n`);

  for (let i = 0; i < toProcess.length; i += BATCH_SIZE) {
    const batch = toProcess.slice(i, i + BATCH_SIZE);
    const batchNum = Math.floor(i / BATCH_SIZE) + 1;
    const totalBatches = Math.ceil(toProcess.length / BATCH_SIZE);

    console.log(`\nšŸ“¦ Batch ${batchNum}/${totalBatches} (${i + 1}-${Math.min(i + BATCH_SIZE, toProcess.length)}/${toProcess.length})`);

    const downloads = batch.map(async (match) => {
      // Prefer the precomputed filename from the mapping; fall back to encoding here
      const filename = match.encodedFilename || `${encodeAssetId(match.caipId)}.png`;
      const filePath = path.join(DOWNLOAD_DIR, filename);

      // Skip if already exists
      if (fs.existsSync(filePath)) {
        console.log(` ā­ļø ${match.pioneerSymbol.padEnd(10)} Already exists`);
        progress.processed.add(match.caipId);
        return { success: true, skipped: true };
      }

      try {
        await downloadIcon(match.iconUrl, filePath);
        console.log(` āœ… ${match.pioneerSymbol.padEnd(10)} ${match.pioneerName.slice(0, 30)}`);
        progress.processed.add(match.caipId);
        progress.successful++;
        return { success: true };
      } catch (error) {
        console.log(` āŒ ${match.pioneerSymbol.padEnd(10)} ${error.message}`);
        // Mark as processed so a resumed run does not retry a known-bad URL
        progress.processed.add(match.caipId);
        progress.failed.push({
          caipId: match.caipId,
          symbol: match.pioneerSymbol,
          name: match.pioneerName,
          iconUrl: match.iconUrl,
          error: error.message
        });
        return { success: false };
      }
    });

    await Promise.all(downloads);

    // Save progress after each batch
    saveProgress();

    // Delay between batches
    if (i + BATCH_SIZE < toProcess.length) {
      await new Promise(resolve => setTimeout(resolve, DELAY_BETWEEN_BATCHES));
    }
  }

  // Final summary
  console.log('\n========================================');
  console.log('šŸ“Š DOWNLOAD COMPLETE');
  console.log('========================================');
  console.log(`Total processed: ${progress.processed.size}`);
  console.log(`Successfully downloaded: ${progress.successful}`);
  console.log(`Failed: ${progress.failed.length}`);
  // BUG FIX: guard against division by zero (previously printed "NaN%")
  const successRate = progress.processed.size > 0
    ? ((progress.successful / progress.processed.size) * 100).toFixed(1)
    : '0.0';
  console.log(`Success rate: ${successRate}%`);
  console.log('========================================\n');

  if (progress.failed.length > 0) {
    console.log(`āš ļø Failed downloads saved to: ${FAILED_FILE}`);
  }
  console.log(`āœ… Progress saved to: ${PROGRESS_FILE}`);
}
168
+
169
// Run — persist whatever progress we made even when the run aborts.
(async () => {
  try {
    await processMatches();
    console.log(TAG, 'āœ… All done!');
    process.exit(0);
  } catch (error) {
    console.error(TAG, 'āŒ Error:', error);
    saveProgress();
    process.exit(1);
  }
})();
@@ -0,0 +1,152 @@
1
+ /**
2
+ * Extract icon URLs from ShapeShift's asset data and map to Pioneer assets
3
+ *
4
+ * ShapeShift structure:
5
+ * - assetIdPrefixes: array of CAIP prefixes
6
+ * - encodedAssetIds: array of compressed IDs like "6:0xdac17..."
7
+ * - encodedAssets: array of [assetIdx, name, precision, color, icon[], symbol, relatedAssetKey, isPool]
8
+ *
9
+ * FIELDS order: ['assetIdx', 'name', 'precision', 'color', 'icon', 'symbol', 'relatedAssetKey', 'isPool']
10
+ */
11
+
12
+ const fs = require('fs');
13
+ const path = require('path');
14
+
15
+ const TAG = ' | extract-shapeshift-icons | ';
16
+
17
+ // Paths
18
+ const SHAPESHIFT_DATA_PATH = '/Users/highlander/WebstormProjects/web/src/lib/asset-service/service/encodedAssetData.json';
19
+ const PIONEER_ASSETS_PATH = path.join(__dirname, '../src/generatedAssetData.json');
20
+ const OUTPUT_MAPPING = path.join(__dirname, 'shapeshift-icon-mapping.json');
21
+ const DOWNLOAD_DIR = path.join(__dirname, '../../../../../services/pioneer-server/public/coins');
22
+
23
// Pull ShapeShift's encoded asset bundle into memory
console.log(TAG, 'Loading ShapeShift asset data...');
const shapeshiftData = JSON.parse(fs.readFileSync(SHAPESHIFT_DATA_PATH, 'utf8'));
const { assetIdPrefixes, encodedAssetIds, encodedAssets } = shapeshiftData;

console.log(TAG, `Loaded ${encodedAssetIds.length} ShapeShift assets`);
28
+
29
// Decode ShapeShift asset IDs
/**
 * Expand an encoded ID of the form "<prefixIdx>:<assetReference>" into a
 * full CAIP ID using the prefix table.
 *
 * Fix: split on the FIRST ':' rather than the last. The prefix index is
 * always numeric and cannot contain ':', whereas the asset reference may
 * (the old lastIndexOf-based split produced `prefixes[NaN]` for such IDs).
 *
 * @param {string} encodedId - e.g. "6:0xdac17..."
 * @param {string[]} prefixes - CAIP prefix table (assetIdPrefixes)
 * @returns {string} full CAIP asset ID
 */
function decodeAssetId(encodedId, prefixes) {
  const colonIndex = encodedId.indexOf(':');
  const prefixIdx = Number(encodedId.substring(0, colonIndex));
  const assetReference = encodedId.substring(colonIndex + 1);
  return prefixes[prefixIdx] + ':' + assetReference;
}
36
+
37
// Field indices from FIELDS constant
// (order: assetIdx, name, precision, color, icon, symbol, relatedAssetKey, isPool)
const FIELD_ICON = 4;
const FIELD_SYMBOL = 5;
const FIELD_NAME = 1;

// Build ShapeShift asset map: CAIP ID -> icon URL(s)
const shapeshiftIconMap = new Map();
let totalWithIcons = 0;

encodedAssetIds.forEach((encodedId, idx) => {
  const caip = decodeAssetId(encodedId, assetIdPrefixes);
  const record = encodedAssets[idx];
  const icons = record[FIELD_ICON];

  // Assets without artwork are simply not added to the map
  if (!icons || icons.length === 0) return;

  shapeshiftIconMap.set(caip, {
    assetId: caip,
    symbol: record[FIELD_SYMBOL],
    name: record[FIELD_NAME],
    icons,
    primaryIcon: icons[0],
  });
  totalWithIcons += 1;
});

console.log(TAG, `ShapeShift assets with icons: ${totalWithIcons} / ${encodedAssetIds.length}`);
66
+
67
// Load Pioneer assets (keyed by CAIP ID)
console.log(TAG, 'Loading Pioneer assets...');
const pioneerAssets = JSON.parse(fs.readFileSync(PIONEER_ASSETS_PATH, 'utf8'));
console.log(TAG, `Loaded ${Object.keys(pioneerAssets).length} Pioneer assets`);
71
+
72
// Helper: Encode CAIP ID to base64 (matches upload-to-s3.js pattern)
function encodeAssetId(assetId) {
  const raw = Buffer.from(assetId, 'utf8');
  return raw.toString('base64');
}
76
+
77
// Match Pioneer assets to ShapeShift icons
const matches = [];
const missing = [];

for (const [caipId, asset] of Object.entries(pioneerAssets)) {
  const ssAsset = shapeshiftIconMap.get(caipId);

  // No ShapeShift entry for this CAIP ID — record it as missing
  if (!ssAsset) {
    missing.push({
      caipId,
      symbol: asset.symbol,
      name: asset.name
    });
    continue;
  }

  const encodedFilename = `${encodeAssetId(caipId)}.png`;
  matches.push({
    caipId,
    encodedFilename,
    pioneerSymbol: asset.symbol,
    pioneerName: asset.name,
    shapeshiftSymbol: ssAsset.symbol,
    shapeshiftName: ssAsset.name,
    iconUrl: ssAsset.primaryIcon,
    allIcons: ssAsset.icons,
    hasLocalIcon: fs.existsSync(path.join(DOWNLOAD_DIR, encodedFilename))
  });
}
105
+
106
// Report how the matching went
console.log(TAG, '\n========================================');
console.log(TAG, 'MATCHING RESULTS');
console.log(TAG, '========================================');
const totalPioneer = Object.keys(pioneerAssets).length;
console.log(TAG, `Total Pioneer assets: ${totalPioneer}`);
console.log(TAG, `ShapeShift matches found: ${matches.length}`);
console.log(TAG, `Missing from ShapeShift: ${missing.length}`);
console.log(TAG, `Match rate: ${((matches.length / totalPioneer) * 100).toFixed(1)}%`);

// Count how many we already have locally
const alreadyHaveLocal = matches.filter(m => m.hasLocalIcon).length;
const newIconsAvailable = matches.length - alreadyHaveLocal;

console.log(TAG, '\n========================================');
console.log(TAG, 'ICON AVAILABILITY');
console.log(TAG, '========================================');
console.log(TAG, `Already have locally: ${alreadyHaveLocal}`);
console.log(TAG, `NEW icons available: ${newIconsAvailable} šŸŽ‰`);

// Save mapping file (consumed by download-shapeshift-icons.js)
const mapping = {
  generatedAt: new Date().toISOString(),
  stats: {
    totalPioneerAssets: totalPioneer,
    shapeshiftMatches: matches.length,
    alreadyHaveLocal,
    newIconsAvailable,
    missingFromShapeshift: missing.length
  },
  matches,
  missing: missing.slice(0, 100) // Sample of missing
};

fs.writeFileSync(OUTPUT_MAPPING, JSON.stringify(mapping, null, 2));
console.log(TAG, `\nāœ… Mapping saved to: ${OUTPUT_MAPPING}`);

// Show sample of new icons available
console.log(TAG, '\n========================================');
console.log(TAG, 'SAMPLE NEW ICONS (first 20)');
console.log(TAG, '========================================');
const sample = matches.filter(m => !m.hasLocalIcon).slice(0, 20);
for (const m of sample) {
  console.log(`${m.pioneerSymbol.padEnd(10)} ${m.pioneerName.slice(0, 30).padEnd(32)} ${m.iconUrl}`);
}

console.log(TAG, '\nāœ… Done! Run download-shapeshift-icons.js to download the new icons');
@@ -0,0 +1,315 @@
1
+ #!/usr/bin/env node
2
+
3
+ /*
4
+ * Fix S3 Permissions for Existing Files
5
+ *
6
+ * This script:
7
+ * 1. Lists all files in the S3 bucket
8
+ * 2. Sets ACL to 'public-read' for each file
9
+ * 3. Verifies accessibility after setting ACL
10
+ * 4. Reports any files that still have issues
11
+ *
12
+ * Use this to fix existing files that return 403 Forbidden
13
+ */
14
+
15
+ const { execSync } = require('child_process');
16
+ const https = require('https');
17
+ const fs = require('fs');
18
+ const path = require('path');
19
+
20
// Configuration
const S3_ENDPOINT = 'https://sfo3.digitaloceanspaces.com';
const S3_BUCKET = 'keepkey';
const S3_REGION = 'sfo3';
const CDN_URL = 'https://keepkey.sfo3.cdn.digitaloceanspaces.com';

// AWS credentials — MUST be supplied via the environment.
// SECURITY FIX: the previous version shipped a hard-coded access key and
// secret as fallbacks in source control. Those credentials must be treated
// as compromised and rotated; never commit secrets to the repository.
const AWS_ACCESS_KEY_ID = process.env.AWS_ACCESS_KEY_ID;
const AWS_SECRET_ACCESS_KEY = process.env.AWS_SECRET_ACCESS_KEY;

if (!AWS_ACCESS_KEY_ID || !AWS_SECRET_ACCESS_KEY) {
  console.error('āŒ AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY must be set in the environment');
  process.exit(1);
}

const PROGRESS_FILE = path.join(__dirname, '.fix-permissions-progress.json');
const FAILED_FILE = path.join(__dirname, '.fix-permissions-failed.json');
32
+
33
/**
 * Check whether `url` answers an HTTP 200 to a HEAD request.
 *
 * Never rejects: network errors and timeouts both resolve to false.
 *
 * @param {string} url
 * @param {number} [timeout=5000] - milliseconds before giving up
 * @returns {Promise<boolean>}
 */
async function urlExists(url, timeout = 5000) {
  return new Promise((resolve) => {
    const req = https.request(url, { method: 'HEAD', timeout }, (res) => {
      resolve(res.statusCode === 200);
    });

    req.on('timeout', () => {
      req.destroy();
      resolve(false);
    });
    req.on('error', () => resolve(false));

    req.end();
  });
}
51
+
52
/**
 * Set ACL to public-read for a file.
 *
 * SECURITY FIX: credentials are passed through the child process environment
 * instead of being interpolated into the shell command string, so they never
 * appear in process listings, shell history, or error output, and cannot
 * break shell quoting.
 *
 * @param {string} s3Key - object key inside S3_BUCKET
 * @returns {boolean} true when the aws CLI call succeeded
 */
function setPublicAcl(s3Key) {
  try {
    const cmd = `aws s3api put-object-acl --bucket ${S3_BUCKET} --key "${s3Key}" --acl public-read --endpoint-url=${S3_ENDPOINT}`;

    execSync(cmd, {
      stdio: 'pipe',
      env: { ...process.env, AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY },
    });
    return true;
  } catch (error) {
    console.error(`  āŒ ACL set error: ${error.message}`);
    return false;
  }
}
66
+
67
/**
 * Wait for specified milliseconds.
 * @param {number} ms
 * @returns {Promise<void>}
 */
function sleep(ms) {
  return new Promise((resolve) => {
    setTimeout(resolve, ms);
  });
}
73
+
74
/**
 * Parse `aws s3 ls` style output into a list of .png object keys.
 * Each line looks like "<date> <time> <size> <key>"; the key is column 4.
 * Extracted so the cached-file path and the CLI path share one parser
 * (the original duplicated this logic verbatim).
 *
 * @param {string} output
 * @returns {string[]}
 */
function parsePngKeys(output) {
  return output
    .split('\n')
    .filter(line => line.trim())
    .map(line => line.trim().split(/\s+/)[3]) // 4th column is the key
    .filter(filename => filename && filename.endsWith('.png'));
}

/**
 * Get list of all files in S3 bucket.
 *
 * Prefers a cached listing (s3-files.txt) to avoid ENOBUFS on very large
 * buckets, falling back to the aws CLI. Returns [] when listing fails.
 * Credentials are passed via the child environment, not the command string.
 *
 * @returns {string[]} .png object keys
 */
function listS3Files() {
  // Use cached file list to avoid ENOBUFS error
  const cachedFile = path.join(__dirname, '..', '..', '..', '..', 's3-files.txt');

  if (fs.existsSync(cachedFile)) {
    console.log('šŸ“‹ Using cached S3 file list...');
    return parsePngKeys(fs.readFileSync(cachedFile, 'utf8'));
  }

  // Fallback to AWS CLI if no cached file
  try {
    console.log('šŸ“‹ Fetching S3 file list (this may take a while)...');
    const cmd = `aws s3 ls s3://${S3_BUCKET}/coins/ --endpoint-url=${S3_ENDPOINT} --recursive`;

    const output = execSync(cmd, {
      encoding: 'utf8',
      maxBuffer: 50 * 1024 * 1024,
      env: { ...process.env, AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY },
    });

    return parsePngKeys(output);
  } catch (error) {
    console.error('āŒ Failed to list S3 files:', error.message);
    return [];
  }
}
119
+
120
/**
 * Load resumable progress from disk, or return a fresh state.
 * @returns {{processed: string[], fixed: number, failed: number}}
 */
function loadProgress() {
  if (!fs.existsSync(PROGRESS_FILE)) {
    return { processed: [], fixed: 0, failed: 0 };
  }
  return JSON.parse(fs.readFileSync(PROGRESS_FILE, 'utf8'));
}
129
+
130
/**
 * Persist the progress state to PROGRESS_FILE as pretty-printed JSON.
 */
function saveProgress(progress) {
  const json = JSON.stringify(progress, null, 2);
  fs.writeFileSync(PROGRESS_FILE, json);
}
136
+
137
/**
 * Load the failed-files list from disk, or return an empty one.
 * @returns {{files: Array}}
 */
function loadFailed() {
  if (!fs.existsSync(FAILED_FILE)) {
    return { files: [] };
  }
  return JSON.parse(fs.readFileSync(FAILED_FILE, 'utf8'));
}
146
+
147
/**
 * Persist the failed-files list to FAILED_FILE as pretty-printed JSON.
 */
function saveFailed(failed) {
  const json = JSON.stringify(failed, null, 2);
  fs.writeFileSync(FAILED_FILE, json);
}
153
+
154
/**
 * Process a single S3 object: verify it is publicly readable via the CDN
 * and, if not, set a public-read ACL and re-check after a short propagation
 * wait. Mutates `progress`, `failed` and `stats` in place and persists
 * state every 10 files.
 *
 * @param {string} s3Key
 * @param {{processed: string[], fixed: number, failed: number}} progress
 * @param {{files: Array}} failed
 * @param {object} stats - running counters for the final summary
 */
async function processFile(s3Key, progress, failed, stats) {
  stats.total++;

  // Skip anything a previous run already handled
  if (progress.processed.includes(s3Key)) {
    console.log(`ā­ļø [${stats.total}] Skipping (already processed): ${s3Key.substring(0, 80)}`);
    stats.skipped++;
    return;
  }

  const cdnUrl = `${CDN_URL}/${s3Key}`;

  console.log(`\nšŸ”„ [${stats.total}/${stats.totalFiles}] Processing: ${s3Key.substring(0, 80)}...`);

  // First probe: the object may already be public
  console.log(` šŸ” Checking current accessibility...`);
  const alreadyPublic = await urlExists(cdnUrl);

  if (alreadyPublic) {
    console.log(` āœ… Already accessible - skipping`);
    progress.processed.push(s3Key);
    stats.alreadyAccessible++;
    return;
  }

  console.log(` āš ļø Currently returns 403 - setting public-read ACL...`);

  // Attempt the ACL change; record a failure if the CLI call itself fails
  if (!setPublicAcl(s3Key)) {
    console.log(` āŒ Failed to set ACL`);
    failed.files.push({ s3Key, cdnUrl, reason: 'acl_set_failed' });
    progress.processed.push(s3Key);
    progress.failed++;
    stats.aclFailed++;
    return;
  }

  console.log(` āœ… ACL set - waiting for propagation...`);
  await sleep(1000);

  // Second probe: confirm the ACL change actually took effect
  console.log(` šŸ” Verifying accessibility...`);
  const nowAccessible = await urlExists(cdnUrl);

  if (nowAccessible) {
    console.log(` āœ… Now accessible!`);
    progress.fixed++;
    stats.fixed++;
  } else {
    console.log(` āŒ Still not accessible`);
    failed.files.push({ s3Key, cdnUrl, reason: 'still_not_accessible' });
    progress.failed++;
    stats.stillFailed++;
  }

  progress.processed.push(s3Key);

  // Checkpoint state every 10 files so an interrupted run loses little work
  if (stats.total % 10 === 0) {
    saveProgress(progress);
    saveFailed(failed);
    console.log(`\nšŸ“Š Progress saved - Total: ${stats.total}/${stats.totalFiles}, Fixed: ${stats.fixed}, Failed: ${stats.aclFailed + stats.stillFailed}`);
  }
}
231
+
232
/**
 * Main execution: list every object in the bucket, fix permissions where
 * needed via processFile(), then print a summary and persist final state.
 */
async function main() {
  console.log('šŸš€ Starting S3 permissions fix process...\n');
  console.log(`ā˜ļø S3 bucket: ${S3_BUCKET}`);
  console.log(`🌐 CDN URL: ${CDN_URL}\n`);

  // Get list of all files
  console.log('šŸ“‹ Getting list of all files in S3...');
  const files = listS3Files();

  if (files.length === 0) {
    console.error('āŒ No files found in S3');
    process.exit(1);
  }

  console.log(`āœ… Found ${files.length} files\n`);

  // Resumable state from any earlier run
  const progress = loadProgress();
  const failed = loadFailed();

  const stats = {
    total: 0,
    totalFiles: files.length,
    skipped: 0,
    alreadyAccessible: 0,
    fixed: 0,
    aclFailed: 0,
    stillFailed: 0,
  };

  console.log(`šŸ“Š Already processed: ${progress.processed.length}`);
  console.log(`šŸ“Š Remaining: ${files.length - progress.processed.length}\n`);

  // Handle files one at a time, pausing briefly to be nice to the API
  for (const s3Key of files) {
    await processFile(s3Key, progress, failed, stats);
    await sleep(200);
  }

  // Final save
  saveProgress(progress);
  saveFailed(failed);

  // Cleanup progress file if every file completed with no failures
  if (progress.processed.length === files.length && failed.files.length === 0) {
    fs.unlinkSync(PROGRESS_FILE);
    console.log('\nāœ… All files processed successfully - progress file cleaned up');
  }

  // Print summary
  const divider = '='.repeat(80);
  console.log('\n' + divider);
  console.log('šŸ“Š FINAL SUMMARY');
  console.log(divider);
  console.log(`Total files: ${files.length}`);
  console.log(`Already processed (skipped): ${stats.skipped}`);
  console.log(`Already accessible: ${stats.alreadyAccessible}`);
  console.log(`Fixed successfully: ${stats.fixed}`);
  console.log(`ACL set failed: ${stats.aclFailed}`);
  console.log(`Still not accessible: ${stats.stillFailed}`);
  console.log(`Total failed: ${stats.aclFailed + stats.stillFailed}`);
  console.log(divider);

  if (failed.files.length > 0) {
    console.log(`\nāš ļø ${failed.files.length} files failed - see ${FAILED_FILE}`);

    // Sample of failed files
    console.log('\nSample of failed files:');
    for (const file of failed.files.slice(0, 10)) {
      console.log(` ${file.s3Key} - ${file.reason}`);
    }
  }

  console.log('\nāœ… Permission fix process complete!\n');
}
311
+
312
// Entry point — any unhandled error aborts with a non-zero exit code.
main().catch((err) => {
  console.error('āŒ Fatal error:', err);
  process.exit(1);
});