replicate-predictions-downloader 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE.md +21 -0
- package/README.md +131 -0
- package/example.js +40 -0
- package/index.js +430 -0
- package/package.json +45 -0
package/LICENSE.md
ADDED
@@ -0,0 +1,21 @@
MIT License

Copyright (c) 2024

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
package/README.md
ADDED
@@ -0,0 +1,131 @@
# Replicate Predictions Downloader

A Node.js script to download and organize all your Replicate predictions, including images, metadata, and other outputs before they expire.

## Features

- Downloads all predictions from your Replicate account
- Organizes files by model and date
- Creates ZIP archives for each model (optional)
- Saves enhanced metadata for all predictions
- Shows detailed download statistics
- Preserves predictions before they expire

## Why Use This Tool?

Replicate only stores your predictions temporarily. This tool helps you preserve your valuable work before it disappears, with intelligent organization to make your outputs easy to find later.

## Setup

### Prerequisites

- Node.js 18+ installed
- A Replicate account with API access

### Installation

#### Option 1: Clone and install locally

```bash
# Clone the repository
git clone https://github.com/closestfriend/replicate-predictions-downloader.git
cd replicate-predictions-downloader

# Install dependencies
npm install
```

#### Option 2: Install via npm (coming soon)

```bash
npm install -g replicate-predictions-downloader
```

### API Token Setup

Get your Replicate API token from: https://replicate.com/account/api-tokens

Set your API token using one of these methods:

1. Export in terminal (temporary):
```bash
export REPLICATE_API_TOKEN=your_token_here
```

2. Create a .env file (permanent):
```
REPLICATE_API_TOKEN=your_token_here
```

## Usage

Run the script:
```bash
# If installed locally
npm start

# Or directly
node index.js

# If installed globally (coming soon)
replicate-downloader
```

The script will:
1. Fetch all your predictions from Replicate
2. Download all outputs
3. Organize them by model
4. Create ZIP archives (if enabled)
5. Save detailed metadata
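
The same flow can also be driven from your own code through the `ReplicateDownloader` class that `index.js` exports (see `example.js` later in this diff). A minimal sketch, assuming `REPLICATE_API_TOKEN` is already set in the environment and the file name is just illustrative:

```js
// my-download.js (ES module; "type": "module" is set in package.json)
import ReplicateDownloader from './index.js';

// Instantiate with your token and run the full fetch/download/organize pipeline
const downloader = new ReplicateDownloader(process.env.REPLICATE_API_TOKEN);
await downloader.run();
```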

## Configuration

You can adjust these settings in the CONFIG object:
- `requestDelay`: Delay between API requests (ms)
- `downloadDelay`: Delay between downloads (ms)
- `maxPromptLength`: Maximum length for prompt in filenames
- `createZips`: Whether to create ZIP archives
- `enhancedMetadata`: Whether to save enhanced metadata
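
These map directly onto the `CONFIG` object near the top of `index.js`; the defaults shipped in this version look like this:

```js
const CONFIG = {
  requestDelay: 200,      // ms between API requests
  downloadDelay: 100,     // ms between downloads
  maxPromptLength: 50,    // max chars of the prompt used in filenames
  createZips: true,       // build one ZIP archive per model
  enhancedMetadata: true  // write the enhanced metadata JSON file
};
```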

## Output Structure

The tool creates a directory structure like this:

```
replicate_outputs_YYYY-MM-DD/
├── by-model/
│   ├── model1/
│   │   ├── YYYY-MM-DD_model1_prompt_id.png
│   │   ├── YYYY-MM-DD_model1_prompt_id.jpg
│   │   └── ...
│   ├── model2/
│   │   └── ...
│   └── ...
├── model1.zip
├── model2.zip
└── ...
replicate_metadata_YYYY-MM-DD.json
```
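
The metadata file follows the shape built by `saveEnhancedMetadata()` in `index.js`: per-model statistics plus the full prediction records. A trimmed, illustrative example (field names are from the code, values are made up):

```json
{
  "exported_at": "2024-06-01T12:00:00.000Z",
  "total_predictions": 42,
  "model_stats": {
    "some-model": { "total": 3, "successful": 2, "failed": 1, "canceled": 0 }
  },
  "predictions_by_model": {
    "some-model": [
      {
        "id": "abc12345",
        "created_at": "2024-06-01T10:30:00Z",
        "status": "succeeded",
        "input": { "prompt": "an example prompt" },
        "prompt_summary": "an-example-prompt",
        "has_output": true,
        "urls": { "get": "https://api.replicate.com/v1/predictions/abc12345" }
      }
    ]
  },
  "raw_predictions": ["... full prediction objects ..."]
}
```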

## Support

This is a tool I created for personal use. I'm sharing it in case others find it helpful, but I may not be able to provide extensive support. Pull requests are welcome!

## Contributing

Contributions are welcome! Please feel free to submit a Pull Request.

## Security Note

⚠️ Never commit your API token or .env file to version control!
The included .gitignore will help prevent this.

## License

This project is licensed under the MIT License - see the [LICENSE.md](LICENSE.md) file for details.

## Background

This tool was created while working on LLM behavior/personality research to preserve valuable predictions before they expired.
package/example.js
ADDED
@@ -0,0 +1,40 @@
// Example of using ReplicateDownloader as a module
import ReplicateDownloader from './index.js';
import dotenv from 'dotenv';

// Load environment variables from .env file (if present)
dotenv.config();

async function example() {
  // Check if API token is available
  if (!process.env.REPLICATE_API_TOKEN) {
    console.error('Error: REPLICATE_API_TOKEN not found in environment variables');
    console.log('Please set your API token before running this example');
    process.exit(1);
  }

  console.log('Starting Replicate Downloader example...');

  // Create a new instance with your API token
  const downloader = new ReplicateDownloader(process.env.REPLICATE_API_TOKEN);

  // Optional: Override default configuration
  // downloader.config = {
  //   requestDelay: 500,     // Increase delay between API requests
  //   downloadDelay: 200,    // Increase delay between downloads
  //   maxPromptLength: 30,   // Shorter prompt in filenames
  //   createZips: false,     // Disable ZIP creation
  //   enhancedMetadata: true // Keep enhanced metadata
  // };

  try {
    // Run the downloader
    await downloader.run();
    console.log('Example completed successfully!');
  } catch (error) {
    console.error('Example failed:', error.message);
  }
}

// Run the example
example().catch(console.error);
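
The `package.json` at the end of this diff wires this file to an npm script, so after installing dependencies it can be run with:

```bash
npm run example
# equivalent to: node example.js
```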
package/index.js
ADDED
@@ -0,0 +1,430 @@
import fs from 'fs';
import path from 'path';
import https from 'https';
import archiver from 'archiver';
import dotenv from 'dotenv';

// Load a .env file if present, then get the API token from the environment
// (dotenv is a declared dependency and the README documents .env-based setup)
dotenv.config();
const REPLICATE_API_TOKEN = process.env.REPLICATE_API_TOKEN;

// Configuration
const CONFIG = {
  requestDelay: 200,   // ms between requests (adjust for your CPU)
  downloadDelay: 100,  // ms between downloads
  maxPromptLength: 50, // chars for filename
  createZips: true,
  enhancedMetadata: true
};

class ReplicateDownloader {
  constructor(apiToken) {
    this.apiToken = apiToken;
    this.baseUrl = 'https://api.replicate.com/v1';
    this.allPredictions = [];
    this.downloadStats = {
      byModel: {},
      totalFiles: 0,
      totalSize: 0
    };
  }

  // Make authenticated request to Replicate API
  async makeRequest(url) {
    return new Promise((resolve, reject) => {
      const options = {
        headers: {
          'Authorization': `Token ${this.apiToken}`,
          'User-Agent': 'ReplicateDownloader/2.0'
        }
      };

      https.get(url, options, (res) => {
        let data = '';
        res.on('data', (chunk) => data += chunk);
        res.on('end', () => {
          try {
            resolve(JSON.parse(data));
          } catch (e) {
            reject(new Error(`Failed to parse JSON: ${e.message}`));
          }
        });
      }).on('error', reject);
    });
  }

  // Sanitize filename
  sanitizeFilename(str) {
    return str.replace(/[^a-zA-Z0-9\-_\s]/g, '')
      .replace(/\s+/g, '-')
      .toLowerCase()
      .substring(0, CONFIG.maxPromptLength);
  }

  // Extract model name from version or URL
  extractModelName(prediction) {
    if (prediction.model) {
      return prediction.model.split('/').pop();
    }
    if (prediction.version && prediction.version.model) {
      return prediction.version.model.split('/').pop();
    }
    if (prediction.urls && prediction.urls.get) {
      const urlParts = prediction.urls.get.split('/');
      const modelIndex = urlParts.indexOf('models');
      if (modelIndex !== -1 && urlParts[modelIndex + 1]) {
        return urlParts[modelIndex + 1];
      }
    }
    return 'unknown-model';
  }

  // Extract meaningful prompt/input info
  extractPromptInfo(prediction) {
    const input = prediction.input || {};

    // Common prompt fields
    const promptFields = ['prompt', 'text', 'description', 'input_text', 'query'];

    for (const field of promptFields) {
      if (input[field] && typeof input[field] === 'string') {
        return this.sanitizeFilename(input[field]);
      }
    }

    // Try to find any string input that looks like a prompt
    for (const [key, value] of Object.entries(input)) {
      if (typeof value === 'string' && value.length > 5 && value.length < 200) {
        return this.sanitizeFilename(value);
      }
    }

    return 'no-prompt';
  }

  // Generate enhanced filename
  generateFilename(prediction, fileIndex, extension) {
    const date = new Date(prediction.created_at).toISOString().split('T')[0];
    const modelName = this.extractModelName(prediction);
    const promptInfo = this.extractPromptInfo(prediction);
    const predictionId = prediction.id.substring(0, 8); // Short ID

    const suffix = fileIndex > 0 ? `_${fileIndex + 1}` : '';
    return `${date}_${modelName}_${promptInfo}_${predictionId}${suffix}.${extension}`;
  }

  // Download a file from URL
  async downloadFile(url, filepath) {
    return new Promise((resolve, reject) => {
      const file = fs.createWriteStream(filepath);
      https.get(url, (response) => {
        const contentLength = parseInt(response.headers['content-length'] || '0');

        response.pipe(file);
        file.on('finish', () => {
          file.close();
          resolve({
            filepath,
            size: contentLength
          });
        });
      }).on('error', (err) => {
        fs.unlink(filepath, () => {}); // Delete incomplete file
        reject(err);
      });
    });
  }

  // Fetch all predictions with pagination
  async fetchAllPredictions() {
    let nextUrl = `${this.baseUrl}/predictions`;
    let page = 1;

    console.log('Starting to fetch predictions...');

    while (nextUrl) {
      console.log(`Fetching page ${page}...`);

      try {
        const response = await this.makeRequest(nextUrl);

        if (response.results) {
          this.allPredictions.push(...response.results);
          console.log(`Found ${response.results.length} predictions on page ${page}`);
          console.log(`Total so far: ${this.allPredictions.length}`);
        }

        nextUrl = response.next;
        page++;

        // Custom delay
        await new Promise(resolve => setTimeout(resolve, CONFIG.requestDelay));

      } catch (error) {
        console.error(`Error fetching page ${page}:`, error.message);
        break;
      }
    }

    console.log(`\nTotal predictions fetched: ${this.allPredictions.length}`);
    return this.allPredictions;
  }

  // Save enhanced metadata
  async saveEnhancedMetadata() {
    const timestamp = new Date().toISOString().split('T')[0];
    const filename = `replicate_metadata_${timestamp}.json`;

    // Organize metadata by model
    const byModel = {};
    const modelStats = {};

    this.allPredictions.forEach(prediction => {
      const modelName = this.extractModelName(prediction);

      if (!byModel[modelName]) {
        byModel[modelName] = [];
        modelStats[modelName] = {
          total: 0,
          successful: 0,
          failed: 0,
          canceled: 0
        };
      }

      byModel[modelName].push({
        id: prediction.id,
        created_at: prediction.created_at,
        status: prediction.status,
        input: prediction.input,
        prompt_summary: this.extractPromptInfo(prediction),
        has_output: !!prediction.output,
        urls: prediction.urls
      });

      modelStats[modelName].total++;
      // The API reports 'succeeded', while the counter is named 'successful';
      // map it and skip statuses we don't track (e.g. 'processing').
      const statusKey = prediction.status === 'succeeded' ? 'successful' : prediction.status;
      if (modelStats[modelName][statusKey] !== undefined) {
        modelStats[modelName][statusKey]++;
      }
    });

    const metadata = {
      exported_at: new Date().toISOString(),
      total_predictions: this.allPredictions.length,
      model_stats: modelStats,
      predictions_by_model: byModel,
      raw_predictions: this.allPredictions // Full data for reference
    };

    fs.writeFileSync(filename, JSON.stringify(metadata, null, 2));
    console.log(`Saved enhanced metadata to: ${filename}`);
    return filename;
  }

  // Create ZIP archive for a model
  async createModelZip(modelName, modelDir) {
    return new Promise((resolve, reject) => {
      const zipPath = `${modelDir}.zip`;
      const output = fs.createWriteStream(zipPath);
      const archive = archiver('zip', { zlib: { level: 9 } });

      output.on('close', () => {
        console.log(`  Created ZIP: ${zipPath} (${archive.pointer()} bytes)`);
        resolve(zipPath);
      });

      archive.on('error', reject);
      archive.pipe(output);

      // Add all files from the model directory
      archive.directory(modelDir, false);
      archive.finalize();
    });
  }

  // Download and organize outputs by model
  async downloadAndOrganizeOutputs() {
    const timestamp = new Date().toISOString().split('T')[0];
    const baseDir = `replicate_outputs_${timestamp}`;

    if (!fs.existsSync(baseDir)) {
      fs.mkdirSync(baseDir);
    }

    console.log(`\nDownloading and organizing files in: ${baseDir}/`);

    let totalDownloaded = 0;
    let totalErrors = 0;
    const modelDirs = new Set();

    // Process successful predictions only
    const successfulPredictions = this.allPredictions.filter(p =>
      p.status === 'succeeded' && p.output
    );

    console.log(`Processing ${successfulPredictions.length} successful predictions...`);

    for (let i = 0; i < successfulPredictions.length; i++) {
      const prediction = successfulPredictions[i];
      const modelName = this.extractModelName(prediction);

      // Create model directory
      const modelDir = path.join(baseDir, 'by-model', modelName);
      if (!fs.existsSync(modelDir)) {
        fs.mkdirSync(modelDir, { recursive: true });
      }
      modelDirs.add(modelDir);

      console.log(`[${i + 1}/${successfulPredictions.length}] Processing ${modelName} prediction ${prediction.id.substring(0, 8)}...`);

      try {
        // Extract output URLs
        let outputs = [];

        if (Array.isArray(prediction.output)) {
          outputs = prediction.output;
        } else if (typeof prediction.output === 'string' && prediction.output.startsWith('http')) {
          outputs = [prediction.output];
        } else if (prediction.output && typeof prediction.output === 'object') {
          outputs = Object.values(prediction.output).filter(val =>
            typeof val === 'string' && val.startsWith('http')
          );
        }

        // Download each output file
        for (let j = 0; j < outputs.length; j++) {
          const url = outputs[j];
          if (url && url.startsWith('http')) {
            try {
              const extension = url.split('.').pop().split('?')[0] || 'bin';
              const filename = this.generateFilename(prediction, j, extension);
              const filepath = path.join(modelDir, filename);

              const result = await this.downloadFile(url, filepath);
              totalDownloaded++;
              this.downloadStats.totalFiles++;
              this.downloadStats.totalSize += result.size;

              if (!this.downloadStats.byModel[modelName]) {
                this.downloadStats.byModel[modelName] = { files: 0, size: 0 };
              }
              this.downloadStats.byModel[modelName].files++;
              this.downloadStats.byModel[modelName].size += result.size;

              console.log(`  Saved: ${filename}`);
            } catch (downloadError) {
              console.error(`  Failed to download: ${downloadError.message}`);
              totalErrors++;
            }
          }
        }

        // Delay between predictions
        await new Promise(resolve => setTimeout(resolve, CONFIG.downloadDelay));

      } catch (error) {
        console.error(`Error processing prediction ${prediction.id}:`, error.message);
        totalErrors++;
      }
    }

    // Create ZIP files for each model if enabled
    if (CONFIG.createZips) {
      console.log('\nCreating ZIP archives...');
      for (const modelDir of modelDirs) {
        try {
          await this.createModelZip(path.basename(modelDir), modelDir);
        } catch (error) {
          console.error(`Failed to create ZIP for ${modelDir}:`, error.message);
        }
      }
    }

    return {
      totalDownloaded,
      totalErrors,
      baseDir,
      modelCount: modelDirs.size
    };
  }

  // Format file size
  formatSize(bytes) {
    const sizes = ['B', 'KB', 'MB', 'GB'];
    if (bytes === 0) return '0 B';
    const i = Math.floor(Math.log(bytes) / Math.log(1024));
    return Math.round(bytes / Math.pow(1024, i) * 100) / 100 + ' ' + sizes[i];
  }

  // Main execution function
  async run() {
    try {
      console.log('Enhanced Replicate Predictions Downloader');
      console.log('============================================\n');

      // Fetch all predictions
      await this.fetchAllPredictions();

      if (this.allPredictions.length === 0) {
        console.log('No predictions found!');
        return;
      }

      // Save enhanced metadata
      const metadataFile = await this.saveEnhancedMetadata();

      // Download and organize outputs
      const downloadResults = await this.downloadAndOrganizeOutputs();

      console.log('\nDownload Complete!');
      console.log('====================');
      console.log(`Files organized in: ${downloadResults.baseDir}/`);
      console.log(`Metadata saved to: ${metadataFile}`);
      console.log(`Models processed: ${downloadResults.modelCount}`);
      console.log(`Files downloaded: ${downloadResults.totalDownloaded}`);
      console.log(`Errors: ${downloadResults.totalErrors}`);
      console.log(`Total size: ${this.formatSize(this.downloadStats.totalSize)}`);

      // Show breakdown by model
      console.log('\nBreakdown by Model:');
      Object.entries(this.downloadStats.byModel)
        .sort(([,a], [,b]) => b.files - a.files)
        .forEach(([model, stats]) => {
          console.log(`  ${model}: ${stats.files} files (${this.formatSize(stats.size)})`);
        });

      // Show prediction stats
      const successful = this.allPredictions.filter(p => p.status === 'succeeded').length;
      const failed = this.allPredictions.filter(p => p.status === 'failed').length;
      const canceled = this.allPredictions.filter(p => p.status === 'canceled').length;

      console.log('\nPrediction Stats:');
      console.log(`  Successful: ${successful}`);
      console.log(`  Failed: ${failed}`);
      console.log(`  Canceled: ${canceled}`);

    } catch (error) {
      console.error('Fatal error:', error.message);
    }
  }
}

// Usage
async function main() {
  if (!REPLICATE_API_TOKEN) {
    console.error('Please set your Replicate API token in the REPLICATE_API_TOKEN environment variable!');
    console.log('Find your token at: https://replicate.com/account/api-tokens');
    console.log('\nTo set the token, you can:');
    console.log('1. Export it in your terminal:');
    console.log('   export REPLICATE_API_TOKEN=your_token_here');
    console.log('\n2. Or create a .env file with:');
    console.log('   REPLICATE_API_TOKEN=your_token_here');
    return;
  }

  const downloader = new ReplicateDownloader(REPLICATE_API_TOKEN);
  await downloader.run();
}

// Run if this is the main module
if (import.meta.url === `file://${process.argv[1]}`) {
  main().catch(console.error);
}

export default ReplicateDownloader;
package/package.json
ADDED
@@ -0,0 +1,45 @@
{
  "name": "replicate-predictions-downloader",
  "version": "1.0.0",
  "description": "A Node.js script to download and organize all your Replicate predictions, including images, metadata, and other outputs",
  "main": "index.js",
  "type": "module",
  "scripts": {
    "start": "node index.js",
    "download": "node index.js",
    "example": "node example.js"
  },
  "keywords": [
    "replicate",
    "ai",
    "machine-learning",
    "downloader",
    "predictions",
    "organization",
    "replicate-api",
    "model-outputs"
  ],
  "author": "closestfriend",
  "license": "MIT",
  "repository": {
    "type": "git",
    "url": "git+https://github.com/closestfriend/replicate-predictions-downloader.git"
  },
  "bugs": {
    "url": "https://github.com/closestfriend/replicate-predictions-downloader/issues"
  },
  "homepage": "https://github.com/closestfriend/replicate-predictions-downloader#readme",
  "engines": {
    "node": ">=18.0.0"
  },
  "dependencies": {
    "archiver": "^5.3.1",
    "dotenv": "^16.3.1"
  },
  "files": [
    "index.js",
    "example.js",
    "README.md",
    "LICENSE.md"
  ]
}