@xiboplayer/cache 0.2.0 → 0.3.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +22 -90
- package/package.json +2 -2
- package/src/cache-proxy.js +8 -3
- package/src/cache.test.js +12 -2
- package/src/download-manager.js +767 -301
- package/src/download-manager.test.js +1130 -325
- package/src/index.js +3 -1
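For reference, a minimal usage sketch of the new flat-queue API in the download-manager diff below, assembled from the usage comment in that diff (the import path and the placeholder id/URL values are assumptions, not taken from the package docs):

```js
import { DownloadManager } from './src/download-manager.js';

async function downloadOne(signedUrl, md5) {
  const dm = new DownloadManager({
    concurrency: 6,               // matches Chromium's per-host connection limit
    chunkSize: 50 * 1024 * 1024,  // the "50MB" shorthand from the usage comment
    chunksPerFile: 2,             // per-file cap so one large file cannot hog every slot
  });

  const file = dm.enqueue({ id: 123, type: 'media', path: signedUrl, md5 });
  return file.wait();             // resolves once every download task for the file completes
}
```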
package/src/download-manager.js
CHANGED
@@ -1,343 +1,811 @@
 /**
- * DownloadManager -
+ * DownloadManager - Flat queue download orchestration
  *
  * Works in both browser and Service Worker contexts.
  * Handles download queue, concurrency control, parallel chunks, and MD5 verification.
  *
- * Architecture:
- * -
- * -
- * -
+ * Architecture (flat queue):
+ * - DownloadTask: Single HTTP fetch unit (one GET request — full file or one chunk)
+ * - FileDownload: Orchestrator that creates DownloadTasks for a file (HEAD + chunks)
+ * - DownloadQueue: Flat queue where every download unit competes equally for connection slots
+ * - DownloadManager: Public facade
+ *
+ * BEFORE: Queue[File, File, File] → each File internally spawns N chunk fetches
+ * AFTER: Queue[chunk, chunk, file, chunk, chunk, file, chunk] → flat, 1 fetch per slot
+ *
+ * This eliminates the two-layer concurrency problem where N files × M chunks per file
+ * could exceed Chromium's 6-per-host connection limit, causing head-of-line blocking.
+ *
+ * Per-file chunk limit (maxChunksPerFile) prevents one large file from hogging all
+ * connection slots, ensuring bandwidth is shared fairly and chunk 0 arrives fast.
  *
  * Usage:
- * const dm = new DownloadManager({ concurrency:
- * const
- * const blob = await
+ * const dm = new DownloadManager({ concurrency: 6, chunkSize: 50MB, chunksPerFile: 2 });
+ * const file = dm.enqueue({ id, type, path, md5 });
+ * const blob = await file.wait();
  */

-const DEFAULT_CONCURRENCY =
+const DEFAULT_CONCURRENCY = 6; // Max concurrent HTTP connections (matches Chromium per-host limit)
 const DEFAULT_CHUNK_SIZE = 50 * 1024 * 1024; // 50MB chunks
-const
+const DEFAULT_MAX_CHUNKS_PER_FILE = 3; // Max parallel chunk downloads per file
+const CHUNK_THRESHOLD = 100 * 1024 * 1024; // Files > 100MB get chunked
+const MAX_RETRIES = 3;
+const RETRY_DELAY_MS = 500; // Fast: 500ms, 1s, 1.5s → total ~3s
+const URGENT_CONCURRENCY = 2; // Slots when urgent chunk is active (bandwidth focus)
+const FETCH_TIMEOUT_MS = 600_000; // 10 minutes — 100MB chunk at ~2 Mbps
+const HEAD_TIMEOUT_MS = 15_000; // 15 seconds for HEAD requests
+
+/**
+ * Infer Content-Type from file path extension.
+ * Used when we skip HEAD (size already known from RequiredFiles).
+ */
+function inferContentType(fileInfo) {
+  const path = fileInfo.path || fileInfo.code || '';
+  const ext = path.split('.').pop()?.split('?')[0]?.toLowerCase();
+  const types = {
+    mp4: 'video/mp4', webm: 'video/webm', mp3: 'audio/mpeg',
+    png: 'image/png', jpg: 'image/jpeg', jpeg: 'image/jpeg',
+    gif: 'image/gif', svg: 'image/svg+xml', webp: 'image/webp',
+    css: 'text/css', js: 'application/javascript',
+    ttf: 'font/ttf', otf: 'font/otf', woff: 'font/woff', woff2: 'font/woff2',
+    xml: 'application/xml', xlf: 'application/xml',
+  };
+  return types[ext] || 'application/octet-stream';
+}
+
+// Priority levels — higher number = starts first
+export const PRIORITY = { normal: 0, layout: 1, high: 2, urgent: 3 };

 /**
- *
+ * BARRIER sentinel — hard gate in the download queue.
+ *
+ * When processQueue() encounters a BARRIER:
+ * - If tasks are still in-flight above it → STOP (slots stay empty)
+ * - If running === 0 → remove barrier, continue with tasks below
+ *
+ * Used by LayoutQueueBuilder to separate critical chunks (chunk-0, chunk-last)
+ * from remaining bulk chunks. Ensures video playback can start before all
+ * chunks finish downloading.
+ */
+export const BARRIER = Symbol('BARRIER');
+
+/**
+ * Parse the X-Amz-Expires absolute timestamp from a signed URL.
+ * Returns the expiry as a Unix timestamp (seconds), or Infinity if not found.
+ */
+function getUrlExpiry(url) {
+  try {
+    const match = url.match(/X-Amz-Expires=(\d+)/);
+    return match ? parseInt(match[1], 10) : Infinity;
+  } catch {
+    return Infinity;
+  }
+}
+
+/**
+ * Check if a signed URL has expired (or will expire within a grace period).
+ * @param {string} url - Signed URL with X-Amz-Expires parameter
+ * @param {number} graceSeconds - Seconds before actual expiry to consider it expired (default: 30)
+ * @returns {boolean}
+ */
+export function isUrlExpired(url, graceSeconds = 30) {
+  const expiry = getUrlExpiry(url);
+  if (expiry === Infinity) return false;
+  return (Date.now() / 1000) >= (expiry - graceSeconds);
+}
+
+/**
+ * DownloadTask - Single HTTP fetch unit
+ *
+ * Handles exactly one HTTP request: either a full small file GET or a single Range GET
+ * for one chunk of a larger file. Includes retry logic with exponential backoff.
  */
 export class DownloadTask {
   constructor(fileInfo, options = {}) {
     this.fileInfo = fileInfo;
-    this.
-    this.
-    this.
-    this.
-    this.
-    this.
-
-    // Set externally before download starts: (chunkIndex, chunkBlob, totalChunks) => Promise
-    this.onChunkDownloaded = null;
+    this.chunkIndex = options.chunkIndex ?? null;
+    this.rangeStart = options.rangeStart ?? null;
+    this.rangeEnd = options.rangeEnd ?? null;
+    this.state = 'pending';
+    this.blob = null;
+    this._parentFile = null;
+    this._priority = PRIORITY.normal;
   }

-
-
-
-
-
-
-
+  getUrl() {
+    const url = this.fileInfo.path;
+    if (isUrlExpired(url)) {
+      throw new Error(`URL expired for ${this.fileInfo.type}/${this.fileInfo.id} — waiting for fresh URL from next collection cycle`);
+    }
+    return url;
+  }
+
+  async start() {
+    this.state = 'downloading';
+    const headers = {};
+    if (this.rangeStart != null) {
+      headers['Range'] = `bytes=${this.rangeStart}-${this.rangeEnd}`;
     }

-
-
+    for (let attempt = 1; attempt <= MAX_RETRIES; attempt++) {
+      const ac = new AbortController();
+      const timer = setTimeout(() => ac.abort(), FETCH_TIMEOUT_MS);
+      try {
+        const url = this.getUrl();
+        const fetchOpts = { signal: ac.signal };
+        if (Object.keys(headers).length > 0) fetchOpts.headers = headers;
+        const response = await fetch(url, fetchOpts);
+
+        if (!response.ok && response.status !== 206) {
+          throw new Error(`Fetch failed: ${response.status}`);
+        }
+
+        this.blob = await response.blob();
+        this.state = 'complete';
+        return this.blob;
+
+      } catch (error) {
+        const msg = ac.signal.aborted ? `Timeout after ${FETCH_TIMEOUT_MS / 1000}s` : error.message;
+        if (attempt < MAX_RETRIES) {
+          const delay = RETRY_DELAY_MS * attempt;
+          const chunkLabel = this.chunkIndex != null ? ` chunk ${this.chunkIndex}` : '';
+          console.warn(`[DownloadTask] ${this.fileInfo.type}/${this.fileInfo.id}${chunkLabel} attempt ${attempt}/${MAX_RETRIES} failed: ${msg}. Retrying in ${delay / 1000}s...`);
+          await new Promise(resolve => setTimeout(resolve, delay));
+        } else {
+          this.state = 'failed';
+          throw ac.signal.aborted ? new Error(msg) : error;
+        }
+      } finally {
+        clearTimeout(timer);
+      }
     }
+  }
+}

-
-
-
+/**
+ * FileDownload - Orchestrates downloading a single file
+ *
+ * Does the HEAD request to determine file size, then:
+ * - Small file (≤ 100MB): creates 1 DownloadTask for the full file
+ * - Large file (> 100MB): creates N DownloadTasks, one per chunk
+ *
+ * All tasks are enqueued into the flat DownloadQueue where they compete
+ * equally for HTTP connection slots with tasks from other files.
+ *
+ * Provides wait() that resolves when ALL tasks for this file complete.
+ * Supports progressive caching via onChunkDownloaded callback.
+ */
+export class FileDownload {
+  constructor(fileInfo, options = {}) {
+    this.fileInfo = fileInfo;
+    this.options = options;
+    this.state = 'pending';
+    this.tasks = [];
+    this.completedChunks = 0;
+    this.totalChunks = 0;
+    this.totalBytes = 0;
+    this.downloadedBytes = 0;
+    this.onChunkDownloaded = null;
+    this.skipChunks = fileInfo.skipChunks || new Set();
+    this._contentType = 'application/octet-stream';
+    this._chunkBlobs = new Map();
+    this._runningCount = 0; // Currently running tasks for this file
+    this._resolve = null;
+    this._reject = null;
+    this._promise = new Promise((res, rej) => {
+      this._resolve = res;
+      this._reject = rej;
     });
+    this._promise.catch(() => {});
+  }
+
+  getUrl() {
+    const url = this.fileInfo.path;
+    if (isUrlExpired(url)) {
+      throw new Error(`URL expired for ${this.fileInfo.type}/${this.fileInfo.id} — waiting for fresh URL from next collection cycle`);
+    }
+    return url;
+  }
+
+  wait() {
+    return this._promise;
   }

   /**
-   *
+   * Determine file size and create DownloadTasks.
+   * Uses RequiredFiles size when available (instant, no network).
+   * Falls back to HEAD request only when size is unknown.
    */
-  async
-    const { id, type, path, md5 } = this.fileInfo;
-
+  async prepare(queue) {
     try {
-      this.state = '
-
-
-
-
-
-
+      this.state = 'preparing';
+      const { id, type, size } = this.fileInfo;
+      console.log('[FileDownload] Starting:', `${type}/${id}`);
+
+      // Use declared size from RequiredFiles — no HEAD needed for queue building
+      this.totalBytes = (size && size > 0) ? parseInt(size) : 0;
+      this._contentType = inferContentType(this.fileInfo);
+
+      if (this.totalBytes === 0) {
+        // No size declared — HEAD fallback (rare: only for files without CMS size)
+        const url = this.getUrl();
+        const ac = new AbortController();
+        const timer = setTimeout(() => ac.abort(), HEAD_TIMEOUT_MS);
+        try {
+          const head = await fetch(url, { method: 'HEAD', signal: ac.signal });
+          if (head.ok) {
+            this.totalBytes = parseInt(head.headers.get('Content-Length') || '0');
+            this._contentType = head.headers.get('Content-Type') || this._contentType;
+          }
+        } finally {
+          clearTimeout(timer);
+        }
       }

-      this.totalBytes
-      const contentType = headResponse.headers.get('Content-Type') || 'application/octet-stream';
+      console.log('[FileDownload] File size:', (this.totalBytes / 1024 / 1024).toFixed(1), 'MB');

-      console.log('[DownloadTask] File size:', (this.totalBytes / 1024 / 1024).toFixed(1), 'MB');
-
-      // Download in chunks if large file
-      let blob;
       const chunkSize = this.options.chunkSize || DEFAULT_CHUNK_SIZE;
-      const chunksPerFile = this.options.chunksPerFile || DEFAULT_CHUNKS_PER_FILE;

-      if (this.totalBytes >
-
-
-
-
+      if (this.totalBytes > CHUNK_THRESHOLD) {
+        const ranges = [];
+        for (let start = 0; start < this.totalBytes; start += chunkSize) {
+          ranges.push({
+            start,
+            end: Math.min(start + chunkSize - 1, this.totalBytes - 1),
+            index: ranges.length
+          });
+        }
+        this.totalChunks = ranges.length;

-
-
-
-
-
-
-
-        // Continue anyway (kiosk mode)
+        const needed = ranges.filter(r => !this.skipChunks.has(r.index));
+        const skippedCount = ranges.length - needed.length;
+
+        for (const r of ranges) {
+          if (this.skipChunks.has(r.index)) {
+            this.downloadedBytes += (r.end - r.start + 1);
+          }
         }
-      }

-
+        if (needed.length === 0) {
+          console.log('[FileDownload] All chunks already cached, nothing to download');
+          this.state = 'complete';
+          this._resolve(new Blob([], { type: this._contentType }));
+          return;
+        }

-
-
-
+        if (skippedCount > 0) {
+          console.log(`[FileDownload] Resuming: ${skippedCount} chunks cached, ${needed.length} to download`);
+        }

-
-
-
-
+        const isResume = skippedCount > 0;
+
+        if (isResume) {
+          const sorted = needed.sort((a, b) => a.index - b.index);
+          for (const r of sorted) {
+            const task = new DownloadTask(this.fileInfo, {
+              chunkIndex: r.index, rangeStart: r.start, rangeEnd: r.end
+            });
+            task._parentFile = this;
+            task._priority = PRIORITY.normal;
+            this.tasks.push(task);
+          }
+        } else {
+          for (const r of needed) {
+            const task = new DownloadTask(this.fileInfo, {
+              chunkIndex: r.index, rangeStart: r.start, rangeEnd: r.end
+            });
+            task._parentFile = this;
+            task._priority = (r.index === 0 || r.index === ranges.length - 1) ? PRIORITY.high : PRIORITY.normal;
+            this.tasks.push(task);
+          }
+        }
+
+        const highCount = this.tasks.filter(t => t._priority >= PRIORITY.high).length;
+        console.log(`[FileDownload] ${type}/${id}: ${this.tasks.length} chunks` +
+          (highCount > 0 ? ` (${highCount} priority)` : '') +
+          (isResume ? ' (resume)' : ''));
+
+      } else {
+        this.totalChunks = 1;
+        const task = new DownloadTask(this.fileInfo, {});
+        task._parentFile = this;
+        this.tasks.push(task);
       }
-      this.waiters = [];

-
+      queue.enqueueChunkTasks(this.tasks);
+      this.state = 'downloading';

     } catch (error) {
-      console.error('[
+      console.error('[FileDownload] Prepare failed:', `${this.fileInfo.type}/${this.fileInfo.id}`, error);
       this.state = 'failed';
-
-      // Reject all waiters
-      this.promise = Promise.reject(error);
-      this.promise.catch(() => {}); // Prevent unhandled rejection if nobody calls wait()
-      for (const waiter of this.waiters) {
-        waiter.reject(error);
-      }
-      this.waiters = [];
-
-      throw error;
+      this._reject(error);
     }
   }

-
-
-
-  async downloadFull(url) {
-    const response = await fetch(url);
-    if (!response.ok) {
-      throw new Error(`Download failed: ${response.status}`);
-    }
+  async onTaskComplete(task) {
+    this.completedChunks++;
+    this.downloadedBytes += task.blob.size;

-
-
-
-  }
+    if (task.chunkIndex != null) {
+      this._chunkBlobs.set(task.chunkIndex, task.blob);
+    }

-
-
-   * If onChunkDownloaded callback is set, fires it for each chunk as it arrives
-   * so the caller can cache chunks progressively (enabling streaming before
-   * the entire file is downloaded).
-   */
-  async downloadChunks(url, contentType, chunkSize, concurrentChunks) {
-    // Calculate chunk ranges
-    const chunkRanges = [];
-    for (let start = 0; start < this.totalBytes; start += chunkSize) {
-      const end = Math.min(start + chunkSize - 1, this.totalBytes - 1);
-      chunkRanges.push({ start, end, index: chunkRanges.length });
+    if (this.options.onProgress) {
+      this.options.onProgress(this.downloadedBytes, this.totalBytes);
     }

-    //
-
-
-
-
+    // Fire progressive chunk callback
+    if (this.onChunkDownloaded && task.chunkIndex != null) {
+      try {
+        await this.onChunkDownloaded(task.chunkIndex, task.blob, this.totalChunks);
+      } catch (e) {
+        console.warn('[FileDownload] onChunkDownloaded callback error:', e);
+      }
     }

-
-
+    if (this.completedChunks === this.tasks.length && this.state !== 'complete') {
+      this.state = 'complete';
+      const { type, id } = this.fileInfo;
+
+      if (task.chunkIndex == null) {
+        console.log('[FileDownload] Complete:', `${type}/${id}`, `(${task.blob.size} bytes)`);
+        this._resolve(task.blob);
+      } else if (this.onChunkDownloaded) {
+        console.log('[FileDownload] Complete:', `${type}/${id}`, `(progressive, ${this.totalChunks} chunks)`);
+        this._resolve(new Blob([], { type: this._contentType }));
+      } else {
+        const ordered = [];
+        for (let i = 0; i < this.totalChunks; i++) {
+          const blob = this._chunkBlobs.get(i);
+          if (blob) ordered.push(blob);
+        }
+        const assembled = new Blob(ordered, { type: this._contentType });
+        console.log('[FileDownload] Complete:', `${type}/${id}`, `(${assembled.size} bytes, reassembled)`);
+        this._resolve(assembled);
+      }

-
+      this._chunkBlobs.clear();
+    }
+  }

-
+  onTaskFailed(task, error) {
+    if (this.state === 'complete' || this.state === 'failed') return;
+
+    // URL expiration is transient — drop this task, don't fail the file.
+    // Already-downloaded chunks are safe in cache. Next collection cycle
+    // provides fresh URLs and the resume logic (skipChunks) fills the gaps.
+    if (error.message?.includes('URL expired')) {
+      const chunkLabel = task.chunkIndex != null ? ` chunk ${task.chunkIndex}` : '';
+      console.warn(`[FileDownload] URL expired, dropping${chunkLabel}:`, `${this.fileInfo.type}/${this.fileInfo.id}`);
+      this.tasks = this.tasks.filter(t => t !== task);
+      // If all remaining tasks completed, resolve as partial
+      if (this.tasks.length === 0 || this.completedChunks >= this.tasks.length) {
+        this.state = 'complete';
+        this._urlExpired = true;
+        this._resolve(new Blob([], { type: this._contentType }));
+      }
+      return;
+    }

-
-
+    console.error('[FileDownload] Failed:', `${this.fileInfo.type}/${this.fileInfo.id}`, error);
+    this.state = 'failed';
+    this._reject(error);
+  }
+}

-
-
-
-
+/**
+ * LayoutTaskBuilder — Smart builder that produces a sorted, barrier-embedded
+ * task list for a single layout.
+ *
+ * Usage:
+ * const builder = new LayoutTaskBuilder(queue);
+ * builder.addFile(fileInfo);
+ * const orderedTasks = await builder.build();
+ * queue.enqueueOrderedTasks(orderedTasks);
+ *
+ * The builder runs HEAD requests (throttled), collects the resulting
+ * DownloadTasks, sorts them optimally, and embeds BARRIERs between
+ * critical chunks (chunk-0, chunk-last) and bulk chunks.
+ *
+ * Duck-typing: implements enqueueChunkTasks() so FileDownload.prepare()
+ * works unchanged — it just collects tasks instead of processing them.
+ */
+export class LayoutTaskBuilder {
+  constructor(queue) {
+    this.queue = queue; // Main DownloadQueue (for dedup via active map)
+    this._filesToPrepare = []; // FileDownloads needing HEAD requests
+    this._tasks = []; // Collected DownloadTasks (from prepare callbacks)
+    this._maxPreparing = 2; // HEAD request throttle
+  }

-
-
+  /**
+   * Register a file. Uses queue.active for dedup/URL refresh.
+   * Does NOT trigger prepare — that happens in build().
+   */
+  addFile(fileInfo) {
+    const key = DownloadQueue.stableKey(fileInfo);
+
+    if (this.queue.active.has(key)) {
+      const existing = this.queue.active.get(key);
+      // URL refresh (same logic as queue.enqueue)
+      if (fileInfo.path && fileInfo.path !== existing.fileInfo.path) {
+        const oldExpiry = getUrlExpiry(existing.fileInfo.path);
+        const newExpiry = getUrlExpiry(fileInfo.path);
+        if (newExpiry > oldExpiry) {
+          existing.fileInfo.path = fileInfo.path;
         }
+      }
+      return existing;
+    }

-
-
+    const file = new FileDownload(fileInfo, {
+      chunkSize: this.queue.chunkSize,
+      calculateMD5: this.queue.calculateMD5,
+      onProgress: this.queue.onProgress
+    });

-
-
-
+    this.queue.active.set(key, file);
+    this._filesToPrepare.push(file);
+    return file;
+  }

-
-
-
-
-
-
-
-  }
+  /**
+   * Duck-type interface for FileDownload.prepare().
+   * Collects tasks instead of processing them.
+   */
+  enqueueChunkTasks(tasks) {
+    this._tasks.push(...tasks);
+  }

-
-
-
-
+  /**
+   * Run all HEAD requests (throttled) and return sorted tasks with barriers.
+   */
+  async build() {
+    await this._prepareAll();
+    return this._sortWithBarriers();
+  }

-
+  async _prepareAll() {
+    await new Promise((resolve) => {
+      let running = 0;
+      let idx = 0;
+      const next = () => {
+        while (running < this._maxPreparing && idx < this._filesToPrepare.length) {
+          const file = this._filesToPrepare[idx++];
+          running++;
+          file.prepare(this).finally(() => {
+            running--;
+            if (idx >= this._filesToPrepare.length && running === 0) {
+              resolve();
+            } else {
+              next();
+            }
+          });
+        }
+      };
+      if (this._filesToPrepare.length === 0) resolve();
+      else next();
+    });
+  }

-
-
-
+  _sortWithBarriers() {
+    const nonChunked = [];
+    const chunk0s = [];
+    const chunkLasts = [];
+    const remaining = [];
+
+    for (const t of this._tasks) {
+      if (t.chunkIndex == null) {
+        nonChunked.push(t);
+      } else if (t.chunkIndex === 0) {
+        chunk0s.push(t);
+      } else {
+        const total = t._parentFile?.totalChunks || 0;
+        if (total > 1 && t.chunkIndex === total - 1) {
+          chunkLasts.push(t);
+        } else {
+          remaining.push(t);
+        }
       }
-    };
-
-    // Phase 1: chunk 0 + last chunk in parallel (both needed for playback start)
-    await Promise.all(priorityChunks.map(range => downloadChunk(range)));
-
-    // Phase 2: remaining chunks strictly sequential (guarantees gap-free playback)
-    for (const range of remainingChunks) {
-      await downloadChunk(range);
     }

-
-
-    // Return a lightweight marker — the real data is already in cache
-    return new Blob([], { type: contentType });
-  }
+    nonChunked.sort((a, b) => (a._parentFile?.totalBytes || 0) - (b._parentFile?.totalBytes || 0));
+    remaining.sort((a, b) => a.chunkIndex - b.chunkIndex);

-    //
-    const
-
-
+    // Build: small files + critical chunks → BARRIER → bulk chunks
+    const result = [...nonChunked, ...chunk0s, ...chunkLasts];
+    if (remaining.length > 0) {
+      result.push(BARRIER, ...remaining);
     }
-
-    return new Blob(orderedChunks, { type: contentType });
+    return result;
   }
 }

 /**
- * DownloadQueue -
+ * DownloadQueue - Flat queue with per-file and global concurrency limits
+ *
+ * Global concurrency limit (e.g., 6) controls total HTTP connections.
+ * Per-file chunk limit (e.g., 2) prevents one large file from hogging all
+ * connections, ensuring bandwidth per chunk is high and chunk 0 arrives fast.
+ * HEAD requests are throttled to avoid flooding browser connection pool.
  */
 export class DownloadQueue {
   constructor(options = {}) {
     this.concurrency = options.concurrency || DEFAULT_CONCURRENCY;
     this.chunkSize = options.chunkSize || DEFAULT_CHUNK_SIZE;
-    this.
-    this.calculateMD5 = options.calculateMD5;
-    this.onProgress = options.onProgress;
+    this.maxChunksPerFile = options.chunksPerFile || DEFAULT_MAX_CHUNKS_PER_FILE;
+    this.calculateMD5 = options.calculateMD5;
+    this.onProgress = options.onProgress;

-    this.queue = [];
-    this.active = new Map();
+    this.queue = []; // DownloadTask[] — flat queue of chunk/file tasks
+    this.active = new Map(); // stableKey → FileDownload
+    this._activeTasks = []; // DownloadTask[] — currently in-flight tasks
     this.running = 0;
+
+    // HEAD request throttling: prevents prepare() from flooding browser connections
+    this._prepareQueue = [];
+    this._preparingCount = 0;
+    this._maxPreparing = 2; // Max concurrent HEAD requests
+
+    // When paused, processQueue() is a no-op (used during barrier setup)
+    this.paused = false;
   }

-
-
-
-   */
-  enqueue(fileInfo) {
-    const { path } = fileInfo;
+  static stableKey(fileInfo) {
+    return `${fileInfo.type}/${fileInfo.id}`;
+  }

-
-
-
-
+  enqueue(fileInfo) {
+    const key = DownloadQueue.stableKey(fileInfo);
+
+    if (this.active.has(key)) {
+      const existing = this.active.get(key);
+      if (fileInfo.path && fileInfo.path !== existing.fileInfo.path) {
+        const oldExpiry = getUrlExpiry(existing.fileInfo.path);
+        const newExpiry = getUrlExpiry(fileInfo.path);
+        if (newExpiry > oldExpiry) {
+          console.log('[DownloadQueue] Refreshing URL for', key);
+          existing.fileInfo.path = fileInfo.path;
+        }
+      }
+      return existing;
     }

-
-    const task = new DownloadTask(fileInfo, {
+    const file = new FileDownload(fileInfo, {
       chunkSize: this.chunkSize,
-      chunksPerFile: this.chunksPerFile,
       calculateMD5: this.calculateMD5,
       onProgress: this.onProgress
     });

-    this.active.set(
-
+    this.active.set(key, file);
+    console.log('[DownloadQueue] Enqueued:', key);

-
+    // Throttled prepare: HEAD requests are limited to avoid flooding connections
+    this._schedulePrepare(file);

-
+    return file;
+  }
+
+  /**
+   * Schedule a FileDownload's prepare() with throttling.
+   * Only N HEAD requests run concurrently to preserve connections for data transfers.
+   */
+  _schedulePrepare(file) {
+    this._prepareQueue.push(file);
+    this._processPrepareQueue();
+  }
+
+  _processPrepareQueue() {
+    while (this._preparingCount < this._maxPreparing && this._prepareQueue.length > 0) {
+      const file = this._prepareQueue.shift();
+      this._preparingCount++;
+      file.prepare(this).finally(() => {
+        this._preparingCount--;
+        this._processPrepareQueue();
+      });
+    }
+  }
+
+  enqueueChunkTasks(tasks) {
+    for (const task of tasks) {
+      this.queue.push(task);
+    }
+    this._sortQueue();
+
+    console.log(`[DownloadQueue] ${tasks.length} tasks added (${this.queue.length} pending, ${this.running} active)`);
+    this.processQueue();
+  }
+
+  /**
+   * Enqueue a pre-ordered list of tasks (with optional BARRIER sentinels).
+   * Preserves insertion order — no sorting. Position = priority.
+   *
+   * Used by LayoutQueueBuilder to push the entire download queue in layout
+   * playback order with barriers separating critical chunks from bulk.
+   *
+   * @param {Array<DownloadTask|Symbol>} items - Tasks and BARRIERs in order
+   */
+  enqueueOrderedTasks(items) {
+    let taskCount = 0;
+    let barrierCount = 0;
+    for (const item of items) {
+      if (item === BARRIER) {
+        this.queue.push(BARRIER);
+        barrierCount++;
+      } else {
+        this.queue.push(item);
+        taskCount++;
+      }
+    }
+
+    console.log(`[DownloadQueue] Ordered queue: ${taskCount} tasks, ${barrierCount} barriers (${this.queue.length} pending, ${this.running} active)`);
     this.processQueue();
+  }

-
+  /** Sort queue by priority (highest first), stable within same priority. */
+  _sortQueue() {
+    this.queue.sort((a, b) => b._priority - a._priority);
+  }
+
+  prioritize(fileType, fileId) {
+    const key = `${fileType}/${fileId}`;
+    const file = this.active.get(key);
+
+    if (!file) {
+      console.log('[DownloadQueue] Not found:', key);
+      return false;
+    }
+
+    let boosted = 0;
+    for (const t of this.queue) {
+      if (t._parentFile === file && t._priority < PRIORITY.high) {
+        t._priority = PRIORITY.high;
+        boosted++;
+      }
+    }
+    this._sortQueue();
+
+    console.log('[DownloadQueue] Prioritized:', key, `(${boosted} tasks boosted)`);
+    return true;
   }

   /**
-   *
-   *
-   * @param {
+   * Boost priority for files needed by the current/next layout.
+   * @param {Array} fileIds - Media IDs needed by the layout
+   * @param {number} priority - Priority level (default: PRIORITY.high)
    */
-  prioritizeLayoutFiles(fileIds) {
+  prioritizeLayoutFiles(fileIds, priority = PRIORITY.high) {
     const idSet = new Set(fileIds.map(String));
-
-
-
-
-
-
-      if (idSet.has(String(task.fileInfo.id))) {
-        prioritized.push(task);
-      } else {
-        rest.push(task);
+
+    let boosted = 0;
+    for (const t of this.queue) {
+      if (idSet.has(String(t._parentFile?.fileInfo.id)) && t._priority < priority) {
+        t._priority = priority;
+        boosted++;
       }
     }
-    this.
-
+    for (const t of this._activeTasks) {
+      if (idSet.has(String(t._parentFile?.fileInfo.id)) && t._priority < priority) {
+        t._priority = priority;
+      }
+    }
+    this._sortQueue();
+
+    console.log('[DownloadQueue] Layout files prioritized:', idSet.size, 'files,', boosted, 'tasks boosted to', priority);
   }

   /**
-   *
-   *
+   * Emergency priority for a specific streaming chunk.
+   * Called by the Service Worker when a video is stalled waiting for chunk N.
+   * Sets urgent priority → queue re-sorts → processQueue() limits concurrency.
    */
-
-
-
-
-
-
+  urgentChunk(fileType, fileId, chunkIndex) {
+    const key = `${fileType}/${fileId}`;
+    const file = this.active.get(key);
+
+    if (!file) {
+      console.log('[DownloadQueue] urgentChunk: file not active:', key, 'chunk', chunkIndex);
+      return false;
+    }
+
+    // Already in-flight — nothing to do
+    const isActive = this._activeTasks.some(
+      t => t._parentFile === file && t.chunkIndex === chunkIndex && t.state === 'downloading'
+    );
+    if (isActive) {
+      // Mark the in-flight task as urgent so processQueue() limits concurrency
+      const activeTask = this._activeTasks.find(
+        t => t._parentFile === file && t.chunkIndex === chunkIndex
       );
+      if (activeTask && activeTask._priority < PRIORITY.urgent) {
+        activeTask._priority = PRIORITY.urgent;
+        console.log(`[DownloadQueue] URGENT: ${key} chunk ${chunkIndex} (already in-flight, limiting slots)`);
+        // Don't call processQueue() — can't stop in-flight tasks, but next
+        // processQueue() call (when any task completes) will see hasUrgent
+        // and limit new starts to URGENT_CONCURRENCY.
+        return true;
+      }
+      console.log('[DownloadQueue] urgentChunk: already urgent:', key, 'chunk', chunkIndex);
+      return false;
+    }
+
+    // Find task in queue (may be past a barrier)
+    const idx = this.queue.findIndex(
+      t => t !== BARRIER && t._parentFile === file && t.chunkIndex === chunkIndex
+    );

-
-
-
-
-
-
-
+    if (idx === -1) {
+      console.log('[DownloadQueue] urgentChunk: chunk not in queue:', key, 'chunk', chunkIndex);
+      return false;
+    }
+
+    const task = this.queue.splice(idx, 1)[0];
+    task._priority = PRIORITY.urgent;
+    // Move to front of queue (past any barriers)
+    this.queue.unshift(task);
+
+    console.log(`[DownloadQueue] URGENT: ${key} chunk ${chunkIndex} (moved to front)`);
+    this.processQueue();
+    return true;
+  }
+
+  /**
+   * Process queue — barrier-aware loop.
+   *
+   * Supports two modes:
+   * 1. Priority-sorted (legacy): queue sorted by priority, urgent reduces concurrency
+   * 2. Barrier-ordered: queue contains BARRIER sentinels that act as hard gates
+   *
+   * BARRIER behavior:
+   * - When processQueue() hits a BARRIER and running > 0 → STOP (slots stay empty)
+   * - When running === 0 → remove barrier, continue with tasks below
+   * - Tasks are never reordered past a BARRIER (except urgentChunk which bypasses)
+   *
+   * Urgent mode: when any task has PRIORITY.urgent, concurrency drops to
+   * URGENT_CONCURRENCY so the stalled chunk gets maximum bandwidth.
+   */
+  processQueue() {
+    if (this.paused) return;
+
+    // Determine effective concurrency and minimum priority to start
+    const hasUrgent = this.queue.some(t => t !== BARRIER && t._priority >= PRIORITY.urgent) ||
+      this._activeTasks?.some(t => t._priority >= PRIORITY.urgent && t.state === 'downloading');
+    const maxSlots = hasUrgent ? URGENT_CONCURRENCY : this.concurrency;
+    const minPriority = hasUrgent ? PRIORITY.urgent : 0; // Urgent = only urgent tasks run
+
+    // Fill slots from front of queue
+    while (this.running < maxSlots && this.queue.length > 0) {
+      const next = this.queue[0];
+
+      // Hit a BARRIER — hard gate
+      if (next === BARRIER) {
+        if (this.running > 0) {
+          break; // In-flight tasks still running — slots stay empty
        }
-
+        // All above-barrier tasks done → raise barrier, continue
+        this.queue.shift();
+        continue;
       }
-      // All held files done — clear hold
-      console.log('[DownloadQueue] Layout hold cleared, resuming normal downloads');
-      this._layoutHoldIds = null;
-    }

-
-
-
-
+      // Per-file limit: skip to next eligible task (but don't cross barrier)
+      if (next._priority < minPriority || !this._canStartTask(next)) {
+        let found = false;
+        for (let i = 1; i < this.queue.length; i++) {
+          if (this.queue[i] === BARRIER) break; // Don't look past barrier
+          const task = this.queue[i];
+          if (task._priority >= minPriority && this._canStartTask(task)) {
+            this.queue.splice(i, 1);
+            this._startTask(task);
+            found = true;
+            break;
+          }
+        }
+        if (!found) break;
+        continue;
+      }
+
+      this.queue.shift();
+      this._startTask(next);
     }

     if (this.queue.length === 0 && this.running === 0) {
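To make the new barrier and urgent paths concrete, a sketch of how they combine; the Service Worker routing and the chunk-cache layout here are assumptions, only `enqueueOrderedTasks`, `BARRIER`, and `urgentChunk` come from the diffed code:

```js
import { BARRIER } from './src/download-manager.js';

// Sketch: `queue` is the DownloadQueue instance; the task arrays are pre-built elsewhere.
function queueCriticalFirst(queue, { smallFiles, chunkZeros, lastChunks, bulkChunks }) {
  // processQueue() starts nothing below the BARRIER until everything above it finishes,
  // so the playback-critical pieces always complete first.
  queue.enqueueOrderedTasks([...smallFiles, ...chunkZeros, ...lastChunks, BARRIER, ...bulkChunks]);
}

// Sketch: inside a Service Worker that serves video Range requests from a chunk cache.
async function serveChunk(dm, fileType, fileId, chunkIndex) {
  const cache = await caches.open('media-chunks');                       // hypothetical cache name
  const hit = await cache.match(`/chunks/${fileType}-${fileId}/${chunkIndex}`);
  if (hit) return hit;

  // Playback stalled on an uncached chunk: move it to the front, past any barrier,
  // and let processQueue() drop to URGENT_CONCURRENCY so it finishes sooner.
  dm.urgentChunk(fileType, fileId, chunkIndex);
  return new Response(null, { status: 503, headers: { 'Retry-After': '1' } });
}
```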
@@ -346,87 +814,88 @@ export class DownloadQueue {
   }

   /**
-   *
+   * Per-file concurrency check. Priority sorting decides order,
+   * this just prevents one file from hogging all connections.
    */
+  _canStartTask(task) {
+    return task._parentFile._runningCount < this.maxChunksPerFile;
+  }
+
   _startTask(task) {
     this.running++;
-
+    task._parentFile._runningCount++;
+    this._activeTasks.push(task);
+    const key = `${task.fileInfo.type}/${task.fileInfo.id}`;
+    const chunkLabel = task.chunkIndex != null ? ` chunk ${task.chunkIndex}` : '';
+    console.log(`[DownloadQueue] Starting: ${key}${chunkLabel} (${this.running}/${this.concurrency} active)`);

     task.start()
-      .
-      .finally(() => {
+      .then(() => {
         this.running--;
-
-
+        task._parentFile._runningCount--;
+        this._activeTasks = this._activeTasks.filter(t => t !== task);
+        console.log(`[DownloadQueue] Fetched: ${key}${chunkLabel} (${this.running} active, ${this.queue.length} pending)`);
         this.processQueue();
+        return task._parentFile.onTaskComplete(task);
+      })
+      .catch(err => {
+        this.running--;
+        task._parentFile._runningCount--;
+        this._activeTasks = this._activeTasks.filter(t => t !== task);
+        this.processQueue();
+        task._parentFile.onTaskFailed(task, err);
       });
   }

   /**
-   *
-   *
-   *
-   * @returns {boolean} true if file was found (queued or active)
+   * Wait for all queued prepare (HEAD) operations to finish.
+   * Returns when the prepare queue is drained and all FileDownloads have
+   * either created their tasks or failed.
    */
-
-
-
-
-
-
-
-
-
-
-  }
+  awaitAllPrepared() {
+    return new Promise((resolve) => {
+      const check = () => {
+        if (this._preparingCount === 0 && this._prepareQueue.length === 0) {
+          resolve();
+        } else {
+          setTimeout(check, 50);
+        }
+      };
+      check();
+    });
+  }

-
-
-
+  removeCompleted(key) {
+    const file = this.active.get(key);
+    if (file && (file.state === 'complete' || file.state === 'failed')) {
+      this.queue = this.queue.filter(t => t === BARRIER || t._parentFile !== file);
+      this.active.delete(key);
     }
-
-    // Check if already downloading
-    for (const [, task] of this.active) {
-      if (task.fileInfo.type === fileType && String(task.fileInfo.id) === String(fileId)) {
-        console.log('[DownloadQueue] Already downloading:', `${fileType}/${fileId}`);
-        return true;
-      }
-    }
-
-    console.log('[DownloadQueue] Not found in queue:', `${fileType}/${fileId}`);
-    return false;
   }

-
-
-   */
-  getTask(url) {
-    return this.active.get(url) || null;
+  getTask(key) {
+    return this.active.get(key) || null;
   }

-  /**
-   * Get progress for all active downloads
-   */
   getProgress() {
     const progress = {};
-    for (const [
-      progress[
-        downloaded:
-        total:
-        percent:
-        state:
+    for (const [key, file] of this.active.entries()) {
+      progress[key] = {
+        downloaded: file.downloadedBytes,
+        total: file.totalBytes,
+        percent: file.totalBytes > 0 ? (file.downloadedBytes / file.totalBytes * 100).toFixed(1) : 0,
+        state: file.state
       };
     }
     return progress;
   }

-  /**
-   * Cancel all downloads
-   */
   clear() {
     this.queue = [];
     this.active.clear();
     this.running = 0;
+    this._prepareQueue = [];
+    this._preparingCount = 0;
   }
 }

@@ -438,40 +907,37 @@ export class DownloadManager {
     this.queue = new DownloadQueue(options);
   }

-  /**
-   * Enqueue file for download
-   * @param {Object} fileInfo - { id, type, path, md5 }
-   * @returns {DownloadTask}
-   */
   enqueue(fileInfo) {
     return this.queue.enqueue(fileInfo);
   }

   /**
-   *
+   * Enqueue a file for layout-grouped downloading.
+   * Layout grouping is now handled externally by LayoutTaskBuilder.
+   * @param {Object} fileInfo - File info
+   * @returns {FileDownload}
    */
-
-    return this.queue.
+  enqueueForLayout(fileInfo) {
+    return this.queue.enqueue(fileInfo);
+  }
+
+  getTask(key) {
+    return this.queue.getTask(key);
   }

-  /**
-   * Get progress for all downloads
-   */
   getProgress() {
     return this.queue.getProgress();
   }

-
-
-   */
-  prioritizeLayoutFiles(fileIds) {
-    this.queue.prioritizeLayoutFiles(fileIds);
+  prioritizeLayoutFiles(fileIds, priority) {
+    this.queue.prioritizeLayoutFiles(fileIds, priority);
     this.queue.processQueue();
   }

-
-
-
+  urgentChunk(fileType, fileId, chunkIndex) {
+    return this.queue.urgentChunk(fileType, fileId, chunkIndex);
+  }
+
   clear() {
     this.queue.clear();
   }