@xiboplayer/cache 0.1.3 → 0.3.0
- package/README.md +50 -0
- package/package.json +2 -2
- package/src/cache-proxy.js +12 -17
- package/src/cache.test.js +12 -2
- package/src/download-manager.js +767 -303
- package/src/download-manager.test.js +1130 -325
- package/src/index.js +3 -1
- package/docs/README.md +0 -118
package/src/download-manager.js
CHANGED
@@ -1,345 +1,811 @@
 /**
- * DownloadManager -
+ * DownloadManager - Flat queue download orchestration
  *
  * Works in both browser and Service Worker contexts.
  * Handles download queue, concurrency control, parallel chunks, and MD5 verification.
  *
- * Architecture:
- * -
- * -
- * -
+ * Architecture (flat queue):
+ * - DownloadTask: Single HTTP fetch unit (one GET request — full file or one chunk)
+ * - FileDownload: Orchestrator that creates DownloadTasks for a file (HEAD + chunks)
+ * - DownloadQueue: Flat queue where every download unit competes equally for connection slots
+ * - DownloadManager: Public facade
+ *
+ * BEFORE: Queue[File, File, File] → each File internally spawns N chunk fetches
+ * AFTER: Queue[chunk, chunk, file, chunk, chunk, file, chunk] → flat, 1 fetch per slot
+ *
+ * This eliminates the two-layer concurrency problem where N files × M chunks per file
+ * could exceed Chromium's 6-per-host connection limit, causing head-of-line blocking.
+ *
+ * Per-file chunk limit (maxChunksPerFile) prevents one large file from hogging all
+ * connection slots, ensuring bandwidth is shared fairly and chunk 0 arrives fast.
  *
 * Usage:
- * const dm = new DownloadManager({ concurrency:
- * const
- * const blob = await
+ * const dm = new DownloadManager({ concurrency: 6, chunkSize: 50MB, chunksPerFile: 2 });
+ * const file = dm.enqueue({ id, type, path, md5 });
+ * const blob = await file.wait();
 */

-const DEFAULT_CONCURRENCY =
+const DEFAULT_CONCURRENCY = 6; // Max concurrent HTTP connections (matches Chromium per-host limit)
 const DEFAULT_CHUNK_SIZE = 50 * 1024 * 1024; // 50MB chunks
-const
+const DEFAULT_MAX_CHUNKS_PER_FILE = 3; // Max parallel chunk downloads per file
+const CHUNK_THRESHOLD = 100 * 1024 * 1024; // Files > 100MB get chunked
+const MAX_RETRIES = 3;
+const RETRY_DELAY_MS = 500; // Fast: 500ms, 1s, 1.5s → total ~3s
+const URGENT_CONCURRENCY = 2; // Slots when urgent chunk is active (bandwidth focus)
+const FETCH_TIMEOUT_MS = 600_000; // 10 minutes — 100MB chunk at ~2 Mbps
+const HEAD_TIMEOUT_MS = 15_000; // 15 seconds for HEAD requests
+
+/**
+ * Infer Content-Type from file path extension.
+ * Used when we skip HEAD (size already known from RequiredFiles).
+ */
+function inferContentType(fileInfo) {
+  const path = fileInfo.path || fileInfo.code || '';
+  const ext = path.split('.').pop()?.split('?')[0]?.toLowerCase();
+  const types = {
+    mp4: 'video/mp4', webm: 'video/webm', mp3: 'audio/mpeg',
+    png: 'image/png', jpg: 'image/jpeg', jpeg: 'image/jpeg',
+    gif: 'image/gif', svg: 'image/svg+xml', webp: 'image/webp',
+    css: 'text/css', js: 'application/javascript',
+    ttf: 'font/ttf', otf: 'font/otf', woff: 'font/woff', woff2: 'font/woff2',
+    xml: 'application/xml', xlf: 'application/xml',
+  };
+  return types[ext] || 'application/octet-stream';
+}
+
+// Priority levels — higher number = starts first
+export const PRIORITY = { normal: 0, layout: 1, high: 2, urgent: 3 };
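The usage block in the header comment above maps onto the new flat-queue API. A minimal sketch of driving it from player code, assuming `DownloadManager` is re-exported from the package entry point (the exact export path is not shown in this diff) and with an illustrative signed URL and MD5:

```js
import { DownloadManager } from '@xiboplayer/cache'; // assumed export path

const dm = new DownloadManager({
  concurrency: 6,              // matches DEFAULT_CONCURRENCY (Chromium per-host limit)
  chunkSize: 50 * 1024 * 1024, // DEFAULT_CHUNK_SIZE (50MB)
  chunksPerFile: 2,            // cap on parallel chunks per file
});

async function fetchMedia() {
  // enqueue() returns a FileDownload; wait() resolves with the assembled Blob
  // (or an empty marker Blob when chunks were cached progressively).
  const file = dm.enqueue({
    id: 42,
    type: 'media',
    path: 'https://cdn.example.com/video.mp4?X-Amz-Expires=1767225600', // illustrative signed URL
    md5: 'd41d8cd98f00b204e9800998ecf8427e',
  });
  const blob = await file.wait();
  console.log('downloaded', blob.size, 'bytes', dm.getProgress());
}
```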

 /**
- *
+ * BARRIER sentinel — hard gate in the download queue.
+ *
+ * When processQueue() encounters a BARRIER:
+ * - If tasks are still in-flight above it → STOP (slots stay empty)
+ * - If running === 0 → remove barrier, continue with tasks below
+ *
+ * Used by LayoutQueueBuilder to separate critical chunks (chunk-0, chunk-last)
+ * from remaining bulk chunks. Ensures video playback can start before all
+ * chunks finish downloading.
+ */
+export const BARRIER = Symbol('BARRIER');
+
+/**
+ * Parse the X-Amz-Expires absolute timestamp from a signed URL.
+ * Returns the expiry as a Unix timestamp (seconds), or Infinity if not found.
+ */
+function getUrlExpiry(url) {
+  try {
+    const match = url.match(/X-Amz-Expires=(\d+)/);
+    return match ? parseInt(match[1], 10) : Infinity;
+  } catch {
+    return Infinity;
+  }
+}
+
+/**
+ * Check if a signed URL has expired (or will expire within a grace period).
+ * @param {string} url - Signed URL with X-Amz-Expires parameter
+ * @param {number} graceSeconds - Seconds before actual expiry to consider it expired (default: 30)
+ * @returns {boolean}
+ */
+export function isUrlExpired(url, graceSeconds = 30) {
+  const expiry = getUrlExpiry(url);
+  if (expiry === Infinity) return false;
+  return (Date.now() / 1000) >= (expiry - graceSeconds);
+}
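A quick sketch of the expiry check above, assuming (as the helper's doc comment states) that the CMS places an absolute Unix timestamp in `X-Amz-Expires`; the import path, URL, and timestamps are made up for illustration:

```js
import { isUrlExpired } from '@xiboplayer/cache'; // assumed re-export

const expiresAt = Math.floor(Date.now() / 1000) + 20; // 20 seconds from now
const url = `https://cdn.example.com/file.mp4?X-Amz-Expires=${expiresAt}`;

isUrlExpired(url);      // true: inside the default 30s grace window
isUrlExpired(url, 10);  // false: with 10s grace, 20s of validity remain
isUrlExpired('https://cdn.example.com/file.mp4'); // false: no expiry parameter
```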
+
+/**
+ * DownloadTask - Single HTTP fetch unit
+ *
+ * Handles exactly one HTTP request: either a full small file GET or a single Range GET
+ * for one chunk of a larger file. Includes retry logic with exponential backoff.
 */
 export class DownloadTask {
   constructor(fileInfo, options = {}) {
     this.fileInfo = fileInfo;
-    this.
-    this.
-    this.
-    this.
-    this.
-    this.
-    this.
-    this.state = 'pending'; // pending, downloading, complete, failed
-    // Progressive streaming: callback fired for each chunk as it downloads
-    // Set externally before download starts: (chunkIndex, chunkBlob, totalChunks) => Promise
-    this.onChunkDownloaded = null;
+    this.chunkIndex = options.chunkIndex ?? null;
+    this.rangeStart = options.rangeStart ?? null;
+    this.rangeEnd = options.rangeEnd ?? null;
+    this.state = 'pending';
+    this.blob = null;
+    this._parentFile = null;
+    this._priority = PRIORITY.normal;
   }

-
-
-
-
-
-
-
+  getUrl() {
+    const url = this.fileInfo.path;
+    if (isUrlExpired(url)) {
+      throw new Error(`URL expired for ${this.fileInfo.type}/${this.fileInfo.id} — waiting for fresh URL from next collection cycle`);
+    }
+    return url;
+  }
+
+  async start() {
+    this.state = 'downloading';
+    const headers = {};
+    if (this.rangeStart != null) {
+      headers['Range'] = `bytes=${this.rangeStart}-${this.rangeEnd}`;
     }

-
-
+    for (let attempt = 1; attempt <= MAX_RETRIES; attempt++) {
+      const ac = new AbortController();
+      const timer = setTimeout(() => ac.abort(), FETCH_TIMEOUT_MS);
+      try {
+        const url = this.getUrl();
+        const fetchOpts = { signal: ac.signal };
+        if (Object.keys(headers).length > 0) fetchOpts.headers = headers;
+        const response = await fetch(url, fetchOpts);
+
+        if (!response.ok && response.status !== 206) {
+          throw new Error(`Fetch failed: ${response.status}`);
+        }
+
+        this.blob = await response.blob();
+        this.state = 'complete';
+        return this.blob;
+
+      } catch (error) {
+        const msg = ac.signal.aborted ? `Timeout after ${FETCH_TIMEOUT_MS / 1000}s` : error.message;
+        if (attempt < MAX_RETRIES) {
+          const delay = RETRY_DELAY_MS * attempt;
+          const chunkLabel = this.chunkIndex != null ? ` chunk ${this.chunkIndex}` : '';
+          console.warn(`[DownloadTask] ${this.fileInfo.type}/${this.fileInfo.id}${chunkLabel} attempt ${attempt}/${MAX_RETRIES} failed: ${msg}. Retrying in ${delay / 1000}s...`);
+          await new Promise(resolve => setTimeout(resolve, delay));
+        } else {
+          this.state = 'failed';
+          throw ac.signal.aborted ? new Error(msg) : error;
+        }
+      } finally {
+        clearTimeout(timer);
+      }
     }
+  }
+}
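For illustration, the one-fetch-per-task contract above can be exercised directly; in practice FileDownload constructs these tasks, and every value below is hypothetical:

```js
const signedUrl = 'https://cdn.example.com/big.mp4?X-Amz-Expires=1767225600'; // illustrative
const task = new DownloadTask(
  { id: 7, type: 'media', path: signedUrl },
  { chunkIndex: 1, rangeStart: 50 * 1024 * 1024, rangeEnd: 100 * 1024 * 1024 - 1 }
);
const chunkBlob = await task.start(); // one Range GET, retried up to MAX_RETRIES times
```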

-
-
-
+/**
+ * FileDownload - Orchestrates downloading a single file
+ *
+ * Does the HEAD request to determine file size, then:
+ * - Small file (≤ 100MB): creates 1 DownloadTask for the full file
+ * - Large file (> 100MB): creates N DownloadTasks, one per chunk
+ *
+ * All tasks are enqueued into the flat DownloadQueue where they compete
+ * equally for HTTP connection slots with tasks from other files.
+ *
+ * Provides wait() that resolves when ALL tasks for this file complete.
+ * Supports progressive caching via onChunkDownloaded callback.
+ */
+export class FileDownload {
+  constructor(fileInfo, options = {}) {
+    this.fileInfo = fileInfo;
+    this.options = options;
+    this.state = 'pending';
+    this.tasks = [];
+    this.completedChunks = 0;
+    this.totalChunks = 0;
+    this.totalBytes = 0;
+    this.downloadedBytes = 0;
+    this.onChunkDownloaded = null;
+    this.skipChunks = fileInfo.skipChunks || new Set();
+    this._contentType = 'application/octet-stream';
+    this._chunkBlobs = new Map();
+    this._runningCount = 0; // Currently running tasks for this file
+    this._resolve = null;
+    this._reject = null;
+    this._promise = new Promise((res, rej) => {
+      this._resolve = res;
+      this._reject = rej;
     });
+    this._promise.catch(() => {});
+  }
+
+  getUrl() {
+    const url = this.fileInfo.path;
+    if (isUrlExpired(url)) {
+      throw new Error(`URL expired for ${this.fileInfo.type}/${this.fileInfo.id} — waiting for fresh URL from next collection cycle`);
+    }
+    return url;
+  }
+
+  wait() {
+    return this._promise;
   }

   /**
-   *
+   * Determine file size and create DownloadTasks.
+   * Uses RequiredFiles size when available (instant, no network).
+   * Falls back to HEAD request only when size is unknown.
   */
-  async
-    const { id, type, path, md5 } = this.fileInfo;
-
+  async prepare(queue) {
    try {
-      this.state = '
-
-
-
-
-
-
+      this.state = 'preparing';
+      const { id, type, size } = this.fileInfo;
+      console.log('[FileDownload] Starting:', `${type}/${id}`);
+
+      // Use declared size from RequiredFiles — no HEAD needed for queue building
+      this.totalBytes = (size && size > 0) ? parseInt(size) : 0;
+      this._contentType = inferContentType(this.fileInfo);
+
+      if (this.totalBytes === 0) {
+        // No size declared — HEAD fallback (rare: only for files without CMS size)
+        const url = this.getUrl();
+        const ac = new AbortController();
+        const timer = setTimeout(() => ac.abort(), HEAD_TIMEOUT_MS);
+        try {
+          const head = await fetch(url, { method: 'HEAD', signal: ac.signal });
+          if (head.ok) {
+            this.totalBytes = parseInt(head.headers.get('Content-Length') || '0');
+            this._contentType = head.headers.get('Content-Type') || this._contentType;
+          }
+        } finally {
+          clearTimeout(timer);
+        }
      }

-      this.totalBytes
-      const contentType = headResponse.headers.get('Content-Type') || 'application/octet-stream';
+      console.log('[FileDownload] File size:', (this.totalBytes / 1024 / 1024).toFixed(1), 'MB');

-      console.log('[DownloadTask] File size:', (this.totalBytes / 1024 / 1024).toFixed(1), 'MB');
-
-      // Download in chunks if large file
-      let blob;
      const chunkSize = this.options.chunkSize || DEFAULT_CHUNK_SIZE;
-      const chunksPerFile = this.options.chunksPerFile || DEFAULT_CHUNKS_PER_FILE;

-      if (this.totalBytes >
-
-
-
-
+      if (this.totalBytes > CHUNK_THRESHOLD) {
+        const ranges = [];
+        for (let start = 0; start < this.totalBytes; start += chunkSize) {
+          ranges.push({
+            start,
+            end: Math.min(start + chunkSize - 1, this.totalBytes - 1),
+            index: ranges.length
+          });
+        }
+        this.totalChunks = ranges.length;

-
-
-
-
-
-
-
-        // Continue anyway (kiosk mode)
+        const needed = ranges.filter(r => !this.skipChunks.has(r.index));
+        const skippedCount = ranges.length - needed.length;
+
+        for (const r of ranges) {
+          if (this.skipChunks.has(r.index)) {
+            this.downloadedBytes += (r.end - r.start + 1);
+          }
        }
-      }

-
+        if (needed.length === 0) {
+          console.log('[FileDownload] All chunks already cached, nothing to download');
+          this.state = 'complete';
+          this._resolve(new Blob([], { type: this._contentType }));
+          return;
+        }

-
-
-
+        if (skippedCount > 0) {
+          console.log(`[FileDownload] Resuming: ${skippedCount} chunks cached, ${needed.length} to download`);
+        }

-
-
-
-
+        const isResume = skippedCount > 0;
+
+        if (isResume) {
+          const sorted = needed.sort((a, b) => a.index - b.index);
+          for (const r of sorted) {
+            const task = new DownloadTask(this.fileInfo, {
+              chunkIndex: r.index, rangeStart: r.start, rangeEnd: r.end
+            });
+            task._parentFile = this;
+            task._priority = PRIORITY.normal;
+            this.tasks.push(task);
+          }
+        } else {
+          for (const r of needed) {
+            const task = new DownloadTask(this.fileInfo, {
+              chunkIndex: r.index, rangeStart: r.start, rangeEnd: r.end
+            });
+            task._parentFile = this;
+            task._priority = (r.index === 0 || r.index === ranges.length - 1) ? PRIORITY.high : PRIORITY.normal;
+            this.tasks.push(task);
+          }
+        }
+
+        const highCount = this.tasks.filter(t => t._priority >= PRIORITY.high).length;
+        console.log(`[FileDownload] ${type}/${id}: ${this.tasks.length} chunks` +
+          (highCount > 0 ? ` (${highCount} priority)` : '') +
+          (isResume ? ' (resume)' : ''));
+
+      } else {
+        this.totalChunks = 1;
+        const task = new DownloadTask(this.fileInfo, {});
+        task._parentFile = this;
+        this.tasks.push(task);
      }
-      this.waiters = [];

-
+      queue.enqueueChunkTasks(this.tasks);
+      this.state = 'downloading';

    } catch (error) {
-      console.error('[
+      console.error('[FileDownload] Prepare failed:', `${this.fileInfo.type}/${this.fileInfo.id}`, error);
      this.state = 'failed';
-
-      // Reject all waiters
-      this.promise = Promise.reject(error);
-      this.promise.catch(() => {}); // Prevent unhandled rejection if nobody calls wait()
-      for (const waiter of this.waiters) {
-        waiter.reject(error);
-      }
-      this.waiters = [];
-
-      throw error;
+      this._reject(error);
    }
  }

-
-
-
-  async downloadFull(url) {
-    const response = await fetch(url);
-    if (!response.ok) {
-      throw new Error(`Download failed: ${response.status}`);
-    }
+  async onTaskComplete(task) {
+    this.completedChunks++;
+    this.downloadedBytes += task.blob.size;

-
-
-
-  }
+    if (task.chunkIndex != null) {
+      this._chunkBlobs.set(task.chunkIndex, task.blob);
+    }

-
-
-   * If onChunkDownloaded callback is set, fires it for each chunk as it arrives
-   * so the caller can cache chunks progressively (enabling streaming before
-   * the entire file is downloaded).
-   */
-  async downloadChunks(url, contentType, chunkSize, concurrentChunks) {
-    // Calculate chunk ranges
-    const chunkRanges = [];
-    for (let start = 0; start < this.totalBytes; start += chunkSize) {
-      const end = Math.min(start + chunkSize - 1, this.totalBytes - 1);
-      chunkRanges.push({ start, end, index: chunkRanges.length });
+    if (this.options.onProgress) {
+      this.options.onProgress(this.downloadedBytes, this.totalBytes);
    }

-    //
-
-
-
-
+    // Fire progressive chunk callback
+    if (this.onChunkDownloaded && task.chunkIndex != null) {
+      try {
+        await this.onChunkDownloaded(task.chunkIndex, task.blob, this.totalChunks);
+      } catch (e) {
+        console.warn('[FileDownload] onChunkDownloaded callback error:', e);
+      }
    }

-
-
+    if (this.completedChunks === this.tasks.length && this.state !== 'complete') {
+      this.state = 'complete';
+      const { type, id } = this.fileInfo;
+
+      if (task.chunkIndex == null) {
+        console.log('[FileDownload] Complete:', `${type}/${id}`, `(${task.blob.size} bytes)`);
+        this._resolve(task.blob);
+      } else if (this.onChunkDownloaded) {
+        console.log('[FileDownload] Complete:', `${type}/${id}`, `(progressive, ${this.totalChunks} chunks)`);
+        this._resolve(new Blob([], { type: this._contentType }));
+      } else {
+        const ordered = [];
+        for (let i = 0; i < this.totalChunks; i++) {
+          const blob = this._chunkBlobs.get(i);
+          if (blob) ordered.push(blob);
+        }
+        const assembled = new Blob(ordered, { type: this._contentType });
+        console.log('[FileDownload] Complete:', `${type}/${id}`, `(${assembled.size} bytes, reassembled)`);
+        this._resolve(assembled);
+      }

-
+      this._chunkBlobs.clear();
+    }
+  }

-
+  onTaskFailed(task, error) {
+    if (this.state === 'complete' || this.state === 'failed') return;
+
+    // URL expiration is transient — drop this task, don't fail the file.
+    // Already-downloaded chunks are safe in cache. Next collection cycle
+    // provides fresh URLs and the resume logic (skipChunks) fills the gaps.
+    if (error.message?.includes('URL expired')) {
+      const chunkLabel = task.chunkIndex != null ? ` chunk ${task.chunkIndex}` : '';
+      console.warn(`[FileDownload] URL expired, dropping${chunkLabel}:`, `${this.fileInfo.type}/${this.fileInfo.id}`);
+      this.tasks = this.tasks.filter(t => t !== task);
+      // If all remaining tasks completed, resolve as partial
+      if (this.tasks.length === 0 || this.completedChunks >= this.tasks.length) {
+        this.state = 'complete';
+        this._urlExpired = true;
+        this._resolve(new Blob([], { type: this._contentType }));
+      }
+      return;
+    }

-
-
+    console.error('[FileDownload] Failed:', `${this.fileInfo.type}/${this.fileInfo.id}`, error);
+    this.state = 'failed';
+    this._reject(error);
+  }
+}
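The range math in prepare() is easiest to see with concrete numbers. A small worked sketch (the file size is illustrative, not from the diff):

```js
// A 230MB file with the default 50MB chunk size yields 5 Range requests.
const totalBytes = 230 * 1024 * 1024;
const chunkSize = 50 * 1024 * 1024;
const ranges = [];
for (let start = 0; start < totalBytes; start += chunkSize) {
  ranges.push({ start, end: Math.min(start + chunkSize - 1, totalBytes - 1), index: ranges.length });
}
// ranges[0]   → bytes 0-52428799           (chunk 0, PRIORITY.high)
// ranges[4]   → bytes 209715200-241172479  (last chunk, only 30MB, PRIORITY.high)
// ranges[1-3] → bulk chunks at PRIORITY.normal
```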

-
-
-
-
+/**
+ * LayoutTaskBuilder — Smart builder that produces a sorted, barrier-embedded
+ * task list for a single layout.
+ *
+ * Usage:
+ *   const builder = new LayoutTaskBuilder(queue);
+ *   builder.addFile(fileInfo);
+ *   const orderedTasks = await builder.build();
+ *   queue.enqueueOrderedTasks(orderedTasks);
+ *
+ * The builder runs HEAD requests (throttled), collects the resulting
+ * DownloadTasks, sorts them optimally, and embeds BARRIERs between
+ * critical chunks (chunk-0, chunk-last) and bulk chunks.
+ *
+ * Duck-typing: implements enqueueChunkTasks() so FileDownload.prepare()
+ * works unchanged — it just collects tasks instead of processing them.
+ */
+export class LayoutTaskBuilder {
+  constructor(queue) {
+    this.queue = queue; // Main DownloadQueue (for dedup via active map)
+    this._filesToPrepare = []; // FileDownloads needing HEAD requests
+    this._tasks = []; // Collected DownloadTasks (from prepare callbacks)
+    this._maxPreparing = 2; // HEAD request throttle
+  }

-
-
+  /**
+   * Register a file. Uses queue.active for dedup/URL refresh.
+   * Does NOT trigger prepare — that happens in build().
+   */
+  addFile(fileInfo) {
+    const key = DownloadQueue.stableKey(fileInfo);
+
+    if (this.queue.active.has(key)) {
+      const existing = this.queue.active.get(key);
+      // URL refresh (same logic as queue.enqueue)
+      if (fileInfo.path && fileInfo.path !== existing.fileInfo.path) {
+        const oldExpiry = getUrlExpiry(existing.fileInfo.path);
+        const newExpiry = getUrlExpiry(fileInfo.path);
+        if (newExpiry > oldExpiry) {
+          existing.fileInfo.path = fileInfo.path;
        }
+      }
+      return existing;
+    }

-
-
+    const file = new FileDownload(fileInfo, {
+      chunkSize: this.queue.chunkSize,
+      calculateMD5: this.queue.calculateMD5,
+      onProgress: this.queue.onProgress
+    });

-
-
-
+    this.queue.active.set(key, file);
+    this._filesToPrepare.push(file);
+    return file;
+  }

-
-
-
-
-
-
-
-  }
+  /**
+   * Duck-type interface for FileDownload.prepare().
+   * Collects tasks instead of processing them.
+   */
+  enqueueChunkTasks(tasks) {
+    this._tasks.push(...tasks);
+  }

-
-
-
-
+  /**
+   * Run all HEAD requests (throttled) and return sorted tasks with barriers.
+   */
+  async build() {
+    await this._prepareAll();
+    return this._sortWithBarriers();
+  }

-
+  async _prepareAll() {
+    await new Promise((resolve) => {
+      let running = 0;
+      let idx = 0;
+      const next = () => {
+        while (running < this._maxPreparing && idx < this._filesToPrepare.length) {
+          const file = this._filesToPrepare[idx++];
+          running++;
+          file.prepare(this).finally(() => {
+            running--;
+            if (idx >= this._filesToPrepare.length && running === 0) {
+              resolve();
+            } else {
+              next();
+            }
+          });
+        }
+      };
+      if (this._filesToPrepare.length === 0) resolve();
+      else next();
+    });
+  }

-
-
-
+  _sortWithBarriers() {
+    const nonChunked = [];
+    const chunk0s = [];
+    const chunkLasts = [];
+    const remaining = [];
+
+    for (const t of this._tasks) {
+      if (t.chunkIndex == null) {
+        nonChunked.push(t);
+      } else if (t.chunkIndex === 0) {
+        chunk0s.push(t);
+      } else {
+        const total = t._parentFile?.totalChunks || 0;
+        if (total > 1 && t.chunkIndex === total - 1) {
+          chunkLasts.push(t);
+        } else {
+          remaining.push(t);
+        }
      }
-    };
-
-    // Phase 1: chunk 0 + last chunk in parallel (both needed for playback start)
-    await Promise.all(priorityChunks.map(range => downloadChunk(range)));
-
-    // Phase 2: remaining chunks strictly sequential (guarantees gap-free playback)
-    for (const range of remainingChunks) {
-      await downloadChunk(range);
    }

-
-
-    // Return a lightweight marker — the real data is already in cache
-    return new Blob([], { type: contentType });
-  }
+    nonChunked.sort((a, b) => (a._parentFile?.totalBytes || 0) - (b._parentFile?.totalBytes || 0));
+    remaining.sort((a, b) => a.chunkIndex - b.chunkIndex);

-    //
-    const
-
-
+    // Build: small files + critical chunks → BARRIER → bulk chunks
+    const result = [...nonChunked, ...chunk0s, ...chunkLasts];
+    if (remaining.length > 0) {
+      result.push(BARRIER, ...remaining);
    }
-
-    return new Blob(orderedChunks, { type: contentType });
+    return result;
  }
 }

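The builder's own usage comment shows the call pattern; a hedged sketch of what the ordered queue looks like for one layout, assuming `dm` is the DownloadManager instance from the earlier example and the three file objects are illustrative RequiredFiles entries:

```js
const builder = new LayoutTaskBuilder(dm.queue); // dm.queue is the shared DownloadQueue
builder.addFile(introVideo);  // > 100MB, gets chunked
builder.addFile(mainVideo);   // > 100MB, gets chunked
builder.addFile(logoPng);     // small file, single task

const ordered = await builder.build();
// ordered ≈ [logo, intro-chunk0, main-chunk0, intro-chunkLast, main-chunkLast,
//            BARRIER, ...bulk chunks sorted by chunk index]
dm.queue.enqueueOrderedTasks(ordered);
```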
 /**
- * DownloadQueue -
+ * DownloadQueue - Flat queue with per-file and global concurrency limits
+ *
+ * Global concurrency limit (e.g., 6) controls total HTTP connections.
+ * Per-file chunk limit (e.g., 2) prevents one large file from hogging all
+ * connections, ensuring bandwidth per chunk is high and chunk 0 arrives fast.
+ * HEAD requests are throttled to avoid flooding browser connection pool.
 */
 export class DownloadQueue {
   constructor(options = {}) {
     this.concurrency = options.concurrency || DEFAULT_CONCURRENCY;
     this.chunkSize = options.chunkSize || DEFAULT_CHUNK_SIZE;
-    this.
-    this.calculateMD5 = options.calculateMD5;
-    this.onProgress = options.onProgress;
+    this.maxChunksPerFile = options.chunksPerFile || DEFAULT_MAX_CHUNKS_PER_FILE;
+    this.calculateMD5 = options.calculateMD5;
+    this.onProgress = options.onProgress;

-    this.queue = [];
-    this.active = new Map();
+    this.queue = []; // DownloadTask[] — flat queue of chunk/file tasks
+    this.active = new Map(); // stableKey → FileDownload
+    this._activeTasks = []; // DownloadTask[] — currently in-flight tasks
     this.running = 0;
+
+    // HEAD request throttling: prevents prepare() from flooding browser connections
+    this._prepareQueue = [];
+    this._preparingCount = 0;
+    this._maxPreparing = 2; // Max concurrent HEAD requests
+
+    // When paused, processQueue() is a no-op (used during barrier setup)
+    this.paused = false;
   }

-
-
-
-   */
-  enqueue(fileInfo) {
-    const { path } = fileInfo;
+  static stableKey(fileInfo) {
+    return `${fileInfo.type}/${fileInfo.id}`;
+  }

-
-
-
-
+  enqueue(fileInfo) {
+    const key = DownloadQueue.stableKey(fileInfo);
+
+    if (this.active.has(key)) {
+      const existing = this.active.get(key);
+      if (fileInfo.path && fileInfo.path !== existing.fileInfo.path) {
+        const oldExpiry = getUrlExpiry(existing.fileInfo.path);
+        const newExpiry = getUrlExpiry(fileInfo.path);
+        if (newExpiry > oldExpiry) {
+          console.log('[DownloadQueue] Refreshing URL for', key);
+          existing.fileInfo.path = fileInfo.path;
+        }
+      }
+      return existing;
    }

-
-    const task = new DownloadTask(fileInfo, {
+    const file = new FileDownload(fileInfo, {
      chunkSize: this.chunkSize,
-      chunksPerFile: this.chunksPerFile,
      calculateMD5: this.calculateMD5,
      onProgress: this.onProgress
    });

-    this.active.set(
-
+    this.active.set(key, file);
+    console.log('[DownloadQueue] Enqueued:', key);

-
+    // Throttled prepare: HEAD requests are limited to avoid flooding connections
+    this._schedulePrepare(file);

-
+    return file;
+  }
+
+  /**
+   * Schedule a FileDownload's prepare() with throttling.
+   * Only N HEAD requests run concurrently to preserve connections for data transfers.
+   */
+  _schedulePrepare(file) {
+    this._prepareQueue.push(file);
+    this._processPrepareQueue();
+  }
+
+  _processPrepareQueue() {
+    while (this._preparingCount < this._maxPreparing && this._prepareQueue.length > 0) {
+      const file = this._prepareQueue.shift();
+      this._preparingCount++;
+      file.prepare(this).finally(() => {
+        this._preparingCount--;
+        this._processPrepareQueue();
+      });
+    }
+  }
+
+  enqueueChunkTasks(tasks) {
+    for (const task of tasks) {
+      this.queue.push(task);
+    }
+    this._sortQueue();
+
+    console.log(`[DownloadQueue] ${tasks.length} tasks added (${this.queue.length} pending, ${this.running} active)`);
+    this.processQueue();
+  }
+
+  /**
+   * Enqueue a pre-ordered list of tasks (with optional BARRIER sentinels).
+   * Preserves insertion order — no sorting. Position = priority.
+   *
+   * Used by LayoutQueueBuilder to push the entire download queue in layout
+   * playback order with barriers separating critical chunks from bulk.
+   *
+   * @param {Array<DownloadTask|Symbol>} items - Tasks and BARRIERs in order
+   */
+  enqueueOrderedTasks(items) {
+    let taskCount = 0;
+    let barrierCount = 0;
+    for (const item of items) {
+      if (item === BARRIER) {
+        this.queue.push(BARRIER);
+        barrierCount++;
+      } else {
+        this.queue.push(item);
+        taskCount++;
+      }
+    }
+
+    console.log(`[DownloadQueue] Ordered queue: ${taskCount} tasks, ${barrierCount} barriers (${this.queue.length} pending, ${this.running} active)`);
    this.processQueue();
+  }

-
+  /** Sort queue by priority (highest first), stable within same priority. */
+  _sortQueue() {
+    this.queue.sort((a, b) => b._priority - a._priority);
+  }
+
+  prioritize(fileType, fileId) {
+    const key = `${fileType}/${fileId}`;
+    const file = this.active.get(key);
+
+    if (!file) {
+      console.log('[DownloadQueue] Not found:', key);
+      return false;
+    }
+
+    let boosted = 0;
+    for (const t of this.queue) {
+      if (t._parentFile === file && t._priority < PRIORITY.high) {
+        t._priority = PRIORITY.high;
+        boosted++;
+      }
+    }
+    this._sortQueue();
+
+    console.log('[DownloadQueue] Prioritized:', key, `(${boosted} tasks boosted)`);
+    return true;
  }

  /**
-   *
-   *
-   * @param {
+   * Boost priority for files needed by the current/next layout.
+   * @param {Array} fileIds - Media IDs needed by the layout
+   * @param {number} priority - Priority level (default: PRIORITY.high)
   */
-  prioritizeLayoutFiles(fileIds) {
+  prioritizeLayoutFiles(fileIds, priority = PRIORITY.high) {
    const idSet = new Set(fileIds.map(String));
-
-
-
-
-
-
-      if (idSet.has(String(task.fileInfo.id))) {
-        prioritized.push(task);
-      } else {
-        rest.push(task);
+
+    let boosted = 0;
+    for (const t of this.queue) {
+      if (idSet.has(String(t._parentFile?.fileInfo.id)) && t._priority < priority) {
+        t._priority = priority;
+        boosted++;
      }
    }
-    this.
-
+    for (const t of this._activeTasks) {
+      if (idSet.has(String(t._parentFile?.fileInfo.id)) && t._priority < priority) {
+        t._priority = priority;
+      }
+    }
+    this._sortQueue();
+
+    console.log('[DownloadQueue] Layout files prioritized:', idSet.size, 'files,', boosted, 'tasks boosted to', priority);
  }

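A hedged sketch of boosting the media a layout is about to need (IDs are illustrative, `PRIORITY` is the exported priority map, and the facade method near the end of this diff forwards here):

```js
// Before switching layouts, push that layout's media ahead of normal-priority work.
dm.prioritizeLayoutFiles([101, 102, 305]);         // defaults to PRIORITY.high
dm.prioritizeLayoutFiles([777], PRIORITY.layout);  // or pass an explicit level
```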
  /**
-   *
-   *
+   * Emergency priority for a specific streaming chunk.
+   * Called by the Service Worker when a video is stalled waiting for chunk N.
+   * Sets urgent priority → queue re-sorts → processQueue() limits concurrency.
   */
-
-
-
-
-
-
+  urgentChunk(fileType, fileId, chunkIndex) {
+    const key = `${fileType}/${fileId}`;
+    const file = this.active.get(key);
+
+    if (!file) {
+      console.log('[DownloadQueue] urgentChunk: file not active:', key, 'chunk', chunkIndex);
+      return false;
+    }
+
+    // Already in-flight — nothing to do
+    const isActive = this._activeTasks.some(
+      t => t._parentFile === file && t.chunkIndex === chunkIndex && t.state === 'downloading'
+    );
+    if (isActive) {
+      // Mark the in-flight task as urgent so processQueue() limits concurrency
+      const activeTask = this._activeTasks.find(
+        t => t._parentFile === file && t.chunkIndex === chunkIndex
      );
+      if (activeTask && activeTask._priority < PRIORITY.urgent) {
+        activeTask._priority = PRIORITY.urgent;
+        console.log(`[DownloadQueue] URGENT: ${key} chunk ${chunkIndex} (already in-flight, limiting slots)`);
+        // Don't call processQueue() — can't stop in-flight tasks, but next
+        // processQueue() call (when any task completes) will see hasUrgent
+        // and limit new starts to URGENT_CONCURRENCY.
+        return true;
+      }
+      console.log('[DownloadQueue] urgentChunk: already urgent:', key, 'chunk', chunkIndex);
+      return false;
+    }
+
+    // Find task in queue (may be past a barrier)
+    const idx = this.queue.findIndex(
+      t => t !== BARRIER && t._parentFile === file && t.chunkIndex === chunkIndex
+    );

-
-
-
-
-
-
-
+    if (idx === -1) {
+      console.log('[DownloadQueue] urgentChunk: chunk not in queue:', key, 'chunk', chunkIndex);
+      return false;
+    }
+
+    const task = this.queue.splice(idx, 1)[0];
+    task._priority = PRIORITY.urgent;
+    // Move to front of queue (past any barriers)
+    this.queue.unshift(task);
+
+    console.log(`[DownloadQueue] URGENT: ${key} chunk ${chunkIndex} (moved to front)`);
+    this.processQueue();
+    return true;
+  }
+
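A hedged sketch of the Service Worker side calling this escalation when a streamed video stalls on an uncached chunk; the byte offset and media ID are illustrative and the request parsing is simplified:

```js
const CHUNK_SIZE = 50 * 1024 * 1024;   // must match the queue's chunkSize
const rangeStart = 157286400;          // byte offset the stalled Range request asked for
const chunkIndex = Math.floor(rangeStart / CHUNK_SIZE); // → 3
if (dm.urgentChunk('media', 42, chunkIndex)) {
  // The queue drops to URGENT_CONCURRENCY (2 slots) until this chunk completes.
}
```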
+  /**
+   * Process queue — barrier-aware loop.
+   *
+   * Supports two modes:
+   * 1. Priority-sorted (legacy): queue sorted by priority, urgent reduces concurrency
+   * 2. Barrier-ordered: queue contains BARRIER sentinels that act as hard gates
+   *
+   * BARRIER behavior:
+   * - When processQueue() hits a BARRIER and running > 0 → STOP (slots stay empty)
+   * - When running === 0 → remove barrier, continue with tasks below
+   * - Tasks are never reordered past a BARRIER (except urgentChunk which bypasses)
+   *
+   * Urgent mode: when any task has PRIORITY.urgent, concurrency drops to
+   * URGENT_CONCURRENCY so the stalled chunk gets maximum bandwidth.
+   */
+  processQueue() {
+    if (this.paused) return;
+
+    // Determine effective concurrency and minimum priority to start
+    const hasUrgent = this.queue.some(t => t !== BARRIER && t._priority >= PRIORITY.urgent) ||
+      this._activeTasks?.some(t => t._priority >= PRIORITY.urgent && t.state === 'downloading');
+    const maxSlots = hasUrgent ? URGENT_CONCURRENCY : this.concurrency;
+    const minPriority = hasUrgent ? PRIORITY.urgent : 0; // Urgent = only urgent tasks run
+
+    // Fill slots from front of queue
+    while (this.running < maxSlots && this.queue.length > 0) {
+      const next = this.queue[0];
+
+      // Hit a BARRIER — hard gate
+      if (next === BARRIER) {
+        if (this.running > 0) {
+          break; // In-flight tasks still running — slots stay empty
        }
-
+        // All above-barrier tasks done → raise barrier, continue
+        this.queue.shift();
+        continue;
      }
-      // All held files done — clear hold
-      console.log('[DownloadQueue] Layout hold cleared, resuming normal downloads');
-      this._layoutHoldIds = null;
-    }

-
-
-
-
+      // Per-file limit: skip to next eligible task (but don't cross barrier)
+      if (next._priority < minPriority || !this._canStartTask(next)) {
+        let found = false;
+        for (let i = 1; i < this.queue.length; i++) {
+          if (this.queue[i] === BARRIER) break; // Don't look past barrier
+          const task = this.queue[i];
+          if (task._priority >= minPriority && this._canStartTask(task)) {
+            this.queue.splice(i, 1);
+            this._startTask(task);
+            found = true;
+            break;
+          }
+        }
+        if (!found) break;
+        continue;
+      }
+
+      this.queue.shift();
+      this._startTask(next);
    }

    if (this.queue.length === 0 && this.running === 0) {
@@ -348,87 +814,88 @@ export class DownloadQueue {
  }

  /**
-   *
+   * Per-file concurrency check. Priority sorting decides order,
+   * this just prevents one file from hogging all connections.
   */
+  _canStartTask(task) {
+    return task._parentFile._runningCount < this.maxChunksPerFile;
+  }
+
  _startTask(task) {
    this.running++;
-
+    task._parentFile._runningCount++;
+    this._activeTasks.push(task);
+    const key = `${task.fileInfo.type}/${task.fileInfo.id}`;
+    const chunkLabel = task.chunkIndex != null ? ` chunk ${task.chunkIndex}` : '';
+    console.log(`[DownloadQueue] Starting: ${key}${chunkLabel} (${this.running}/${this.concurrency} active)`);

    task.start()
-      .
-      .finally(() => {
+      .then(() => {
        this.running--;
-
-
+        task._parentFile._runningCount--;
+        this._activeTasks = this._activeTasks.filter(t => t !== task);
+        console.log(`[DownloadQueue] Fetched: ${key}${chunkLabel} (${this.running} active, ${this.queue.length} pending)`);
        this.processQueue();
+        return task._parentFile.onTaskComplete(task);
+      })
+      .catch(err => {
+        this.running--;
+        task._parentFile._runningCount--;
+        this._activeTasks = this._activeTasks.filter(t => t !== task);
+        this.processQueue();
+        task._parentFile.onTaskFailed(task, err);
      });
  }

  /**
-   *
-   *
-   *
-   * @returns {boolean} true if file was found (queued or active)
+   * Wait for all queued prepare (HEAD) operations to finish.
+   * Returns when the prepare queue is drained and all FileDownloads have
+   * either created their tasks or failed.
   */
-
-
-
-
-
-
-
-
-
-
-  }
+  awaitAllPrepared() {
+    return new Promise((resolve) => {
+      const check = () => {
+        if (this._preparingCount === 0 && this._prepareQueue.length === 0) {
+          resolve();
+        } else {
+          setTimeout(check, 50);
+        }
+      };
+      check();
+    });
+  }

-
-
-
+  removeCompleted(key) {
+    const file = this.active.get(key);
+    if (file && (file.state === 'complete' || file.state === 'failed')) {
+      this.queue = this.queue.filter(t => t === BARRIER || t._parentFile !== file);
+      this.active.delete(key);
    }
-
-    // Check if already downloading
-    for (const [, task] of this.active) {
-      if (task.fileInfo.type === fileType && String(task.fileInfo.id) === String(fileId)) {
-        console.log('[DownloadQueue] Already downloading:', `${fileType}/${fileId}`);
-        return true;
-      }
-    }
-
-    console.log('[DownloadQueue] Not found in queue:', `${fileType}/${fileId}`);
-    return false;
  }

-
-
-   */
-  getTask(url) {
-    return this.active.get(url) || null;
+  getTask(key) {
+    return this.active.get(key) || null;
  }

-  /**
-   * Get progress for all active downloads
-   */
  getProgress() {
    const progress = {};
-    for (const [
-      progress[
-        downloaded:
-        total:
-        percent:
-        state:
+    for (const [key, file] of this.active.entries()) {
+      progress[key] = {
+        downloaded: file.downloadedBytes,
+        total: file.totalBytes,
+        percent: file.totalBytes > 0 ? (file.downloadedBytes / file.totalBytes * 100).toFixed(1) : 0,
+        state: file.state
      };
    }
    return progress;
  }

-  /**
-   * Cancel all downloads
-   */
  clear() {
    this.queue = [];
    this.active.clear();
    this.running = 0;
+    this._prepareQueue = [];
+    this._preparingCount = 0;
  }
 }

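A short sketch tying the queue helpers above together: wait for the prepare (HEAD) work to drain, then poll per-file progress (the polling interval is an arbitrary choice, and `dm` is the DownloadManager instance from earlier):

```js
await dm.queue.awaitAllPrepared();
const timer = setInterval(() => {
  const progress = dm.getProgress(); // { 'media/42': { downloaded, total, percent, state }, ... }
  console.table(progress);
  const done = Object.values(progress).every(p => p.state === 'complete' || p.state === 'failed');
  if (done) clearInterval(timer);
}, 1000);
```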
@@ -440,40 +907,37 @@ export class DownloadManager {
    this.queue = new DownloadQueue(options);
  }

-  /**
-   * Enqueue file for download
-   * @param {Object} fileInfo - { id, type, path, md5 }
-   * @returns {DownloadTask}
-   */
  enqueue(fileInfo) {
    return this.queue.enqueue(fileInfo);
  }

  /**
-   *
+   * Enqueue a file for layout-grouped downloading.
+   * Layout grouping is now handled externally by LayoutTaskBuilder.
+   * @param {Object} fileInfo - File info
+   * @returns {FileDownload}
   */
-
-    return this.queue.
+  enqueueForLayout(fileInfo) {
+    return this.queue.enqueue(fileInfo);
+  }
+
+  getTask(key) {
+    return this.queue.getTask(key);
  }

-  /**
-   * Get progress for all downloads
-   */
  getProgress() {
    return this.queue.getProgress();
  }

-
-
-   */
-  prioritizeLayoutFiles(fileIds) {
-    this.queue.prioritizeLayoutFiles(fileIds);
+  prioritizeLayoutFiles(fileIds, priority) {
+    this.queue.prioritizeLayoutFiles(fileIds, priority);
    this.queue.processQueue();
  }

-
-
-
+  urgentChunk(fileType, fileId, chunkIndex) {
+    return this.queue.urgentChunk(fileType, fileId, chunkIndex);
+  }
+
  clear() {
    this.queue.clear();
  }