@xiboplayer/cache 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/src/cache.js ADDED
@@ -0,0 +1,739 @@
1
+ /**
2
+ * File cache manager using Cache API and IndexedDB
3
+ */
4
+
5
+ import SparkMD5 from 'spark-md5';
6
+ import { config } from '@xiboplayer/utils';
7
+
8
+ const CACHE_NAME = 'xibo-media-v1';
9
+ const DB_NAME = 'xibo-player';
10
+ const DB_VERSION = 1;
11
+ const STORE_FILES = 'files';
12
+ const CONCURRENT_CHUNKS = 4; // Download up to 4 chunks of a large file in parallel
13
+
14
+ // Dynamic base path for multi-variant deployment (pwa, pwa-xmds, pwa-xlr)
15
+ const BASE = (typeof window !== 'undefined')
16
+ ? window.location.pathname.replace(/\/[^/]*$/, '').replace(/\/$/, '') || '/player/pwa'
17
+ : '/player/pwa';
18
+
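For illustration, a minimal sketch of how the expression above resolves BASE for a few hypothetical pathnames (the deployment paths are assumptions, not values shipped with the package):

    // Same logic as the BASE constant above, extracted for illustration
    const resolveBase = (pathname) =>
      pathname.replace(/\/[^/]*$/, '').replace(/\/$/, '') || '/player/pwa';

    resolveBase('/player/pwa-xmds/index.html'); // '/player/pwa-xmds'
    resolveBase('/player/pwa-xlr/');            // '/player/pwa-xlr'
    resolveBase('/index.html');                 // '' -> falls back to '/player/pwa'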
19
+ export class CacheManager {
20
+ constructor() {
21
+ this.cache = null;
22
+ this.db = null;
23
+ // Dependants: mediaId → Set<layoutId> — tracks which layouts use each media file
24
+ this.dependants = new Map();
25
+ }
26
+
27
+ /**
28
+ * Extract filename from download URL
29
+ * URL format: https://.../xmds.php?file=1.png&...
30
+ */
31
+ extractFilename(url) {
32
+ try {
33
+ const urlObj = new URL(url);
34
+ const fileParam = urlObj.searchParams.get('file');
35
+ return fileParam || 'unknown';
36
+ } catch (e) {
37
+ return 'unknown';
38
+ }
39
+ }
40
+
41
+ /**
42
+ * Rewrite CMS URL to use configured CMS address
43
+ * Handles cases where RequiredFiles returns absolute URLs
44
+ */
45
+ rewriteUrl(url) {
46
+ if (!url) return url;
47
+
48
+ // If URL is absolute and points to a different domain, rewrite it
49
+ try {
50
+ const urlObj = new URL(url);
51
+ const configUrl = new URL(config.cmsAddress);
52
+
53
+ // If domains differ, replace with configured CMS address
54
+ if (urlObj.origin !== configUrl.origin) {
55
+ console.log(`[Cache] Rewriting URL: ${urlObj.origin} → ${configUrl.origin}`);
56
+ urlObj.protocol = configUrl.protocol;
57
+ urlObj.hostname = configUrl.hostname;
58
+ urlObj.port = configUrl.port;
59
+ return urlObj.toString();
60
+ }
61
+ } catch (e) {
62
+ // Not a valid URL, return as-is
63
+ }
64
+
65
+ return url;
66
+ }
67
+
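A brief usage sketch, assuming config.cmsAddress is a hypothetical https://cms.example.com; only the origin is swapped, the path and query string are preserved, and invalid URLs pass through unchanged:

    const cm = new CacheManager();
    cm.rewriteUrl('http://proxy.internal:8080/xmds.php?file=1.png&v=2');
    // -> 'https://cms.example.com/xmds.php?file=1.png&v=2'
    cm.rewriteUrl('not-a-url');
    // -> 'not-a-url' (new URL() throws, so the value is returned as-is)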
68
+ /**
69
+ * Initialize cache and database
70
+ */
71
+ async init() {
72
+ this.cache = await caches.open(CACHE_NAME);
73
+ this.db = await this.openDB();
74
+ }
75
+
76
+ /**
77
+ * Open IndexedDB
78
+ */
79
+ openDB() {
80
+ return new Promise((resolve, reject) => {
81
+ const request = indexedDB.open(DB_NAME, DB_VERSION);
82
+
83
+ request.onerror = () => reject(request.error);
84
+ request.onsuccess = () => resolve(request.result);
85
+
86
+ request.onupgradeneeded = (event) => {
87
+ const db = event.target.result;
88
+ if (!db.objectStoreNames.contains(STORE_FILES)) {
89
+ const store = db.createObjectStore(STORE_FILES, { keyPath: 'id' });
90
+ store.createIndex('type', 'type', { unique: false });
91
+ }
92
+ };
93
+ });
94
+ }
95
+
96
+ /**
97
+ * Get file record from IndexedDB
98
+ */
99
+ async getFile(id) {
100
+ return new Promise((resolve, reject) => {
101
+ const tx = this.db.transaction(STORE_FILES, 'readonly');
102
+ const store = tx.objectStore(STORE_FILES);
103
+ const request = store.get(id);
104
+ request.onsuccess = () => resolve(request.result);
105
+ request.onerror = () => reject(request.error);
106
+ });
107
+ }
108
+
109
+ /**
110
+ * Save file record to IndexedDB
111
+ */
112
+ async saveFile(fileRecord) {
113
+ return new Promise((resolve, reject) => {
114
+ const tx = this.db.transaction(STORE_FILES, 'readwrite');
115
+ const store = tx.objectStore(STORE_FILES);
116
+ const request = store.put(fileRecord);
117
+ request.onsuccess = () => resolve();
118
+ request.onerror = () => reject(request.error);
119
+ });
120
+ }
121
+
122
+ /**
123
+ * Get all file records
124
+ */
125
+ async getAllFiles() {
126
+ return new Promise((resolve, reject) => {
127
+ const tx = this.db.transaction(STORE_FILES, 'readonly');
128
+ const store = tx.objectStore(STORE_FILES);
129
+ const request = store.getAll();
130
+ request.onsuccess = () => resolve(request.result);
131
+ request.onerror = () => reject(request.error);
132
+ });
133
+ }
134
+
135
+ /**
136
+ * Download and cache a file with MD5 verification
137
+ * Handles large files with chunked background downloads so the collection cycle is not blocked
138
+ *
139
+ * Note: This method is a fallback for when Service Worker is not active.
140
+ * When Service Worker is running, file downloads are handled by sw.js.
141
+ */
142
+ async downloadFile(fileInfo) {
143
+ const { id, type, path, md5, download } = fileInfo;
144
+
145
+ // Check if Service Worker is handling downloads
146
+ if (typeof navigator !== 'undefined' && navigator.serviceWorker?.controller) {
147
+ console.log(`[Cache] Service Worker active - skipping direct download for ${type}/${id}`);
148
+ console.log(`[Cache] File will be downloaded by Service Worker in background`);
149
+ return {
150
+ id,
151
+ type,
152
+ path,
153
+ md5: md5 || 'pending',
154
+ size: 0,
155
+ cachedAt: Date.now(),
156
+ isServiceWorkerDownload: true
157
+ };
158
+ }
159
+
160
+ // Skip files with no URL (widgets/resources generated on-demand)
161
+ if (!path || path === 'null' || path === 'undefined') {
162
+ console.log(`[Cache] Skipping ${type}/${id} - no download URL (will be generated on-demand)`);
163
+ return null;
164
+ }
165
+
166
+ // Check if already cached
167
+ const existing = await this.getFile(id);
168
+ const cacheKey = this.getCacheKey(type, id);
169
+
170
+ if (existing) {
171
+ // Check if MD5 matches current expected value
172
+ if (existing.md5 === md5) {
173
+ // MD5 matches - verify file isn't corrupted
174
+ const cachedResponse = await this.cache.match(cacheKey);
175
+
176
+ if (cachedResponse && type === 'media') {
177
+ const blob = await cachedResponse.blob();
178
+ const contentType = cachedResponse.headers.get('Content-Type');
179
+
180
+ // Delete bad cache (text/plain errors or tiny files)
181
+ if (contentType === 'text/plain' || blob.size < 100) {
182
+ console.warn(`[Cache] Bad cache detected for ${type}/${id} (${contentType}, ${blob.size} bytes) - re-downloading`);
183
+ await this.cache.delete(cacheKey);
184
+ // Continue to download below
185
+ } else {
186
+ console.log(`[Cache] ${type}/${id} already cached`);
187
+ return existing;
188
+ }
189
+ } else if (cachedResponse) {
190
+ console.log(`[Cache] ${type}/${id} already cached`);
191
+ return existing;
192
+ }
193
+ } else {
194
+ // MD5 mismatch - file has been updated on CMS
195
+ console.warn(`[Cache] ${type}/${id} MD5 changed (cached: ${existing.md5}, expected: ${md5}) - re-downloading`);
196
+ await this.cache.delete(cacheKey);
197
+ // Continue to download below
198
+ }
199
+ }
200
+
201
+ console.log(`[Cache] Downloading ${type}/${id} from ${path}`);
202
+
203
+ // Rewrite URL to use configured CMS (handles proxy case)
204
+ const downloadUrl = this.rewriteUrl(path);
205
+ console.log(`[Cache] Using URL: ${downloadUrl}`);
206
+
207
+ // Check file size with HEAD request first (avoid downloading unnecessarily)
208
+ const headResponse = await fetch(downloadUrl, { method: 'HEAD' });
209
+
210
+ // HTTP 202 means Service Worker is still downloading in background
211
+ // Don't proceed with caching - file isn't ready yet
212
+ // Return pending metadata instead of throwing (allows collection to continue)
213
+ if (headResponse.status === 202) {
214
+ console.warn(`[Cache] ${type}/${id} still downloading in background (HTTP 202) - will retry on next collection`);
215
+ return {
216
+ id,
217
+ type,
218
+ path,
219
+ md5: md5 || 'pending',
220
+ size: 0,
221
+ cachedAt: Date.now(),
222
+ isPending: true // Mark as pending for retry
223
+ };
224
+ }
225
+
226
+ const contentLength = parseInt(headResponse.headers.get('Content-Length') || '0', 10);
227
+ const isLargeFile = contentLength > 100 * 1024 * 1024; // > 100 MB
228
+
229
+ console.log(`[Cache] File size: ${(contentLength / 1024 / 1024).toFixed(1)} MB ${isLargeFile ? '(large file)' : ''}`);
230
+
231
+ // cacheKey was already computed above; derive a display filename for progress reporting
232
+ const filename = type === 'media' ? this.extractFilename(path) : id;
233
+
234
+ // Also create MD5-based cache key for content-addressable lookup
235
+ // This allows Service Worker to find files by content hash instead of filename
236
+ const md5CacheKey = md5 ? `/cache/hash/${md5}` : null;
237
+
238
+ let calculatedMd5;
239
+ let fileSize;
240
+
241
+ if (isLargeFile) {
242
+ // Large file: Cache in background for future use, but don't block
243
+ console.log(`[Cache] Large file detected (${(contentLength / 1024 / 1024).toFixed(1)} MB), caching in background`);
244
+
245
+ // Start background download (don't await)
246
+ this.downloadLargeFileInBackground(downloadUrl, cacheKey, contentLength, filename, id, type, path, md5)
247
+ .catch(err => console.warn(`[Cache] Background download failed for ${id}:`, err));
248
+
249
+ // Return immediately - don't block collection cycle
250
+ const metadata = {
251
+ id,
252
+ type,
253
+ path,
254
+ md5: md5 || 'pending',
255
+ size: contentLength,
256
+ cachedAt: Date.now(),
257
+ isBackgroundDownload: true
258
+ };
259
+
260
+ await this.saveFile(metadata);
261
+
262
+ console.log(`[Cache] ${type}/${id} downloading in background (${contentLength} bytes)`);
263
+
264
+ return metadata;
265
+ } else {
266
+ // Small file: Download fully and verify MD5
267
+ this.notifyDownloadProgress(filename, 0, contentLength);
268
+
269
+ // Now do the actual download for small files
270
+ const response = await fetch(downloadUrl);
271
+
272
+ // HTTP 202 means Service Worker is still downloading in background
273
+ // Don't cache the 202 response - it's just a placeholder message
274
+ // Return pending metadata instead of throwing (allows collection to continue)
275
+ if (response.status === 202) {
276
+ console.warn(`[Cache] ${type}/${id} still downloading in background (HTTP 202) - will retry on next collection`);
277
+ return {
278
+ id,
279
+ type,
280
+ path,
281
+ md5: md5 || 'pending',
282
+ size: 0,
283
+ cachedAt: Date.now(),
284
+ isPending: true // Mark as pending for retry
285
+ };
286
+ }
287
+
288
+ if (!response.ok) {
289
+ throw new Error(`Failed to download ${path}: ${response.status}`);
290
+ }
291
+
292
+ const blob = await response.blob();
293
+ const arrayBuffer = await blob.arrayBuffer();
294
+
295
+ // Verify MD5
296
+ calculatedMd5 = SparkMD5.ArrayBuffer.hash(arrayBuffer);
297
+ if (md5 && calculatedMd5 !== md5) {
298
+ // KIOSK MODE: Log MD5 mismatches but always continue
299
+ // Rendering methods (renderImage, renderVideo, renderLayout, etc.) will
300
+ // naturally fail if wrong file type is provided
301
+ // This ensures maximum uptime for kiosk deployments
302
+ console.warn(`[Cache] MD5 mismatch for ${type}/${id}:`);
303
+ console.warn(`[Cache] Expected: ${md5}`);
304
+ console.warn(`[Cache] Got: ${calculatedMd5}`);
305
+ console.warn(`[Cache] Accepting file anyway (kiosk mode - renderer will validate)`);
306
+
307
+ // Use the file regardless - let the renderer handle validation
308
+ calculatedMd5 = md5; // Prevent re-download loop
309
+ }
310
+
311
+ // Cache the response
312
+ await this.cache.put(cacheKey, new Response(blob, {
313
+ headers: {
314
+ 'Content-Type': response.headers.get('Content-Type') || 'application/octet-stream',
315
+ 'Content-Length': blob.size
316
+ }
317
+ }));
318
+
319
+ fileSize = blob.size;
320
+ this.notifyDownloadProgress(filename, fileSize, contentLength, true);
321
+ console.log(`[Cache] Cached ${type}/${id} (${fileSize} bytes, MD5: ${calculatedMd5})`);
322
+ }
323
+
324
+ // Save metadata
325
+ const fileRecord = {
326
+ id,
327
+ type,
328
+ path,
329
+ md5: calculatedMd5,
330
+ size: fileSize,
331
+ cachedAt: Date.now()
332
+ };
333
+ await this.saveFile(fileRecord);
334
+
335
+ return fileRecord;
336
+ }
337
+
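A rough sketch of how a collection routine might drive downloadFile(), run inside an async function after init(); the record fields mirror the destructuring above, but the concrete values and URL are hypothetical:

    const record = await cacheManager.downloadFile({
      id: '12',
      type: 'media',
      path: 'https://cms.example.com/xmds.php?file=12.mp4', // hypothetical CMS URL
      md5: 'd41d8cd98f00b204e9800998ecf8427e'               // hypothetical hash
    });

    if (!record) {
      // No download URL (e.g. widget resource) - nothing to cache yet
    } else if (record.isPending || record.isBackgroundDownload || record.isServiceWorkerDownload) {
      // Not fully cached yet - check again on the next collection cycle
    }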
338
+ /**
339
+ * Get cache key for a file
340
+ * Uses the provided filename when given (e.g. for media), otherwise falls back to the ID
341
+ */
342
+ getCacheKey(type, id, filename = null) {
343
+ const key = filename || id;
344
+ return `${BASE}/cache/${type}/${key}`;
345
+ }
346
+
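For example, assuming BASE resolved to /player/pwa, the keys produced look like this:

    cacheManager.getCacheKey('media', '12');           // '/player/pwa/cache/media/12'
    cacheManager.getCacheKey('media', '12', '12.mp4'); // '/player/pwa/cache/media/12.mp4'
    cacheManager.getCacheKey('layout', '5');           // '/player/pwa/cache/layout/5'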
347
+ /**
348
+ * Get cached file as blob
349
+ */
350
+ async getCachedFile(type, id) {
351
+ const cacheKey = this.getCacheKey(type, id);
352
+ const response = await this.cache.match(cacheKey);
353
+
354
+ if (!response) {
355
+ return null;
356
+ }
357
+ return await response.blob();
358
+ }
359
+
360
+ /**
361
+ * Get cached file as Response (preserves headers like Content-Type)
362
+ */
363
+ async getCachedResponse(type, id) {
364
+ const cacheKey = this.getCacheKey(type, id);
365
+ return await this.cache.match(cacheKey);
366
+ }
367
+
368
+ /**
369
+ * Get cached file as text
370
+ */
371
+ async getCachedFileText(type, id) {
372
+ const cacheKey = this.getCacheKey(type, id);
373
+ const response = await this.cache.match(cacheKey);
374
+ if (!response) {
375
+ return null;
376
+ }
377
+ return await response.text();
378
+ }
379
+
380
+ /**
381
+ * Store widget HTML in cache for iframe loading
382
+ * @param {string} layoutId - Layout ID
383
+ * @param {string} regionId - Region ID
384
+ * @param {string} mediaId - Media ID
385
+ * @param {string} html - Widget HTML content
386
+ * @returns {Promise<string>} Cache key URL
387
+ */
388
+ async cacheWidgetHtml(layoutId, regionId, mediaId, html) {
389
+ const cacheKey = `${BASE}/cache/widget/${layoutId}/${regionId}/${mediaId}`;
390
+ const cache = await caches.open(CACHE_NAME);
391
+
392
+ // Inject <base> tag to fix relative paths for widget dependencies
393
+ // Widget HTML has relative paths like "bundle.min.js" that should resolve to /player/cache/media/
394
+ const baseTag = '<base href="/player/cache/media/">';
395
+ let modifiedHtml = html;
396
+
397
+ // Insert base tag after <head> opening tag
398
+ if (html.includes('<head>')) {
399
+ modifiedHtml = html.replace('<head>', '<head>' + baseTag);
400
+ } else if (html.includes('<HEAD>')) {
401
+ modifiedHtml = html.replace('<HEAD>', '<HEAD>' + baseTag);
402
+ } else {
403
+ // No head tag, prepend base tag
404
+ modifiedHtml = baseTag + html;
405
+ }
406
+
407
+ // Rewrite absolute CMS signed URLs to local cache paths
408
+ // Matches: https://cms/xmds.php?file=bundle.min.js&...&X-Amz-Signature=...
409
+ // These absolute URLs bypass the <base> tag entirely, causing slow CMS fetches
410
+ const cmsUrlRegex = /https?:\/\/[^"'\s)]+xmds\.php\?[^"'\s)]*file=([^&"'\s)]+)[^"'\s)]*/g;
411
+ const staticResources = [];
412
+ modifiedHtml = modifiedHtml.replace(cmsUrlRegex, (match, filename) => {
413
+ const localPath = `${BASE}/cache/static/${filename}`;
414
+ staticResources.push({ filename, originalUrl: match });
415
+ console.log(`[Cache] Rewrote widget URL: ${filename} → ${localPath}`);
416
+ return localPath;
417
+ });
418
+
419
+ // Inject CSS default for object-position to suppress CMS template warning
420
+ // CMS global-elements.xml uses {{alignId}} {{valignId}} which produces
421
+ // invalid CSS (empty value) when alignment is not configured
422
+ const cssFixTag = '<style>img,video{object-position:center center}</style>';
423
+ if (modifiedHtml.includes('</head>')) {
424
+ modifiedHtml = modifiedHtml.replace('</head>', cssFixTag + '</head>');
425
+ } else if (modifiedHtml.includes('</HEAD>')) {
426
+ modifiedHtml = modifiedHtml.replace('</HEAD>', cssFixTag + '</HEAD>');
427
+ }
428
+
429
+ // Rewrite Interactive Control hostAddress to SW-interceptable path
430
+ // The IC library uses hostAddress + '/info', '/trigger', etc.
431
+ // Original: hostAddress: "https://cms.example.com" → XHR to /info goes to CMS (fails)
432
+ // Rewritten: hostAddress: "/player/pwa/ic" → XHR to /player/pwa/ic/info (intercepted by SW)
433
+ modifiedHtml = modifiedHtml.replace(
434
+ /hostAddress\s*:\s*["']https?:\/\/[^"']+["']/g,
435
+ `hostAddress: "${BASE}/ic"`
436
+ );
437
+
438
+ console.log(`[Cache] Injected base tag and rewrote CMS URLs in widget HTML`);
439
+
440
+ // Construct full URL for cache storage
441
+ const cacheUrl = new URL(cacheKey, window.location.origin);
442
+
443
+ const response = new Response(modifiedHtml, {
444
+ headers: {
445
+ 'Content-Type': 'text/html; charset=utf-8',
446
+ 'Access-Control-Allow-Origin': '*'
447
+ }
448
+ });
449
+
450
+ await cache.put(cacheUrl, response);
451
+ console.log(`[Cache] Stored widget HTML at ${cacheKey} (${modifiedHtml.length} bytes)`);
452
+
453
+ // Fetch and cache static resources (shared Cache API - accessible from main thread and SW)
454
+ if (staticResources.length > 0) {
455
+ const STATIC_CACHE_NAME = 'xibo-static-v1';
456
+ const staticCache = await caches.open(STATIC_CACHE_NAME);
457
+
458
+ await Promise.all(staticResources.map(async ({ filename, originalUrl }) => {
459
+ const staticKey = `${BASE}/cache/static/${filename}`;
460
+ const existing = await staticCache.match(staticKey);
461
+ if (existing) return; // Already cached
462
+
463
+ try {
464
+ const resp = await fetch(originalUrl);
465
+ if (!resp.ok) {
466
+ console.warn(`[Cache] Failed to fetch static resource: ${filename} (HTTP ${resp.status})`);
467
+ return;
468
+ }
469
+
470
+ const ext = filename.split('.').pop().toLowerCase();
471
+ const contentType = {
472
+ 'js': 'application/javascript',
473
+ 'css': 'text/css',
474
+ 'otf': 'font/otf', 'ttf': 'font/ttf',
475
+ 'woff': 'font/woff', 'woff2': 'font/woff2',
476
+ 'eot': 'application/vnd.ms-fontobject',
477
+ 'svg': 'image/svg+xml'
478
+ }[ext] || 'application/octet-stream';
479
+
480
+ // For CSS files, rewrite font URLs and cache referenced font files
481
+ if (ext === 'css') {
482
+ let cssText = await resp.text();
483
+ const fontResources = [];
484
+ const fontUrlRegex = /url\((['"]?)(https?:\/\/[^'")\s]+\?[^'")\s]*file=([^&'")\s]+\.(?:woff2?|ttf|otf|eot|svg))[^'")\s]*)\1\)/gi;
485
+ cssText = cssText.replace(fontUrlRegex, (_match, quote, fullUrl, fontFilename) => {
486
+ fontResources.push({ filename: fontFilename, originalUrl: fullUrl });
487
+ console.log(`[Cache] Rewrote font URL in CSS: ${fontFilename}`);
488
+ return `url(${quote}${BASE}/cache/static/${encodeURIComponent(fontFilename)}${quote})`;
489
+ });
490
+
491
+ await staticCache.put(staticKey, new Response(cssText, {
492
+ headers: { 'Content-Type': 'text/css' }
493
+ }));
494
+ console.log(`[Cache] Cached CSS with ${fontResources.length} rewritten font URLs: ${filename}`);
495
+
496
+ // Fetch and cache referenced font files
497
+ await Promise.all(fontResources.map(async ({ filename: fontFile, originalUrl: fontUrl }) => {
498
+ const fontKey = `${BASE}/cache/static/${encodeURIComponent(fontFile)}`;
499
+ const existingFont = await staticCache.match(fontKey);
500
+ if (existingFont) return;
501
+
502
+ try {
503
+ const fontResp = await fetch(fontUrl);
504
+ if (!fontResp.ok) {
505
+ console.warn(`[Cache] Failed to fetch font: ${fontFile} (HTTP ${fontResp.status})`);
506
+ return;
507
+ }
508
+ const fontBlob = await fontResp.blob();
509
+ const fontExt = fontFile.split('.').pop().toLowerCase();
510
+ const fontContentType = {
511
+ 'otf': 'font/otf', 'ttf': 'font/ttf',
512
+ 'woff': 'font/woff', 'woff2': 'font/woff2',
513
+ 'eot': 'application/vnd.ms-fontobject',
514
+ 'svg': 'image/svg+xml'
515
+ }[fontExt] || 'application/octet-stream';
516
+
517
+ await staticCache.put(fontKey, new Response(fontBlob, {
518
+ headers: { 'Content-Type': fontContentType }
519
+ }));
520
+ console.log(`[Cache] Cached font: ${fontFile} (${fontContentType}, ${fontBlob.size} bytes)`);
521
+ } catch (fontErr) {
522
+ console.warn(`[Cache] Failed to cache font: ${fontFile}`, fontErr);
523
+ }
524
+ }));
525
+ } else {
526
+ const blob = await resp.blob();
527
+ await staticCache.put(staticKey, new Response(blob, {
528
+ headers: { 'Content-Type': contentType }
529
+ }));
530
+ console.log(`[Cache] Cached static resource: ${filename} (${contentType}, ${blob.size} bytes)`);
531
+ }
532
+ } catch (error) {
533
+ console.warn(`[Cache] Failed to cache static resource: ${filename}`, error);
534
+ }
535
+ }));
536
+ }
537
+
538
+ return cacheKey;
539
+ }
540
+
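A minimal sketch of how a renderer might consume the returned key; getResource() and regionElement are hypothetical stand-ins for wherever the widget HTML and target DOM node come from, and serving requests under BASE/cache/widget/ is assumed to be handled by the Service Worker:

    const html = await getResource(layoutId, regionId, mediaId); // hypothetical XMDS call
    const key = await cacheManager.cacheWidgetHtml(layoutId, regionId, mediaId, html);

    const iframe = document.createElement('iframe');
    iframe.src = key; // e.g. '/player/pwa/cache/widget/10/2/34'
    regionElement.appendChild(iframe);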
541
+ /**
542
+ * Track that a media file is used by a layout (dependant)
543
+ * @param {string|number} mediaId
544
+ * @param {string|number} layoutId
545
+ */
546
+ addDependant(mediaId, layoutId) {
547
+ const key = String(mediaId);
548
+ if (!this.dependants.has(key)) {
549
+ this.dependants.set(key, new Set());
550
+ }
551
+ this.dependants.get(key).add(String(layoutId));
552
+ }
553
+
554
+ /**
555
+ * Remove a layout from all dependant sets (layout removed from schedule)
556
+ * @param {string|number} layoutId
557
+ * @returns {string[]} Media IDs that are now orphaned (no layouts reference them)
558
+ */
559
+ removeLayoutDependants(layoutId) {
560
+ const lid = String(layoutId);
561
+ const orphaned = [];
562
+
563
+ for (const [mediaId, layouts] of this.dependants) {
564
+ layouts.delete(lid);
565
+ if (layouts.size === 0) {
566
+ this.dependants.delete(mediaId);
567
+ orphaned.push(mediaId);
568
+ }
569
+ }
570
+
571
+ if (orphaned.length > 0) {
572
+ console.log(`[Cache] ${orphaned.length} media files orphaned after layout ${layoutId} removed:`, orphaned);
573
+ }
574
+ return orphaned;
575
+ }
576
+
577
+ /**
578
+ * Check if a media file is still referenced by any layout
579
+ * @param {string|number} mediaId
580
+ * @returns {boolean}
581
+ */
582
+ isMediaReferenced(mediaId) {
583
+ const layouts = this.dependants.get(String(mediaId));
584
+ return layouts ? layouts.size > 0 : false;
585
+ }
586
+
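A sketch of the intended bookkeeping across a schedule change, assuming an async context; the IDs are hypothetical, and purging orphaned entries from the Cache API is left to the caller (this class only tracks references):

    // While parsing layout 10, record the media it references
    cacheManager.addDependant(12, 10);
    cacheManager.addDependant(13, 10);

    // Later, layout 10 drops out of the schedule
    const orphaned = cacheManager.removeLayoutDependants(10); // e.g. ['12', '13']
    for (const mediaId of orphaned) {
      // Assumption: the caller chooses to evict orphaned media
      await cacheManager.cache.delete(cacheManager.getCacheKey('media', mediaId));
    }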
587
+ /**
588
+ * Download large file in background (non-blocking)
589
+ * Continues after collection cycle completes
590
+ * Uses parallel chunk downloads (up to CONCURRENT_CHUNKS at a time) to speed up the transfer
591
+ */
592
+ async downloadLargeFileInBackground(downloadUrl, cacheKey, contentLength, filename, id, type, path, md5) {
593
+ const CHUNK_SIZE = 50 * 1024 * 1024; // 50 MB chunks
594
+ let downloadedBytes = 0;
595
+
596
+ console.log(`[Cache] Background download started: ${filename}`);
597
+ this.notifyDownloadProgress(filename, 0, contentLength);
598
+
599
+ try {
600
+ // Calculate all chunk ranges
601
+ const chunkRanges = [];
602
+ for (let start = 0; start < contentLength; start += CHUNK_SIZE) {
603
+ const end = Math.min(start + CHUNK_SIZE - 1, contentLength - 1);
604
+ chunkRanges.push({ start, end, index: chunkRanges.length });
605
+ }
606
+
607
+ console.log(`[Cache] Downloading ${chunkRanges.length} chunks in parallel (${CONCURRENT_CHUNKS} concurrent)`);
608
+
609
+ // Parallel download with concurrency limit
610
+ const chunkMap = new Map(); // position -> blob
611
+ let nextChunkIndex = 0;
612
+
613
+ const downloadChunk = async (range) => {
614
+ const rangeHeader = `bytes=${range.start}-${range.end}`;
615
+
616
+ try {
617
+ const chunkResponse = await fetch(downloadUrl, {
618
+ headers: { 'Range': rangeHeader }
619
+ });
620
+
621
+ if (!chunkResponse.ok && chunkResponse.status !== 206) {
622
+ throw new Error(`Chunk ${range.index} failed: ${chunkResponse.status}`);
623
+ }
624
+
625
+ const chunkBlob = await chunkResponse.blob();
626
+ chunkMap.set(range.index, chunkBlob);
627
+
628
+ downloadedBytes += chunkBlob.size;
629
+ const progress = ((downloadedBytes / contentLength) * 100).toFixed(1);
630
+ console.log(`[Cache] Chunk ${range.index}/${chunkRanges.length - 1} complete (${progress}%)`);
631
+ this.notifyDownloadProgress(filename, downloadedBytes, contentLength);
632
+
633
+ return chunkBlob;
634
+ } catch (error) {
635
+ console.error(`[Cache] Chunk ${range.index} failed:`, error);
636
+ throw error;
637
+ }
638
+ };
639
+
640
+ // Download with concurrency control
641
+ const downloadNext = async () => {
642
+ while (nextChunkIndex < chunkRanges.length) {
643
+ const range = chunkRanges[nextChunkIndex++];
644
+ await downloadChunk(range);
645
+ }
646
+ };
647
+
648
+ // Start CONCURRENT_CHUNKS parallel downloaders
649
+ const downloaders = [];
650
+ for (let i = 0; i < CONCURRENT_CHUNKS; i++) {
651
+ downloaders.push(downloadNext());
652
+ }
653
+
654
+ await Promise.all(downloaders);
655
+
656
+ // Reassemble chunks in order
657
+ const orderedChunks = [];
658
+ for (let i = 0; i < chunkRanges.length; i++) {
659
+ orderedChunks.push(chunkMap.get(i));
660
+ }
661
+
662
+ // Combine all chunks
663
+ const blob = new Blob(orderedChunks);
664
+
665
+ // Infer content type from the first chunk blob (falls back to video/mp4)
666
+ const contentType = orderedChunks[0]?.type || 'video/mp4';
667
+
668
+ // Cache the complete file
669
+ await this.cache.put(cacheKey, new Response(blob, {
670
+ headers: {
671
+ 'Content-Type': contentType,
672
+ 'Content-Length': blob.size,
673
+ 'Accept-Ranges': 'bytes'
674
+ }
675
+ }));
676
+
677
+ // Update metadata
678
+ const metadata = {
679
+ id,
680
+ type,
681
+ path,
682
+ md5: md5 || 'background',
683
+ size: blob.size,
684
+ cachedAt: Date.now(),
685
+ isBackgroundDownload: false,
686
+ cached: true
687
+ };
688
+
689
+ await this.saveFile(metadata);
690
+
691
+ this.notifyDownloadProgress(filename, downloadedBytes, contentLength, true);
692
+
693
+ console.log(`[Cache] Background download complete: ${filename} (${blob.size} bytes in ${orderedChunks.length} chunks)`);
694
+
695
+ // Notify that file is now available for playback
696
+ window.dispatchEvent(new CustomEvent('media-cached', {
697
+ detail: { filename, id, type, size: blob.size }
698
+ }));
699
+ } catch (error) {
700
+ console.error(`[Cache] Background download failed for ${filename}:`, error);
701
+ this.notifyDownloadProgress(filename, downloadedBytes, contentLength, false, true);
702
+ }
703
+ }
704
+
705
+ /**
706
+ * Notify UI about download progress
707
+ */
708
+ notifyDownloadProgress(filename, loaded, total, complete = false, error = false) {
709
+ const event = new CustomEvent('download-progress', {
710
+ detail: {
711
+ filename,
712
+ loaded,
713
+ total,
714
+ percent: total > 0 ? (loaded / total) * 100 : 0,
715
+ complete,
716
+ error
717
+ }
718
+ });
719
+ window.dispatchEvent(event);
720
+ }
721
+
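Progress is surfaced through DOM events rather than callbacks, so a UI layer would subscribe roughly like this; the event names and detail shapes match the dispatches above:

    window.addEventListener('download-progress', (e) => {
      const { filename, percent, complete, error } = e.detail;
      console.log(`${filename}: ${percent.toFixed(0)}%${complete ? ' done' : ''}${error ? ' failed' : ''}`);
    });

    window.addEventListener('media-cached', (e) => {
      // Fired when a large background download finishes and the file is playable
      const { id, type, size } = e.detail;
      console.log(`media ${id} (${type}) cached, ${size} bytes`);
    });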
722
+ /**
723
+ * Clear all cached files
724
+ */
725
+ async clearAll() {
726
+ await caches.delete(CACHE_NAME);
727
+ this.cache = await caches.open(CACHE_NAME);
728
+
729
+ return new Promise((resolve, reject) => {
730
+ const tx = this.db.transaction(STORE_FILES, 'readwrite');
731
+ const store = tx.objectStore(STORE_FILES);
732
+ const request = store.clear();
733
+ request.onsuccess = () => resolve();
734
+ request.onerror = () => reject(request.error);
735
+ });
736
+ }
737
+ }
738
+
739
+ export const cacheManager = new CacheManager();
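A minimal end-to-end sketch of the exported singleton, assuming an ES module context where top-level await is available; requiredFiles and videoElement are hypothetical stand-ins for the RequiredFiles list and the renderer's <video> element:

    import { cacheManager } from '@xiboplayer/cache';

    await cacheManager.init();

    for (const file of requiredFiles) {      // hypothetical RequiredFiles entries
      await cacheManager.downloadFile(file); // skips, defers, or caches as described above
    }

    // Hand a cached media file to the renderer, preserving Content-Type
    const response = await cacheManager.getCachedResponse('media', '12');
    if (response) {
      const blob = await response.blob();
      videoElement.src = URL.createObjectURL(blob);
    }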