@budarin/psw-plugin-opfs-serve-range 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,8 @@
1
import type { Plugin } from '@budarin/pluggable-serviceworker';
/** Configuration for the opfsRangeFromNetworkAndCache plugin. */
export interface OpfsRangeFromNetworkAndCacheOptions {
    /** Plugin ordering value passed through to the plugin host. Defaults to -10. */
    order?: number;
    /** Glob patterns of URL pathnames to handle; when set, only matching URLs are processed. */
    include?: string[];
    /** Glob patterns of URL pathnames to skip; checked before (and winning over) `include`. */
    exclude?: string[];
    /** Emit debug/error output through the plugin logger. Defaults to false. */
    enableLogging?: boolean;
}
/**
 * Creates a service-worker plugin that serves GET/Range requests from the
 * network while transparently caching complete files into OPFS.
 * Returns `undefined` when OPFS is not available in the current environment.
 */
export declare function opfsRangeFromNetworkAndCache(options?: OpfsRangeFromNetworkAndCacheOptions): Plugin | undefined;
@@ -0,0 +1,191 @@
1
+ import { notifyClients } from '@budarin/pluggable-serviceworker/utils';
2
+ import { HEADER_RANGE } from '@budarin/http-constants/headers';
3
+ import { getOpfsDir, urlToOpfsKey } from './index.js';
4
+ import { parseRangeHeader, build206Response, build206ResponseFromStream, createRangeExtractTransform, } from './opfsRangeUtil.js';
5
+ import { writeToOpfs, metadataFromResponse } from './opfsWrite.js';
6
+ import { isOpfsAvailable, shouldProcessFile } from './opfsUtil.js';
7
+ import { isBlacklisted } from './opfsLru.js';
8
+ import { OPFS_MSG_SKIP_QUOTA_EXCEEDED } from './opfsMessages.js';
9
// URLs with a background full-file fetch currently in flight; used as a
// dedupe guard so only one background download runs per URL at a time.
const loadingUrls = new Set();
10
/**
 * Fetches the complete resource in the background and stores it in OPFS so
 * that later Range requests can be served from the cache.
 *
 * Fire-and-forget companion to the plugin's fetch handler: the caller adds
 * `url` to `loadingUrls` before invoking this, and the `finally` block here
 * releases that marker whatever the outcome.
 *
 * @param {string} url - Absolute URL of the resource to cache.
 * @param {object} logger - Plugin logger exposing `debug`/`error`.
 * @param {boolean} enableLogging - When false, all log output is suppressed.
 */
async function backgroundFullFetchToOpfs(url, logger, enableLogging) {
    try {
        // Quota was previously exceeded for this URL — do not retry it.
        if (isBlacklisted(url)) {
            if (enableLogging) {
                logger.debug(`opfsRangeFromNetworkAndCache: skip ${url} (blacklisted, quota exceeded)`);
            }
            return;
        }
        const fullRequest = new Request(url, { method: 'GET' });
        const response = await fetch(fullRequest);
        // Only cache a complete 200 response with a readable body; a 206 or an
        // error response must never be stored as the full file. (The original
        // code had two identical skip branches here; they are merged.)
        if (!response.ok || !response.body || response.status !== 200) {
            if (enableLogging) {
                logger.debug(`opfsRangeFromNetworkAndCache: background full GET ${url} -> ${response.status}, skip cache`);
            }
            return;
        }
        const metadata = metadataFromResponse(response, url);
        const key = await urlToOpfsKey(url);
        const root = await navigator.storage.getDirectory();
        const dir = await getOpfsDir(root, true);
        await writeToOpfs(dir, key, response.body, metadata, {
            url,
            // Pass the size up front (when known) so eviction can run before the write.
            ...(metadata.size > 0 && { knownSize: metadata.size }),
        });
        if (enableLogging) {
            logger.debug(`opfsRangeFromNetworkAndCache: background cached ${url} -> ${key} (${metadata.size} bytes)`);
        }
    }
    catch (err) {
        if (enableLogging) {
            logger.error(`opfsRangeFromNetworkAndCache: background full GET failed ${url}`, err);
        }
    }
    finally {
        // Always release the in-flight marker so the URL can be retried later.
        loadingUrls.delete(url);
    }
}
53
/**
 * Creates a service-worker plugin that serves GET and Range requests from the
 * network while transparently caching complete files into OPFS.
 *
 * Returns `undefined` when OPFS is unavailable, so the plugin host can skip
 * registration entirely.
 *
 * @param {object} [options] - See OpfsRangeFromNetworkAndCacheOptions.
 */
export function opfsRangeFromNetworkAndCache(options = {}) {
    if (!isOpfsAvailable()) {
        return undefined;
    }
    const { order = -10, include, exclude, enableLogging = false, } = options;
    return {
        name: 'opfs-range-from-network-and-cache',
        order,
        async fetch(event, logger) {
            const request = event.request;
            // Only GET requests matching the include/exclude globs are handled.
            if (request.method !== 'GET') {
                return;
            }
            if (!shouldProcessFile(request.url, include, exclude)) {
                return;
            }
            const url = request.url;
            const rangeHeader = request.headers.get(HEADER_RANGE);
            // --- Case 1: plain full GET (no Range header) ---------------------
            // Pass the request through, and tee the body so one branch goes to
            // the caller and the other is written into OPFS in parallel.
            if (!rangeHeader) {
                try {
                    const response = await fetch(request);
                    if (!response.ok || !response.body) {
                        return response;
                    }
                    if (response.status !== 200) {
                        return response;
                    }
                    // Quota previously exceeded for this URL: serve but do not cache.
                    if (isBlacklisted(url)) {
                        notifyClients(OPFS_MSG_SKIP_QUOTA_EXCEEDED, { url });
                        return new Response(response.body, {
                            status: response.status,
                            statusText: response.statusText,
                            headers: response.headers,
                        });
                    }
                    const metadata = metadataFromResponse(response, url);
                    const key = await urlToOpfsKey(url);
                    const root = await navigator.storage.getDirectory();
                    const dir = await getOpfsDir(root, true);
                    const [branch1, branch2] = response.body.tee();
                    // Fire-and-forget cache write; failures are logged, never surfaced.
                    writeToOpfs(dir, key, branch2, metadata, {
                        url,
                        ...(metadata.size > 0 && { knownSize: metadata.size }),
                    }).catch((err) => {
                        if (enableLogging) {
                            logger.error(`opfsRangeFromNetworkAndCache: write failed ${url}`, err);
                        }
                    });
                    if (enableLogging) {
                        logger.debug(`opfsRangeFromNetworkAndCache: caching full GET ${url} (${metadata.size} bytes)`);
                    }
                    return new Response(branch1, {
                        status: response.status,
                        statusText: response.statusText,
                        headers: response.headers,
                    });
                }
                catch {
                    // Returning undefined lets the next plugin handle the request.
                    return;
                }
            }
            // --- Case 2: Range request --------------------------------------
            try {
                const key = await urlToOpfsKey(url);
                const root = await navigator.storage.getDirectory();
                // Diagnostic only: if the file already exists in OPFS we expected
                // an earlier (serve-from-cache) plugin to have answered this.
                try {
                    const dir = await getOpfsDir(root, false);
                    await dir.getFileHandle(key);
                    logger.warn(`opfsRangeFromNetworkAndCache: file exists in OPFS for ${url} but request was not served from cache; fetching from network (possible: If-Range mismatch, invalid range, or opfsServeRange order)`);
                }
                catch {
                    // File (or cache dir) not present — the normal path; ignore.
                }
                const response = await fetch(request);
                if (!response.body) {
                    return response;
                }
                // Server honored the Range itself: pass the 206 through and kick
                // off a deduplicated background download of the full file.
                if (response.status === 206) {
                    if (!loadingUrls.has(url)) {
                        loadingUrls.add(url);
                        // Intentionally not awaited; it cleans up loadingUrls itself.
                        backgroundFullFetchToOpfs(url, logger, enableLogging);
                    }
                    return new Response(response.body, {
                        status: response.status,
                        statusText: response.statusText,
                        headers: response.headers,
                    });
                }
                if (response.status === 416) {
                    return response;
                }
                // Server ignored the Range header and sent the full file: cache
                // it and synthesize the requested 206 slice ourselves.
                if (response.status === 200) {
                    const contentLength = response.headers.get('Content-Length');
                    const fullSize = contentLength
                        ? parseInt(contentLength, 10)
                        : 0;
                    const type = response.headers.get('Content-Type') ??
                        'application/octet-stream';
                    const etag = response.headers.get('ETag') ?? undefined;
                    const lastModified = response.headers.get('Last-Modified') ?? undefined;
                    // Streaming path: size known, so we can tee — one branch to
                    // OPFS, the other trimmed to the requested byte range.
                    if (fullSize > 0 &&
                        Number.isInteger(fullSize)) {
                        const range = parseRangeHeader(rangeHeader, fullSize);
                        const metadata = metadataFromResponse(response, url);
                        // These shadow the outer key/root deliberately (same values).
                        const key = await urlToOpfsKey(url);
                        const root = await navigator.storage.getDirectory();
                        const dir = await getOpfsDir(root, true);
                        const [branch1, branch2] = response.body.tee();
                        writeToOpfs(dir, key, branch2, metadata, {
                            url,
                            knownSize: fullSize,
                        }).catch((err) => {
                            if (enableLogging) {
                                logger.error(`opfsRangeFromNetworkAndCache: write failed ${url}`, err);
                            }
                        });
                        const rangeStream = branch1.pipeThrough(createRangeExtractTransform(range));
                        return build206ResponseFromStream(rangeStream, range, fullSize, {
                            type,
                            ...(etag && { etag }),
                            ...(lastModified && { lastModified }),
                        });
                    }
                    // Fallback: no usable Content-Length — buffer the body to learn
                    // its size, then slice. Note this path does not cache to OPFS.
                    const blob = await response.blob();
                    const size = blob.size;
                    const range = parseRangeHeader(rangeHeader, size);
                    const rangeBlob = blob.slice(range.start, range.end + 1);
                    return build206Response(rangeBlob, range, size, {
                        type,
                        ...(etag && { etag }),
                        ...(lastModified && { lastModified }),
                    });
                }
                return response;
            }
            catch {
                // Includes parseRangeHeader throwing on an invalid Range header;
                // fall through to the next plugin.
                return;
            }
        },
    };
}
@@ -0,0 +1,14 @@
1
/** An inclusive byte range within a resource. */
export interface RangeSpec {
    /** First byte offset (inclusive, 0-based). */
    start: number;
    /** Last byte offset (inclusive). */
    end: number;
}
/**
 * Parses a single-range `Range` header ("bytes=a-b", "bytes=a-" or "bytes=-n")
 * against the resource size.
 * Throws on malformed, multi-range, or out-of-bounds values.
 */
export declare function parseRangeHeader(rangeHeader: string, fullSize: number): RangeSpec;
/** Optional header values for the 206 response builders. */
export interface Build206Options {
    /** Content-Type value; defaults to application/octet-stream. */
    type?: string;
    /** ETag header value, emitted verbatim when present. */
    etag?: string;
    /** Last-Modified header value, emitted verbatim when present. */
    lastModified?: string;
    /** Cache-Control header value, emitted verbatim when present. */
    cacheControl?: string;
}
/** Creates a TransformStream that passes through only the bytes inside `range`. */
export declare function createRangeExtractTransform(range: RangeSpec): TransformStream<Uint8Array, Uint8Array>;
/** Builds a 206 Partial Content response from an already-sliced blob. */
export declare function build206Response(rangeBlob: Blob, range: RangeSpec, fullSize: number, options?: Build206Options): Response;
/** Builds a 206 Partial Content response that streams the range bytes. */
export declare function build206ResponseFromStream(rangeStream: ReadableStream<Uint8Array>, range: RangeSpec, fullSize: number, options?: Build206Options): Response;
@@ -0,0 +1,99 @@
1
+ import { HEADER_CONTENT_TYPE, HEADER_CONTENT_RANGE, HEADER_CONTENT_LENGTH, HEADER_ETAG, HEADER_LAST_MODIFIED, } from '@budarin/http-constants/headers';
2
+ import { HTTP_STATUS_PARTIAL_CONTENT } from '@budarin/http-constants/statuses';
3
+ import { MIME_APPLICATION_OCTET_STREAM } from '@budarin/http-constants/mime-types';
4
// Declared locally — not part of the header constants imported above.
const HEADER_ACCEPT_RANGES = 'Accept-Ranges';
5
/**
 * Parses a single-range `Range` header against the resource size.
 *
 * Supported forms: "bytes=a-b", "bytes=a-" (to end of file) and "bytes=-n"
 * (final n bytes). Multi-range headers are rejected.
 *
 * Per RFC 9110 §14.1.2, a last-byte-pos at or past the end of the resource is
 * interpreted as "the remainder of the representation" and is clamped rather
 * than rejected (previously this threw, wrongly failing e.g. "bytes=0-999999"
 * on a smaller file).
 *
 * @param {string} rangeHeader - Raw Range header value.
 * @param {number} fullSize - Total size of the resource in bytes.
 * @returns {{start: number, end: number}} Inclusive byte offsets.
 * @throws {Error} On malformed, multi-range, or unsatisfiable ranges.
 */
export function parseRangeHeader(rangeHeader, fullSize) {
    const trimmed = rangeHeader.trim();
    // Suffix form "bytes=-n": the final n bytes of the resource.
    const suffixMatch = /^bytes=-(\d+)$/.exec(trimmed);
    if (suffixMatch) {
        const suffixLength = parseInt(suffixMatch[1], 10);
        if (isNaN(suffixLength) || suffixLength <= 0) {
            throw new Error('Invalid suffix range value');
        }
        // An empty resource has no final bytes to serve; the old code returned
        // the invalid range {start: 0, end: -1} here.
        if (fullSize <= 0) {
            throw new Error('Range start is out of bounds');
        }
        const start = Math.max(0, fullSize - suffixLength);
        const end = fullSize - 1;
        return { start, end };
    }
    const rangeMatch = /^bytes=(\d+)-(\d*)$/.exec(trimmed);
    if (!rangeMatch) {
        throw new Error('Invalid or unsupported range header format');
    }
    const start = parseInt(rangeMatch[1], 10);
    let end = rangeMatch[2] ? parseInt(rangeMatch[2], 10) : fullSize - 1;
    if (isNaN(start) || isNaN(end)) {
        throw new Error('Invalid range values');
    }
    if (start < 0 || start >= fullSize) {
        throw new Error('Range start is out of bounds');
    }
    // RFC 9110: clamp an over-long last-byte-pos to the end of the resource.
    if (end >= fullSize) {
        end = fullSize - 1;
    }
    if (end < start) {
        throw new Error('Range end is out of bounds');
    }
    return { start, end };
}
34
/**
 * Creates a TransformStream that forwards only the bytes falling inside the
 * inclusive [start, end] range, tracking the absolute offset of each chunk.
 * Chunks entirely outside the range are dropped; boundary chunks are sliced.
 */
export function createRangeExtractTransform(range) {
    const { start, end } = range;
    let bytesSeen = 0;
    return new TransformStream({
        transform(chunk, controller) {
            const from = bytesSeen;
            bytesSeen += chunk.byteLength;
            const to = bytesSeen;
            // Chunk lies completely before or after the requested range.
            if (to <= start || from > end) {
                return;
            }
            const lo = start > from ? start - from : 0;
            const hi = Math.min(chunk.byteLength, end - from + 1);
            if (lo < hi) {
                controller.enqueue(chunk.slice(lo, hi));
            }
        },
    });
}
53
/**
 * Builds a 206 Partial Content response whose body is an already-sliced blob.
 * Content-Length reflects the blob size; Content-Range describes the slice's
 * position within the full resource. Optional validator/cache headers are
 * added only when provided.
 */
export function build206Response(rangeBlob, range, fullSize, options = {}) {
    const { type = MIME_APPLICATION_OCTET_STREAM, etag, lastModified, cacheControl, } = options;
    const headers = new Headers();
    headers.set(HEADER_CONTENT_RANGE, `bytes ${String(range.start)}-${String(range.end)}/${String(fullSize)}`);
    headers.set(HEADER_CONTENT_LENGTH, String(rangeBlob.size));
    headers.set(HEADER_CONTENT_TYPE, type);
    headers.set(HEADER_ACCEPT_RANGES, 'bytes');
    if (cacheControl) {
        headers.set('Cache-Control', cacheControl);
    }
    if (etag) {
        headers.set(HEADER_ETAG, etag);
    }
    if (lastModified) {
        headers.set(HEADER_LAST_MODIFIED, lastModified);
    }
    return new Response(rangeBlob, {
        status: HTTP_STATUS_PARTIAL_CONTENT,
        headers,
    });
}
76
/**
 * Builds a 206 Partial Content response that streams the range bytes.
 * The caller guarantees the stream yields exactly the bytes of `range`;
 * Content-Length is computed from the inclusive range bounds.
 */
export function build206ResponseFromStream(rangeStream, range, fullSize, options = {}) {
    const { type = MIME_APPLICATION_OCTET_STREAM, etag, lastModified, cacheControl, } = options;
    const headers = new Headers();
    headers.set(HEADER_CONTENT_RANGE, `bytes ${String(range.start)}-${String(range.end)}/${String(fullSize)}`);
    headers.set(HEADER_CONTENT_LENGTH, String(range.end - range.start + 1));
    headers.set(HEADER_CONTENT_TYPE, type);
    headers.set(HEADER_ACCEPT_RANGES, 'bytes');
    if (cacheControl) {
        headers.set('Cache-Control', cacheControl);
    }
    if (etag) {
        headers.set(HEADER_ETAG, etag);
    }
    if (lastModified) {
        headers.set(HEADER_LAST_MODIFIED, lastModified);
    }
    return new Response(rangeStream, {
        status: HTTP_STATUS_PARTIAL_CONTENT,
        headers,
    });
}
@@ -0,0 +1,11 @@
1
/** Global configuration for the OPFS cache modules. */
export interface OpfsConfigOptions {
    /** Name of the cache folder under the OPFS root; defaults to the package constant. */
    folderName?: string;
    /** Fraction of the storage quota (0..1) the cache may occupy; defaults to 0.5. */
    maxCacheFraction?: number;
}
/** True when `navigator.storage.getDirectory` (OPFS) is available. */
export declare function isOpfsAvailable(): boolean;
/** Replaces the module-level OPFS configuration with a copy of `options`. */
export declare function configureOpfs(options?: OpfsConfigOptions): void;
/** Configured quota fraction, or the 0.5 default when unset or out of [0, 1]. */
export declare function getMaxCacheFraction(): number;
/** Matches a URL's pathname against a glob pattern (`*` and `?` wildcards). */
export declare function matchesGlob(url: string, pattern: string): boolean;
/** Applies exclude globs first, then include globs; true when the URL should be handled. */
export declare function shouldProcessFile(url: string, include?: string[], exclude?: string[]): boolean;
/** Returns the cache directory handle under `root`, optionally creating it. */
export declare function getOpfsDir(root: FileSystemDirectoryHandle, create: boolean): Promise<FileSystemDirectoryHandle>;
/** Best-effort removal of the entire cache folder; errors are swallowed. */
export declare function clearOpfsCache(): Promise<void>;
@@ -0,0 +1,61 @@
1
+ import { OPFS_FOLDER_NAME } from './opfsFormat.js';
2
// Default ceiling: the cache may use at most half the storage quota.
const DEFAULT_MAX_CACHE_FRACTION = 0.5;
// Module-level configuration, replaced wholesale by configureOpfs().
let opfsConfig = {};
/**
 * Reports whether the Origin Private File System is usable here:
 * `navigator.storage.getDirectory` must exist and be a function.
 */
export function isOpfsAvailable() {
    if (typeof navigator === 'undefined') {
        return false;
    }
    const storage = navigator?.storage;
    return storage != null && typeof storage.getDirectory === 'function';
}
/** Replaces the current configuration with a shallow copy of `options`. */
export function configureOpfs(options = {}) {
    opfsConfig = Object.assign({}, options);
}
/** Cache folder name: the configured value, falling back to the package default. */
function getResolvedFolderName() {
    const { folderName } = opfsConfig;
    return folderName ?? OPFS_FOLDER_NAME;
}
/**
 * Fraction of the storage quota the cache may occupy.
 * Missing or out-of-range values fall back to DEFAULT_MAX_CACHE_FRACTION.
 */
export function getMaxCacheFraction() {
    const fraction = opfsConfig.maxCacheFraction;
    if (fraction === undefined || fraction < 0 || fraction > 1) {
        return DEFAULT_MAX_CACHE_FRACTION;
    }
    return fraction;
}
22
// Compiled glob -> RegExp cache. Patterns come from plugin configuration and
// are re-tested on every request, so compile each pattern only once instead
// of rebuilding the RegExp per call (the previous behavior).
const globRegexCache = new Map();
/**
 * Tests a URL's pathname against a simple glob pattern where `*` matches any
 * run of characters (including `/`) and `?` matches exactly one character.
 * Relative URLs are resolved against a dummy origin before extracting the path.
 *
 * @param {string} url - Absolute or relative URL.
 * @param {string} pattern - Glob pattern matched against the full pathname.
 * @returns {boolean} True when the pathname matches.
 */
export function matchesGlob(url, pattern) {
    const pathname = new URL(url, 'https://example.com').pathname;
    let regex = globRegexCache.get(pattern);
    if (!regex) {
        // Escape regex metacharacters except * and ?, then translate the wildcards.
        const regexPattern = pattern
            .replace(/[.+^${}()|[\]\\]/g, '\\$&')
            .replace(/\*/g, '.*')
            .replace(/\?/g, '.');
        regex = new RegExp(`^${regexPattern}$`);
        globRegexCache.set(pattern, regex);
    }
    return regex.test(pathname);
}
/**
 * Decides whether a URL should be handled by the OPFS cache plugins.
 * `exclude` patterns are checked first and win over `include`; when an
 * `include` list is set, only matching URLs are processed; with neither
 * list, every URL is processed.
 */
export function shouldProcessFile(url, include, exclude) {
    if (exclude?.length && exclude.some((pattern) => matchesGlob(url, pattern))) {
        return false;
    }
    if (include?.length) {
        return include.some((pattern) => matchesGlob(url, pattern));
    }
    return true;
}
49
/**
 * Returns the cache directory handle under the OPFS root.
 *
 * @param {FileSystemDirectoryHandle} root - OPFS root directory handle.
 * @param {boolean} create - When true, the directory is created if missing.
 * @returns {Promise<FileSystemDirectoryHandle>} The cache directory handle.
 */
export async function getOpfsDir(root, create) {
    const folderName = getResolvedFolderName();
    return root.getDirectoryHandle(folderName, { create });
}
53
/**
 * Best-effort removal of the entire OPFS cache folder.
 * Removal errors (e.g. the folder does not exist yet) are deliberately swallowed.
 */
export async function clearOpfsCache() {
    const root = await navigator.storage.getDirectory();
    try {
        await root.removeEntry(getResolvedFolderName(), { recursive: true });
    }
    catch {
        // Nothing to clear, or removal was denied — either way, ignore.
    }
}
@@ -0,0 +1,7 @@
1
import { type OpfsMetadata } from './opfsFormat.js';
/** Options for {@link writeToOpfs}. */
export interface WriteToOpfsOptions {
    /** Source URL; used in quota notifications and the retry blacklist. */
    url?: string;
    /** Expected body size in bytes; when positive, LRU eviction runs before the write. */
    knownSize?: number;
}
/**
 * Streams a body into an OPFS file and appends a metadata footer
 * (JSON metadata followed by its little-endian uint32 byte length).
 * On failure the partial file is removed and the error is rethrown.
 */
export declare function writeToOpfs(dir: FileSystemDirectoryHandle, key: string, bodyStream: ReadableStream<Uint8Array>, metadata: OpfsMetadata, options?: WriteToOpfsOptions): Promise<void>;
/** Derives cache metadata (size, type, optional etag/lastModified) from response headers. */
export declare function metadataFromResponse(response: Response, url: string): OpfsMetadata;
@@ -0,0 +1,94 @@
1
+ import { notifyClients } from '@budarin/pluggable-serviceworker/utils';
2
+ import { OPFS_META_FOOTER_LENGTH, MEGABYTE, } from './opfsFormat.js';
3
+ import { ensureSpaceForWrite, listCacheFilesWithMeta, getTotalCacheSize, computeEvictionSet, evictFiles, getCacheLimit, getStorageEstimate, addToBlacklist, } from './opfsLru.js';
4
+ import { OPFS_MSG_WRITE_SKIPPED_SIZE, OPFS_MSG_QUOTA_EXCEEDED, OPFS_MSG_EVICTION_COMPLETED, OPFS_MSG_WRITE_FAILED, } from './opfsMessages.js';
5
/**
 * Streams `bodyStream` into the OPFS file `key` inside `dir`, then appends a
 * footer: the JSON-serialized metadata followed by its byte length written as
 * a little-endian uint32 (buffer sized by OPFS_META_FOOTER_LENGTH — presumably
 * 4 bytes, since only a uint32 is written; confirm against opfsFormat.js).
 *
 * When `knownSize` is provided, space is freed via LRU eviction *before*
 * writing. On any write failure the partial file is removed and the error is
 * rethrown; on quota exhaustion the URL is either blacklisted (when even a
 * fully-evicted cache could not fit it) or room is evicted for a later retry.
 *
 * @param {FileSystemDirectoryHandle} dir - Cache directory handle.
 * @param {string} key - File name within the cache directory.
 * @param {ReadableStream} bodyStream - Body bytes to persist.
 * @param {object} metadata - Base metadata stored in the footer.
 * @param {{url?: string, knownSize?: number}} [options]
 * @throws Rethrows stream/write errors; throws Error(result.reason) when
 *         pre-write eviction cannot make enough room.
 */
export async function writeToOpfs(dir, key, bodyStream, metadata, options = {}) {
    const { url, knownSize } = options;
    // When the final size is known up front, make room (LRU eviction) first.
    if (knownSize !== undefined && knownSize > 0) {
        const result = await ensureSpaceForWrite(dir, knownSize, {
            onEvicted(keys) {
                if (keys.length > 0) {
                    notifyClients(OPFS_MSG_EVICTION_COMPLETED, { count: keys.length });
                }
            },
        });
        if (!result.ok) {
            // The file cannot fit even after eviction — notify clients and abort.
            notifyClients(OPFS_MSG_WRITE_SKIPPED_SIZE, {
                url,
                size: knownSize,
                reason: result.reason,
            });
            throw new Error(result.reason);
        }
    }
    const handle = await dir.getFileHandle(key, { create: true });
    const writable = await handle.createWritable();
    let bodySize = 0;
    // Wrap the OPFS writable so we can count body bytes as they pass through
    // and append the metadata footer when the source stream closes.
    const wrapper = new WritableStream({
        write(chunk) {
            bodySize += chunk.byteLength;
            return writable.write(chunk);
        },
        async close() {
            // The footer records the actual streamed size and the access time.
            const meta = {
                ...metadata,
                size: bodySize,
                lastAccessed: Date.now(),
            };
            const metaJson = JSON.stringify(meta);
            const metaBytes = new TextEncoder().encode(metaJson);
            const lengthAb = new ArrayBuffer(OPFS_META_FOOTER_LENGTH);
            // Little-endian uint32 holding the JSON blob's byte length.
            new DataView(lengthAb).setUint32(0, metaBytes.length, true);
            await writable.seek(bodySize);
            await writable.write(metaBytes);
            await writable.write(lengthAb);
            await writable.close();
        },
    });
    try {
        await bodyStream.pipeTo(wrapper);
    }
    catch (err) {
        const isQuotaExceeded = err instanceof Error &&
            (err.name === 'QuotaExceededError' || err.name === 'QuotaExceeded');
        // Remove the partially-written file; best-effort.
        try {
            await dir.removeEntry(key);
        }
        catch {
        }
        if (isQuotaExceeded && url !== undefined) {
            const entries = await listCacheFilesWithMeta(dir);
            const totalCacheSize = getTotalCacheSize(entries);
            const bytesWritten = bodySize;
            if (bytesWritten >= totalCacheSize) {
                // Evicting the entire cache still could not free enough space:
                // blacklist the URL so it is not retried.
                addToBlacklist(url);
                notifyClients(OPFS_MSG_QUOTA_EXCEEDED, { url });
            }
            else {
                // Evict enough LRU entries to fit the failed write plus headroom
                // (10% of the cache limit, capped at MEGABYTE) for a later retry.
                const estimate = await getStorageEstimate();
                const limit = getCacheLimit(estimate);
                const headroom = Math.min(MEGABYTE, Math.max(0, Math.floor(limit * 0.1)));
                const needToFree = bytesWritten + headroom;
                const keysToDelete = computeEvictionSet(entries, needToFree);
                await evictFiles(dir, keysToDelete);
            }
        }
        notifyClients(OPFS_MSG_WRITE_FAILED, { url, reason: err instanceof Error ? err.message : String(err) });
        throw err;
    }
}
80
/**
 * Derives the cache metadata record for a response about to be stored.
 * `size` falls back to 0 when Content-Length is missing or not a positive
 * integer; `etag` and `lastModified` keys appear only when the corresponding
 * headers are present.
 */
export function metadataFromResponse(response, url) {
    const headers = response.headers;
    const rawLength = headers.get('Content-Length');
    const parsedLength = rawLength ? parseInt(rawLength, 10) : 0;
    const hasValidSize = parsedLength > 0 && Number.isInteger(parsedLength);
    const etag = headers.get('ETag') ?? undefined;
    const lastModified = headers.get('Last-Modified') ?? undefined;
    return {
        url,
        size: hasValidSize ? parsedLength : 0,
        type: headers.get('Content-Type') ?? 'application/octet-stream',
        ...(etag && { etag }),
        ...(lastModified && { lastModified }),
    };
}