@componentor/fs 1.1.7
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +742 -0
- package/dist/index.d.ts +544 -0
- package/dist/index.js +2551 -0
- package/dist/index.js.map +1 -0
- package/dist/opfs-hybrid.d.ts +198 -0
- package/dist/opfs-hybrid.js +2552 -0
- package/dist/opfs-hybrid.js.map +1 -0
- package/dist/opfs-worker-proxy.d.ts +224 -0
- package/dist/opfs-worker-proxy.js +274 -0
- package/dist/opfs-worker-proxy.js.map +1 -0
- package/dist/opfs-worker.js +2732 -0
- package/dist/opfs-worker.js.map +1 -0
- package/package.json +66 -0
- package/src/constants.ts +52 -0
- package/src/errors.ts +88 -0
- package/src/file-handle.ts +100 -0
- package/src/global.d.ts +57 -0
- package/src/handle-manager.ts +250 -0
- package/src/index.ts +1404 -0
- package/src/opfs-hybrid.ts +265 -0
- package/src/opfs-worker-proxy.ts +374 -0
- package/src/opfs-worker.ts +253 -0
- package/src/packed-storage.ts +426 -0
- package/src/path-utils.ts +97 -0
- package/src/streams.ts +109 -0
- package/src/symlink-manager.ts +329 -0
- package/src/types.ts +285 -0

package/src/opfs-worker.ts
@@ -0,0 +1,253 @@
/**
 * OPFS Worker Script
 * Runs OPFS operations in a dedicated Web Worker for non-blocking main thread
 *
 * Usage: Create a worker with this script and communicate via postMessage
 */

import OPFS from './index.js'
import type { BatchWriteEntry, SymlinkDefinition } from './types.js'

// Message types
interface WorkerRequest {
  id: number
  method: string
  args: unknown[]
}

interface WorkerResponse {
  id: number
  result?: unknown
  error?: { message: string; code?: string }
  // For transferable arrays
  transfer?: ArrayBuffer[]
}

// Initialize OPFS with sync mode (available in workers)
let fs: OPFS | null = null

function getFS(): OPFS {
  if (!fs) {
    fs = new OPFS({ useSync: true, verbose: false })
  }
  return fs
}

// Handle incoming messages
self.onmessage = async (event: MessageEvent<WorkerRequest>) => {
  const { id, method, args } = event.data

  try {
    const opfs = getFS()
    let result: unknown
    const transfer: ArrayBuffer[] = []

    // Route to appropriate method
    switch (method) {
      // File operations
      case 'readFile': {
        const data = await opfs.readFile(args[0] as string, args[1] as { encoding?: string })
        if (data instanceof Uint8Array) {
          // Transfer the buffer for zero-copy
          result = data
          transfer.push(data.buffer)
        } else {
          result = data
        }
        break
      }

      case 'writeFile':
        await opfs.writeFile(args[0] as string, args[1] as string | Uint8Array, args[2] as object)
        result = undefined
        break

      case 'readFileBatch': {
        const results = await opfs.readFileBatch(args[0] as string[])
        // Transfer all buffers
        for (const r of results) {
          if (r.data) {
            transfer.push(r.data.buffer)
          }
        }
        result = results
        break
      }

      case 'writeFileBatch':
        await opfs.writeFileBatch(args[0] as BatchWriteEntry[])
        result = undefined
        break

      case 'appendFile':
        await opfs.appendFile(args[0] as string, args[1] as string | Uint8Array, args[2] as object)
        result = undefined
        break

      case 'copyFile':
        await opfs.copyFile(args[0] as string, args[1] as string, args[2] as number)
        result = undefined
        break

      case 'unlink':
        await opfs.unlink(args[0] as string)
        result = undefined
        break

      case 'truncate':
        await opfs.truncate(args[0] as string, args[1] as number)
        result = undefined
        break

      // Directory operations
      case 'mkdir':
        await opfs.mkdir(args[0] as string)
        result = undefined
        break

      case 'rmdir':
        await opfs.rmdir(args[0] as string)
        result = undefined
        break

      case 'readdir':
        result = await opfs.readdir(args[0] as string, args[1] as object)
        break

      case 'cp':
        await opfs.cp(args[0] as string, args[1] as string, args[2] as object)
        result = undefined
        break

      case 'rm':
        await opfs.rm(args[0] as string, args[1] as object)
        result = undefined
        break

      // Stat operations
      case 'stat':
        result = serializeStats(await opfs.stat(args[0] as string))
        break

      case 'lstat':
        result = serializeStats(await opfs.lstat(args[0] as string))
        break

      case 'exists':
        result = await opfs.exists(args[0] as string)
        break

      case 'access':
        await opfs.access(args[0] as string, args[1] as number)
        result = undefined
        break

      case 'statfs':
        result = await opfs.statfs(args[0] as string | undefined)
        break

      case 'du':
        result = await opfs.du(args[0] as string)
        break

      // Symlink operations
      case 'symlink':
        await opfs.symlink(args[0] as string, args[1] as string)
        result = undefined
        break

      case 'readlink':
        result = await opfs.readlink(args[0] as string)
        break

      case 'symlinkBatch':
        await opfs.symlinkBatch(args[0] as SymlinkDefinition[])
        result = undefined
        break

      case 'realpath':
        result = await opfs.realpath(args[0] as string)
        break

      // Other operations
      case 'rename':
        await opfs.rename(args[0] as string, args[1] as string)
        result = undefined
        break

      case 'mkdtemp':
        result = await opfs.mkdtemp(args[0] as string)
        break

      case 'chmod':
        await opfs.chmod(args[0] as string, args[1] as number)
        result = undefined
        break

      case 'chown':
        await opfs.chown(args[0] as string, args[1] as number, args[2] as number)
        result = undefined
        break

      case 'utimes':
        await opfs.utimes(args[0] as string, args[1] as Date | number, args[2] as Date | number)
        result = undefined
        break

      case 'lutimes':
        await opfs.lutimes(args[0] as string, args[1] as Date | number, args[2] as Date | number)
        result = undefined
        break

      case 'resetCache':
        opfs.resetCache()
        result = undefined
        break

      case 'gc':
        // Force full garbage collection by completely reinitializing the OPFS instance
        // This releases all handles and caches, allowing browser to clean up resources
        fs = null
        fs = new OPFS({ useSync: true, verbose: false })
        result = undefined
        break

      default:
        throw new Error(`Unknown method: ${method}`)
    }

    const response: WorkerResponse = { id, result }
    if (transfer.length > 0) {
      self.postMessage(response, transfer)
    } else {
      self.postMessage(response)
    }
  } catch (err) {
    const error = err as Error & { code?: string }
    const response: WorkerResponse = {
      id,
      error: {
        message: error.message,
        code: error.code
      }
    }
    self.postMessage(response)
  }
}

// Serialize Stats object (functions can't be transferred)
function serializeStats(stats: { type: string; size: number; mode: number; ctime: Date; ctimeMs: number; mtime: Date; mtimeMs: number; target?: string }) {
  return {
    type: stats.type,
    size: stats.size,
    mode: stats.mode,
    ctime: stats.ctime.toISOString(),
    ctimeMs: stats.ctimeMs,
    mtime: stats.mtime.toISOString(),
    mtimeMs: stats.mtimeMs,
    target: stats.target
  }
}

// Signal that worker is ready
self.postMessage({ type: 'ready' })
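
The worker above defines a simple request/response protocol: the main thread posts { id, method, args } and gets back { id, result } or { id, error }, preceded by an initial { type: 'ready' } signal. The package ships its own wrapper in src/opfs-worker-proxy.ts; the sketch below only illustrates that protocol from the main-thread side. The worker URL resolution and the helper names (pending, call) are assumptions for illustration, not part of the package API.

// Minimal sketch of a main-thread client for the worker protocol above.
// Assumes the built worker script (dist/opfs-worker.js) is reachable at this URL.
type WorkerResponse = {
  id: number
  result?: unknown
  error?: { message: string; code?: string }
}

const worker = new Worker(new URL('./opfs-worker.js', import.meta.url), { type: 'module' })
let nextId = 1
const pending = new Map<number, { resolve: (v: unknown) => void; reject: (e: Error) => void }>()

worker.onmessage = (event: MessageEvent<WorkerResponse | { type: 'ready' }>) => {
  if ('type' in event.data) return // initial { type: 'ready' } signal
  const { id, result, error } = event.data
  const entry = pending.get(id)
  if (!entry) return
  pending.delete(id)
  if (error) {
    entry.reject(Object.assign(new Error(error.message), { code: error.code }))
  } else {
    entry.resolve(result)
  }
}

function call(method: string, ...args: unknown[]): Promise<unknown> {
  const id = nextId++
  return new Promise((resolve, reject) => {
    pending.set(id, { resolve, reject })
    worker.postMessage({ id, method, args })
  })
}

// Example usage:
//   await call('writeFile', '/hello.txt', 'hi')
//   const data = await call('readFile', '/hello.txt')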
package/src/packed-storage.ts
@@ -0,0 +1,426 @@
/**
 * Packed Storage - Stores multiple files in a single OPFS file
 *
 * Instead of creating 100 separate files (100 OPFS API calls),
 * we write all data to one pack file with an index (1-2 API calls).
 *
 * Format:
 * [index length: 4 bytes][CRC32: 4 bytes][JSON index][file data...]
 *
 * Index format:
 * { "path": { offset: number, size: number }, ... }
 *
 * CRC32 is calculated over [JSON index][file data...] for integrity verification.
 */

import type { HandleManager } from './handle-manager.js'
import { createECORRUPTED } from './errors.js'

// CRC32 lookup table (pre-computed for performance)
const CRC32_TABLE = new Uint32Array(256)
for (let i = 0; i < 256; i++) {
  let c = i
  for (let j = 0; j < 8; j++) {
    c = c & 1 ? 0xedb88320 ^ (c >>> 1) : c >>> 1
  }
  CRC32_TABLE[i] = c
}

/**
 * Calculate CRC32 checksum of data
 */
function crc32(data: Uint8Array): number {
  let crc = 0xffffffff
  for (let i = 0; i < data.length; i++) {
    crc = CRC32_TABLE[(crc ^ data[i]) & 0xff] ^ (crc >>> 8)
  }
  return (crc ^ 0xffffffff) >>> 0
}

interface PackIndex {
  [path: string]: { offset: number; size: number }
}

const PACK_FILE = '/.opfs-pack'

export class PackedStorage {
  private handleManager: HandleManager
  private useSync: boolean
  private index: PackIndex | null = null
  private indexLoaded = false

  constructor(handleManager: HandleManager, useSync: boolean) {
    this.handleManager = handleManager
    this.useSync = useSync
  }

  /**
   * Reset pack storage state (memory only)
   */
  reset(): void {
    this.index = null
    this.indexLoaded = false
  }

  /**
   * Clear pack storage completely (deletes pack file from disk)
   */
  async clear(): Promise<void> {
    this.index = null
    this.indexLoaded = false

    try {
      const root = await this.handleManager.getRoot()
      await root.removeEntry(PACK_FILE.replace(/^\//, ''))
    } catch {
      // Pack file doesn't exist, that's fine
    }
  }

  /**
   * Load pack index from disk (always reloads to support hybrid mode)
   * Verifies CRC32 checksum for integrity
   */
  private async loadIndex(): Promise<PackIndex> {
    // Always reload from disk to ensure we see writes from other threads/workers
    try {
      const { fileHandle } = await this.handleManager.getHandle(PACK_FILE)
      if (!fileHandle) {
        return {}
      }

      if (this.useSync) {
        const access = await fileHandle.createSyncAccessHandle()
        const size = access.getSize()
        if (size < 8) {
          access.close()
          return {}
        }

        // Read header: index length + CRC32
        const header = new Uint8Array(8)
        access.read(header, { at: 0 })
        const view = new DataView(header.buffer)
        const indexLen = view.getUint32(0, true)
        const storedCrc = view.getUint32(4, true)

        // Read everything after header (index + data) for CRC verification
        const contentSize = size - 8
        const content = new Uint8Array(contentSize)
        access.read(content, { at: 8 })
        access.close()

        // Verify CRC32
        const calculatedCrc = crc32(content)
        if (calculatedCrc !== storedCrc) {
          throw createECORRUPTED(PACK_FILE)
        }

        // Parse index from content
        const indexJson = new TextDecoder().decode(content.subarray(0, indexLen))
        return JSON.parse(indexJson)
      } else {
        const file = await fileHandle.getFile()
        const data = new Uint8Array(await file.arrayBuffer())
        if (data.length < 8) {
          return {}
        }

        const view = new DataView(data.buffer)
        const indexLen = view.getUint32(0, true)
        const storedCrc = view.getUint32(4, true)

        // Verify CRC32 over content (everything after header)
        const content = data.subarray(8)
        const calculatedCrc = crc32(content)
        if (calculatedCrc !== storedCrc) {
          throw createECORRUPTED(PACK_FILE)
        }

        const indexJson = new TextDecoder().decode(content.subarray(0, indexLen))
        return JSON.parse(indexJson)
      }
    } catch {
      return {}
    }
  }

  /**
   * Check if a path exists in the pack
   */
  async has(path: string): Promise<boolean> {
    const index = await this.loadIndex()
    return path in index
  }

  /**
   * Get file size from pack (for stat)
   */
  async getSize(path: string): Promise<number | null> {
    const index = await this.loadIndex()
    const entry = index[path]
    return entry ? entry.size : null
  }

  /**
   * Read a file from the pack
   */
  async read(path: string): Promise<Uint8Array | null> {
    const index = await this.loadIndex()
    const entry = index[path]
    if (!entry) return null

    const { fileHandle } = await this.handleManager.getHandle(PACK_FILE)
    if (!fileHandle) return null

    const buffer = new Uint8Array(entry.size)

    if (this.useSync) {
      const access = await fileHandle.createSyncAccessHandle()
      access.read(buffer, { at: entry.offset })
      access.close()
    } else {
      const file = await fileHandle.getFile()
      const data = new Uint8Array(await file.arrayBuffer())
      buffer.set(data.subarray(entry.offset, entry.offset + entry.size))
    }

    return buffer
  }

  /**
   * Read multiple files from the pack in a single operation
   * Loads index once, reads all data in parallel
   */
  async readBatch(paths: string[]): Promise<Map<string, Uint8Array | null>> {
    const results = new Map<string, Uint8Array | null>()
    if (paths.length === 0) return results

    const index = await this.loadIndex()

    // Find which paths are in the pack
    const toRead: Array<{ path: string; offset: number; size: number }> = []
    for (const path of paths) {
      const entry = index[path]
      if (entry) {
        toRead.push({ path, offset: entry.offset, size: entry.size })
      } else {
        results.set(path, null)
      }
    }

    if (toRead.length === 0) return results

    const { fileHandle } = await this.handleManager.getHandle(PACK_FILE)
    if (!fileHandle) {
      for (const { path } of toRead) {
        results.set(path, null)
      }
      return results
    }

    if (this.useSync) {
      const access = await fileHandle.createSyncAccessHandle()
      for (const { path, offset, size } of toRead) {
        const buffer = new Uint8Array(size)
        access.read(buffer, { at: offset })
        results.set(path, buffer)
      }
      access.close()
    } else {
      const file = await fileHandle.getFile()
      const data = new Uint8Array(await file.arrayBuffer())
      for (const { path, offset, size } of toRead) {
        const buffer = new Uint8Array(size)
        buffer.set(data.subarray(offset, offset + size))
        results.set(path, buffer)
      }
    }

    return results
  }

  /**
   * Write multiple files to the pack in a single operation
   * This is the key optimization - 100 files become 1 write!
   * Includes CRC32 checksum for integrity verification.
   * Note: This replaces the entire pack with the new entries
   */
  async writeBatch(entries: Array<{ path: string; data: Uint8Array }>): Promise<void> {
    if (entries.length === 0) return

    const encoder = new TextEncoder()

    // Calculate total data size
    let totalDataSize = 0
    for (const { data } of entries) {
      totalDataSize += data.length
    }

    // Build index - iterate until offsets stabilize
    // (offset changes -> JSON length changes -> header size changes -> offset changes)
    // Header format: [index length: 4][CRC32: 4][JSON index][file data...]
    const newIndex: PackIndex = {}
    let headerSize = 8 // 4 bytes index length + 4 bytes CRC32
    let prevHeaderSize = 0

    // Iterate until stable (usually 2-3 iterations)
    while (headerSize !== prevHeaderSize) {
      prevHeaderSize = headerSize

      let currentOffset = headerSize
      for (const { path, data } of entries) {
        newIndex[path] = { offset: currentOffset, size: data.length }
        currentOffset += data.length
      }

      const indexBuf = encoder.encode(JSON.stringify(newIndex))
      headerSize = 8 + indexBuf.length
    }

    // Build the complete pack file
    const finalIndexBuf = encoder.encode(JSON.stringify(newIndex))
    const totalSize = headerSize + totalDataSize
    const packBuffer = new Uint8Array(totalSize)
    const view = new DataView(packBuffer.buffer)

    // Write index JSON at offset 8
    packBuffer.set(finalIndexBuf, 8)

    // Write data at correct offsets
    for (const { path, data } of entries) {
      const entry = newIndex[path]
      packBuffer.set(data, entry.offset)
    }

    // Calculate CRC32 over content (index + data, everything after header)
    const content = packBuffer.subarray(8)
    const checksum = crc32(content)

    // Write header (index length + CRC32)
    view.setUint32(0, finalIndexBuf.length, true)
    view.setUint32(4, checksum, true)

    await this.writePackFile(packBuffer)
    this.index = newIndex
  }

  /**
   * Write the pack file to OPFS
   */
  private async writePackFile(data: Uint8Array): Promise<void> {
    const { fileHandle } = await this.handleManager.getHandle(PACK_FILE, { create: true })
    if (!fileHandle) return

    if (this.useSync) {
      const access = await fileHandle.createSyncAccessHandle()
      access.truncate(data.length)
      access.write(data, { at: 0 })
      access.close()
    } else {
      const writable = await fileHandle.createWritable()
      await writable.write(data)
      await writable.close()
    }
  }

  /**
   * Remove a path from the pack index
   * Note: Doesn't reclaim space, just removes from index and recalculates CRC32
   */
  async remove(path: string): Promise<boolean> {
    const index = await this.loadIndex()
    if (!(path in index)) return false

    delete index[path]

    const { fileHandle } = await this.handleManager.getHandle(PACK_FILE)
    if (!fileHandle) return true

    // Need to read existing file to recalculate CRC32
    const encoder = new TextEncoder()
    const newIndexBuf = encoder.encode(JSON.stringify(index))

    if (this.useSync) {
      const access = await fileHandle.createSyncAccessHandle()
      const size = access.getSize()

      // Read old header to get old index length
      const oldHeader = new Uint8Array(8)
      access.read(oldHeader, { at: 0 })
      const oldIndexLen = new DataView(oldHeader.buffer).getUint32(0, true)

      // Read data portion (after old index)
      const dataStart = 8 + oldIndexLen
      const dataSize = size - dataStart
      const dataPortion = new Uint8Array(dataSize)
      if (dataSize > 0) {
        access.read(dataPortion, { at: dataStart })
      }

      // Build new content (new index + data)
      const newContent = new Uint8Array(newIndexBuf.length + dataSize)
      newContent.set(newIndexBuf, 0)
      if (dataSize > 0) {
        newContent.set(dataPortion, newIndexBuf.length)
      }

      // Calculate new CRC32
      const checksum = crc32(newContent)

      // Build new header
      const newHeader = new Uint8Array(8)
      const view = new DataView(newHeader.buffer)
      view.setUint32(0, newIndexBuf.length, true)
      view.setUint32(4, checksum, true)

      // Write new file
      const newFile = new Uint8Array(8 + newContent.length)
      newFile.set(newHeader, 0)
      newFile.set(newContent, 8)

      access.truncate(newFile.length)
      access.write(newFile, { at: 0 })
      access.close()
    } else {
      // For non-sync, rewrite the whole file
      const file = await fileHandle.getFile()
      const oldData = new Uint8Array(await file.arrayBuffer())

      if (oldData.length < 8) return true

      const oldIndexLen = new DataView(oldData.buffer).getUint32(0, true)
      const dataStart = 8 + oldIndexLen
      const dataPortion = oldData.subarray(dataStart)

      // Build new content
      const newContent = new Uint8Array(newIndexBuf.length + dataPortion.length)
      newContent.set(newIndexBuf, 0)
      newContent.set(dataPortion, newIndexBuf.length)

      // Calculate CRC32
      const checksum = crc32(newContent)

      // Build new file
      const newFile = new Uint8Array(8 + newContent.length)
      const view = new DataView(newFile.buffer)
      view.setUint32(0, newIndexBuf.length, true)
      view.setUint32(4, checksum, true)
      newFile.set(newContent, 8)

      const writable = await fileHandle.createWritable()
      await writable.write(newFile)
      await writable.close()
    }

    return true
  }

  /**
   * Check if pack file is being used (has entries)
   */
  async isEmpty(): Promise<boolean> {
    const index = await this.loadIndex()
    return Object.keys(index).length === 0
  }
}
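
The pack layout described in the file's header comment ([index length: 4 bytes][CRC32: 4 bytes][JSON index][file data...], with absolute offsets in the index) can be read back with a few lines of plain TypeScript. A minimal sketch follows, assuming the whole pack file is already in memory; parsePack is an illustrative name, not part of the package, and it skips the CRC32 check that PackedStorage performs before trusting the index.

// Standalone sketch of reading the pack format used above.
function parsePack(pack: Uint8Array): Map<string, Uint8Array> {
  const view = new DataView(pack.buffer, pack.byteOffset, pack.byteLength)
  const indexLen = view.getUint32(0, true) // little-endian, as in writeBatch()
  // JSON index sits right after the 8-byte header
  const index: Record<string, { offset: number; size: number }> =
    JSON.parse(new TextDecoder().decode(pack.subarray(8, 8 + indexLen)))
  const files = new Map<string, Uint8Array>()
  for (const [path, { offset, size }] of Object.entries(index)) {
    // Offsets are absolute positions within the pack file
    files.set(path, pack.subarray(offset, offset + size))
  }
  return files
}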