@aptre/v86 0.5.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +22 -0
- package/LICENSE.MIT +22 -0
- package/Readme.md +237 -0
- package/dist/v86.browser.js +26666 -0
- package/dist/v86.browser.js.map +7 -0
- package/dist/v86.js +26632 -0
- package/dist/v86.js.map +7 -0
- package/gen/generate_analyzer.ts +512 -0
- package/gen/generate_interpreter.ts +522 -0
- package/gen/generate_jit.ts +624 -0
- package/gen/rust_ast.ts +107 -0
- package/gen/util.ts +35 -0
- package/gen/x86_table.ts +1836 -0
- package/lib/9p.ts +1547 -0
- package/lib/filesystem.ts +1879 -0
- package/lib/marshall.ts +168 -0
- package/lib/softfloat/softfloat.c +32501 -0
- package/lib/zstd/zstddeclib.c +13520 -0
- package/package.json +75 -0
- package/src/acpi.ts +267 -0
- package/src/browser/dummy_screen.ts +106 -0
- package/src/browser/fake_network.ts +1771 -0
- package/src/browser/fetch_network.ts +361 -0
- package/src/browser/filestorage.ts +124 -0
- package/src/browser/inbrowser_network.ts +57 -0
- package/src/browser/keyboard.ts +564 -0
- package/src/browser/main.ts +3415 -0
- package/src/browser/mouse.ts +255 -0
- package/src/browser/network.ts +142 -0
- package/src/browser/print_stats.ts +336 -0
- package/src/browser/screen.ts +978 -0
- package/src/browser/serial.ts +316 -0
- package/src/browser/speaker.ts +1223 -0
- package/src/browser/starter.ts +1688 -0
- package/src/browser/wisp_network.ts +332 -0
- package/src/browser/worker_bus.ts +64 -0
- package/src/buffer.ts +652 -0
- package/src/bus.ts +78 -0
- package/src/const.ts +128 -0
- package/src/cpu.ts +2891 -0
- package/src/dma.ts +474 -0
- package/src/elf.ts +251 -0
- package/src/floppy.ts +1778 -0
- package/src/ide.ts +3455 -0
- package/src/io.ts +504 -0
- package/src/iso9660.ts +317 -0
- package/src/kernel.ts +250 -0
- package/src/lib.ts +645 -0
- package/src/log.ts +149 -0
- package/src/main.ts +199 -0
- package/src/ne2k.ts +1589 -0
- package/src/pci.ts +815 -0
- package/src/pit.ts +406 -0
- package/src/ps2.ts +820 -0
- package/src/rtc.ts +537 -0
- package/src/rust/analysis.rs +101 -0
- package/src/rust/codegen.rs +2660 -0
- package/src/rust/config.rs +3 -0
- package/src/rust/control_flow.rs +425 -0
- package/src/rust/cpu/apic.rs +658 -0
- package/src/rust/cpu/arith.rs +1207 -0
- package/src/rust/cpu/call_indirect.rs +2 -0
- package/src/rust/cpu/cpu.rs +4501 -0
- package/src/rust/cpu/fpu.rs +923 -0
- package/src/rust/cpu/global_pointers.rs +112 -0
- package/src/rust/cpu/instructions.rs +2486 -0
- package/src/rust/cpu/instructions_0f.rs +5261 -0
- package/src/rust/cpu/ioapic.rs +316 -0
- package/src/rust/cpu/memory.rs +351 -0
- package/src/rust/cpu/misc_instr.rs +613 -0
- package/src/rust/cpu/mod.rs +16 -0
- package/src/rust/cpu/modrm.rs +133 -0
- package/src/rust/cpu/pic.rs +402 -0
- package/src/rust/cpu/sse_instr.rs +361 -0
- package/src/rust/cpu/string.rs +701 -0
- package/src/rust/cpu/vga.rs +175 -0
- package/src/rust/cpu_context.rs +69 -0
- package/src/rust/dbg.rs +98 -0
- package/src/rust/gen/analyzer.rs +3807 -0
- package/src/rust/gen/analyzer0f.rs +3992 -0
- package/src/rust/gen/interpreter.rs +4447 -0
- package/src/rust/gen/interpreter0f.rs +5404 -0
- package/src/rust/gen/jit.rs +5080 -0
- package/src/rust/gen/jit0f.rs +5547 -0
- package/src/rust/gen/mod.rs +14 -0
- package/src/rust/jit.rs +2443 -0
- package/src/rust/jit_instructions.rs +7881 -0
- package/src/rust/js_api.rs +6 -0
- package/src/rust/leb.rs +46 -0
- package/src/rust/lib.rs +29 -0
- package/src/rust/modrm.rs +330 -0
- package/src/rust/opstats.rs +249 -0
- package/src/rust/page.rs +15 -0
- package/src/rust/paging.rs +25 -0
- package/src/rust/prefix.rs +15 -0
- package/src/rust/profiler.rs +155 -0
- package/src/rust/regs.rs +38 -0
- package/src/rust/softfloat.rs +286 -0
- package/src/rust/state_flags.rs +27 -0
- package/src/rust/wasmgen/mod.rs +2 -0
- package/src/rust/wasmgen/wasm_builder.rs +1047 -0
- package/src/rust/wasmgen/wasm_opcodes.rs +221 -0
- package/src/rust/zstd.rs +105 -0
- package/src/sb16.ts +1928 -0
- package/src/state.ts +359 -0
- package/src/uart.ts +472 -0
- package/src/vga.ts +2791 -0
- package/src/virtio.ts +1756 -0
- package/src/virtio_balloon.ts +273 -0
- package/src/virtio_console.ts +372 -0
- package/src/virtio_net.ts +326 -0
package/src/buffer.ts
ADDED
|
@@ -0,0 +1,652 @@
|
|
|
1
|
+
import { load_file, get_file_size } from './lib.js'
|
|
2
|
+
import { dbg_assert, dbg_log } from './log.js'
|
|
3
|
+
|
|
4
|
+
// The smallest size the emulated hardware can emit
const BLOCK_SIZE = 256

// When true, cached reads in get() are delivered via setTimeout so the
// callback is asynchronous in all code paths (matching the download path);
// currently disabled, so cache hits call back synchronously.
const ASYNC_SAFE = false
|
|
8
|
+
|
|
9
|
+
export class SyncBuffer {
|
|
10
|
+
buffer: ArrayBuffer
|
|
11
|
+
byteLength: number
|
|
12
|
+
onload: ((e: { buffer: ArrayBuffer }) => void) | undefined = undefined
|
|
13
|
+
onprogress:
|
|
14
|
+
| ((e: {
|
|
15
|
+
loaded: number
|
|
16
|
+
total: number
|
|
17
|
+
lengthComputable: boolean
|
|
18
|
+
}) => void)
|
|
19
|
+
| undefined = undefined
|
|
20
|
+
|
|
21
|
+
constructor(buffer: ArrayBuffer) {
|
|
22
|
+
dbg_assert(buffer instanceof ArrayBuffer)
|
|
23
|
+
|
|
24
|
+
this.buffer = buffer
|
|
25
|
+
this.byteLength = buffer.byteLength
|
|
26
|
+
}
|
|
27
|
+
|
|
28
|
+
load(): void {
|
|
29
|
+
if (this.onload) {
|
|
30
|
+
this.onload({ buffer: this.buffer })
|
|
31
|
+
}
|
|
32
|
+
}
|
|
33
|
+
|
|
34
|
+
get(start: number, len: number, fn: (data: Uint8Array) => void): void {
|
|
35
|
+
dbg_assert(start + len <= this.byteLength)
|
|
36
|
+
fn(new Uint8Array(this.buffer, start, len))
|
|
37
|
+
}
|
|
38
|
+
|
|
39
|
+
set(start: number, slice: Uint8Array, fn: () => void): void {
|
|
40
|
+
dbg_assert(start + slice.byteLength <= this.byteLength)
|
|
41
|
+
|
|
42
|
+
new Uint8Array(this.buffer, start, slice.byteLength).set(slice)
|
|
43
|
+
fn()
|
|
44
|
+
}
|
|
45
|
+
|
|
46
|
+
get_buffer(fn: (buffer: ArrayBuffer) => void): void {
|
|
47
|
+
fn(this.buffer)
|
|
48
|
+
}
|
|
49
|
+
|
|
50
|
+
get_state(): [number, Uint8Array] {
|
|
51
|
+
return [this.byteLength, new Uint8Array(this.buffer)]
|
|
52
|
+
}
|
|
53
|
+
|
|
54
|
+
set_state(state: [number, Uint8Array]): void {
|
|
55
|
+
this.byteLength = state[0]
|
|
56
|
+
this.buffer = state[1].slice().buffer
|
|
57
|
+
}
|
|
58
|
+
}
|
|
59
|
+
|
|
60
|
+
// Disk image backed by a remote file that is fetched over HTTP in ranges
// (via load_file). Writes never go back to the server: written blocks are
// kept in an in-memory overlay (block_cache) and merged into later reads
// of the same region by handle_read().
class AsyncXHRBuffer {
    filename: string
    // Total image size; may be undefined until load() queries the server.
    byteLength: number | undefined
    // Overlay of BLOCK_SIZE-byte blocks, keyed by block index.
    block_cache: Map<number, Uint8Array> = new Map()
    // Block indices that were written locally (only these are serialized).
    block_cache_is_write: Set<number> = new Set()
    // When set, HTTP range requests are widened to multiples of this size.
    fixed_chunk_size: number | undefined
    // Whether blocks downloaded from the server are also kept in the cache.
    cache_reads: boolean
    onload: ((e: object) => void) | undefined = undefined
    onprogress: ((e: ProgressEvent) => void) | undefined = undefined

    /**
     * @param filename URL of the remote image.
     * @param size Image size in bytes if known; otherwise resolved in load().
     * @param fixed_chunk_size Optional alignment for range requests; also
     *        enables caching of downloaded blocks.
     */
    constructor(
        filename: string,
        size: number | undefined,
        fixed_chunk_size: number | undefined,
    ) {
        this.filename = filename
        this.byteLength = size
        this.fixed_chunk_size = fixed_chunk_size
        this.cache_reads = !!fixed_chunk_size
    }

    // Determine the image size (remote query if not already known), then
    // fire onload.
    async load(): Promise<void> {
        if (this.byteLength !== undefined) {
            if (this.onload) {
                this.onload(Object.create(null))
            }
            return
        }

        const size = await get_file_size(this.filename)
        this.byteLength = size
        if (this.onload) {
            this.onload(Object.create(null))
        }
    }

    // Return the bytes [offset, offset+len) if every covered block is
    // cached, else undefined. offset and len must be BLOCK_SIZE-aligned.
    // For a single block the cached array itself is returned (no copy);
    // for multiple blocks a freshly concatenated copy is returned.
    get_from_cache(offset: number, len: number): Uint8Array | undefined {
        const number_of_blocks = len / BLOCK_SIZE
        const block_index = offset / BLOCK_SIZE

        for (let i = 0; i < number_of_blocks; i++) {
            const block = this.block_cache.get(block_index + i)

            if (!block) {
                return
            }
        }

        if (number_of_blocks === 1) {
            return this.block_cache.get(block_index)
        } else {
            const result = new Uint8Array(len)
            for (let i = 0; i < number_of_blocks; i++) {
                result.set(
                    this.block_cache.get(block_index + i)!,
                    i * BLOCK_SIZE,
                )
            }
            return result
        }
    }

    // Read [offset, offset+len) and pass the bytes to fn. Served from the
    // cache when fully covered, otherwise downloaded (widened to
    // fixed_chunk_size boundaries when configured).
    get(offset: number, len: number, fn: (data: Uint8Array) => void): void {
        dbg_assert(offset + len <= this.byteLength!)
        dbg_assert(offset % BLOCK_SIZE === 0)
        dbg_assert(len % BLOCK_SIZE === 0)
        dbg_assert(len > 0)

        const block = this.get_from_cache(offset, len)
        if (block) {
            if (ASYNC_SAFE) {
                // Deliver asynchronously, matching the download path below.
                setTimeout(fn.bind(this, block), 0)
            } else {
                fn(block)
            }
            return
        }

        let requested_start = offset
        let requested_length = len
        if (this.fixed_chunk_size) {
            // Widen the request to cover whole fixed-size chunks.
            requested_start = offset - (offset % this.fixed_chunk_size)
            requested_length =
                Math.ceil(
                    (offset - requested_start + len) / this.fixed_chunk_size,
                ) * this.fixed_chunk_size
        }

        load_file(this.filename, {
            done: function (this: AsyncXHRBuffer, buffer: ArrayBuffer) {
                const block = new Uint8Array(buffer)
                this.handle_read(requested_start, requested_length, block)
                if (requested_start === offset && requested_length === len) {
                    fn(block)
                } else {
                    // The request was widened: hand back only the window
                    // the caller actually asked for.
                    fn(
                        block.subarray(
                            offset - requested_start,
                            offset - requested_start + len,
                        ),
                    )
                }
            }.bind(this),
            range: { start: requested_start, length: requested_length },
        })
    }

    // Write `data` at `start` into the local overlay (never to the server),
    // then invoke fn. start and data length must be BLOCK_SIZE-aligned.
    set(start: number, data: Uint8Array, fn: () => void): void {
        const len = data.length
        dbg_assert(start + data.byteLength <= this.byteLength!)
        dbg_assert(start % BLOCK_SIZE === 0)
        dbg_assert(len % BLOCK_SIZE === 0)
        dbg_assert(len > 0)

        const start_block = start / BLOCK_SIZE
        const block_count = len / BLOCK_SIZE

        for (let i = 0; i < block_count; i++) {
            const block = this.block_cache.get(start_block + i)

            if (block === undefined) {
                // New cache entry: copy (slice) so that later mutation of
                // `data` cannot corrupt the cache.
                const data_slice = data.slice(
                    i * BLOCK_SIZE,
                    (i + 1) * BLOCK_SIZE,
                )
                this.block_cache.set(start_block + i, data_slice)
            } else {
                // Existing entry: overwrite its contents in place.
                const data_slice = data.subarray(
                    i * BLOCK_SIZE,
                    (i + 1) * BLOCK_SIZE,
                )
                dbg_assert(block.byteLength === data_slice.length)
                block.set(data_slice)
            }

            this.block_cache_is_write.add(start_block + i)
        }

        fn()
    }

    handle_read(offset: number, len: number, block: Uint8Array): void {
        // Used by AsyncXHRBuffer, AsyncXHRPartfileBuffer and AsyncFileBuffer
        // Overwrites blocks from the original source that have been written since

        const start_block = offset / BLOCK_SIZE
        const block_count = len / BLOCK_SIZE

        for (let i = 0; i < block_count; i++) {
            const cached_block = this.block_cache.get(start_block + i)

            if (cached_block) {
                // A locally modified block takes precedence over the bytes
                // just read from the source.
                block.set(cached_block, i * BLOCK_SIZE)
            } else if (this.cache_reads) {
                this.block_cache.set(
                    start_block + i,
                    block.slice(i * BLOCK_SIZE, (i + 1) * BLOCK_SIZE),
                )
            }
        }
    }

    get_buffer(fn: (buffer?: ArrayBuffer) => void): void {
        // We must download all parts, unlikely a good idea for big files
        fn()
    }

    // Serialize only the locally written blocks (not cached reads).
    get_state(): [[number, Uint8Array][]] {
        const block_cache: [number, Uint8Array][] = []

        for (const [index, block] of this.block_cache) {
            dbg_assert(isFinite(index))
            if (this.block_cache_is_write.has(index)) {
                block_cache.push([index, block])
            }
        }

        return [block_cache]
    }

    // Restore the write overlay from get_state() output.
    set_state(state: [[number, Uint8Array][]]): void {
        const block_cache = state[0]
        this.block_cache.clear()
        this.block_cache_is_write.clear()

        for (const [index, block] of block_cache) {
            dbg_assert(isFinite(index))
            this.block_cache.set(index, block)
            this.block_cache_is_write.add(index)
        }
    }
}
|
|
252
|
+
|
|
253
|
+
// Like AsyncXHRBuffer, but the remote image is split into many part files
// (byte-range names such as "image-0-512.img", or gnu-split style
// "image-00000000.img" with partfile_alt_format), optionally
// zstd-compressed (".zst" suffix on the extension).
export class AsyncXHRPartfileBuffer {
    // File extension of the parts, including a possible ".zst" suffix.
    extension: string
    // Common filename prefix of the parts (ends with "-" or "/").
    basename: string
    is_zstd: boolean
    block_cache: Map<number, Uint8Array> = new Map()
    block_cache_is_write: Set<number> = new Set()
    byteLength: number | undefined
    // Size of each part file; selects the per-chunk request path in get().
    fixed_chunk_size: number | undefined
    // Use gnu-split style numbered filenames instead of byte-range names.
    partfile_alt_format: boolean

    // Decompressor for ".zst" parts; must be provided when is_zstd is true.
    zstd_decompress:
        | ((size: number, data: Uint8Array) => Promise<any>)
        | undefined
    cache_reads: boolean
    onload: ((e: object) => void) | undefined = undefined
    onprogress: ((e: ProgressEvent) => void) | undefined = undefined

    constructor(
        filename: string,
        size: number | undefined,
        fixed_chunk_size: number | undefined,
        partfile_alt_format: boolean | undefined,

        zstd_decompress?: (size: number, data: Uint8Array) => Promise<any>,
    ) {
        // Split e.g. "name.img.zst" into basename "name" and
        // extension ".img.zst".
        const parts = filename.match(/\.[^.]+(\.zst)?$/)

        this.extension = parts ? parts[0] : ''
        this.basename = filename.substring(
            0,
            filename.length - this.extension.length,
        )

        this.is_zstd = this.extension.endsWith('.zst')

        if (!this.basename.endsWith('/')) {
            this.basename += '-'
        }

        this.byteLength = size
        this.fixed_chunk_size = fixed_chunk_size
        this.partfile_alt_format = !!partfile_alt_format
        this.zstd_decompress = zstd_decompress

        this.cache_reads = !!fixed_chunk_size
    }

    // The image size must be given to the constructor; a split image offers
    // no way to query it, hence the assertion when it is missing.
    load(): void {
        if (this.byteLength !== undefined) {
            if (this.onload) {
                this.onload(Object.create(null))
            }
            return
        }
        dbg_assert(false)
        if (this.onload) {
            this.onload(Object.create(null))
        }
    }

    // Read [offset, offset+len) and pass the bytes to fn, downloading and
    // (if needed) decompressing the covering part files.
    get(offset: number, len: number, fn: (data: Uint8Array) => void): void {
        dbg_assert(offset + len <= this.byteLength!)
        dbg_assert(offset % BLOCK_SIZE === 0)
        dbg_assert(len % BLOCK_SIZE === 0)
        dbg_assert(len > 0)

        const block = this.get_from_cache(offset, len)

        if (block) {
            if (ASYNC_SAFE) {
                setTimeout(fn.bind(this, block), 0)
            } else {
                fn(block)
            }
            return
        }

        if (this.fixed_chunk_size) {
            // Fetch every fixed-size chunk overlapping the request
            // (concurrently) and assemble the results into `blocks`;
            // `finished` counts completed chunks so the last one to arrive
            // invokes fn.
            const start_index = Math.floor(offset / this.fixed_chunk_size)
            const m_offset = offset - start_index * this.fixed_chunk_size
            dbg_assert(m_offset >= 0)
            const total_count = Math.ceil(
                (m_offset + len) / this.fixed_chunk_size,
            )
            const blocks = new Uint8Array(total_count * this.fixed_chunk_size)
            let finished = 0

            for (let i = 0; i < total_count; i++) {
                const chunk_offset = (start_index + i) * this.fixed_chunk_size

                const part_filename = this.partfile_alt_format
                    ? // matches output of gnu split:
                      // split -b 512 -a8 -d --additional-suffix .img w95.img w95-
                      this.basename +
                      (start_index + i + '').padStart(8, '0') +
                      this.extension
                    : this.basename +
                      chunk_offset +
                      '-' +
                      (chunk_offset + this.fixed_chunk_size) +
                      this.extension

                // XXX: unnecessary allocation
                const cached = this.get_from_cache(
                    chunk_offset,
                    this.fixed_chunk_size,
                )

                if (cached) {
                    blocks.set(cached, i * this.fixed_chunk_size)
                    finished++
                    if (finished === total_count) {
                        fn(blocks.subarray(m_offset, m_offset + len))
                    }
                } else {
                    load_file(part_filename, {
                        done: async function (
                            this: AsyncXHRPartfileBuffer,
                            buffer: ArrayBuffer,
                        ) {
                            let block = new Uint8Array(buffer)

                            if (this.is_zstd) {
                                const decompressed =
                                    await this.zstd_decompress!(
                                        this.fixed_chunk_size!,
                                        block,
                                    )
                                block = new Uint8Array(decompressed)
                            }

                            blocks.set(block, i * this.fixed_chunk_size!)
                            this.handle_read(
                                (start_index + i) * this.fixed_chunk_size!,
                                this.fixed_chunk_size! | 0,
                                block,
                            )

                            finished++
                            if (finished === total_count) {
                                // All chunks arrived: return only the window
                                // the caller asked for.
                                fn(blocks.subarray(m_offset, m_offset + len))
                            }
                        }.bind(this),
                    })
                }
            }
        } else {
            // Without a fixed chunk size, each request maps to exactly one
            // part file named after its byte range.
            const part_filename =
                this.basename + offset + '-' + (offset + len) + this.extension

            load_file(part_filename, {
                done: function (
                    this: AsyncXHRPartfileBuffer,
                    buffer: ArrayBuffer,
                ) {
                    dbg_assert(buffer.byteLength === len)
                    const block = new Uint8Array(buffer)
                    this.handle_read(offset, len, block)
                    fn(block)
                }.bind(this),
            })
        }
    }

    // Shared methods from AsyncXHRBuffer
    get_from_cache = AsyncXHRBuffer.prototype.get_from_cache
    set = AsyncXHRBuffer.prototype.set
    handle_read = AsyncXHRBuffer.prototype.handle_read
    get_state = AsyncXHRBuffer.prototype.get_state
    set_state = AsyncXHRBuffer.prototype.set_state
}
|
|
424
|
+
|
|
425
|
+
export class SyncFileBuffer {
|
|
426
|
+
file: File | undefined
|
|
427
|
+
byteLength: number
|
|
428
|
+
buffer: ArrayBuffer
|
|
429
|
+
onload: ((e: { buffer: ArrayBuffer }) => void) | undefined = undefined
|
|
430
|
+
onprogress:
|
|
431
|
+
| ((e: {
|
|
432
|
+
loaded: number
|
|
433
|
+
total: number
|
|
434
|
+
lengthComputable: boolean
|
|
435
|
+
}) => void)
|
|
436
|
+
| undefined = undefined
|
|
437
|
+
|
|
438
|
+
constructor(file: File) {
|
|
439
|
+
this.file = file
|
|
440
|
+
this.byteLength = file.size
|
|
441
|
+
|
|
442
|
+
if (file.size > 1 << 30) {
|
|
443
|
+
console.warn(
|
|
444
|
+
'SyncFileBuffer: Allocating buffer of ' +
|
|
445
|
+
(file.size >> 20) +
|
|
446
|
+
' MB ...',
|
|
447
|
+
)
|
|
448
|
+
}
|
|
449
|
+
|
|
450
|
+
this.buffer = new ArrayBuffer(file.size)
|
|
451
|
+
}
|
|
452
|
+
|
|
453
|
+
load(): void {
|
|
454
|
+
this.load_next(0)
|
|
455
|
+
}
|
|
456
|
+
|
|
457
|
+
load_next(start: number): void {
|
|
458
|
+
const PART_SIZE = 4 << 20
|
|
459
|
+
|
|
460
|
+
const filereader = new FileReader()
|
|
461
|
+
|
|
462
|
+
filereader.onload = function (
|
|
463
|
+
this: SyncFileBuffer,
|
|
464
|
+
e: ProgressEvent<FileReader>,
|
|
465
|
+
) {
|
|
466
|
+
const result = e.target!.result
|
|
467
|
+
if (!(result instanceof ArrayBuffer)) return
|
|
468
|
+
const buffer = new Uint8Array(result)
|
|
469
|
+
new Uint8Array(this.buffer, start).set(buffer)
|
|
470
|
+
this.load_next(start + PART_SIZE)
|
|
471
|
+
}.bind(this)
|
|
472
|
+
|
|
473
|
+
if (this.onprogress) {
|
|
474
|
+
this.onprogress({
|
|
475
|
+
loaded: start,
|
|
476
|
+
total: this.byteLength,
|
|
477
|
+
lengthComputable: true,
|
|
478
|
+
})
|
|
479
|
+
}
|
|
480
|
+
|
|
481
|
+
if (start < this.byteLength) {
|
|
482
|
+
const end = Math.min(start + PART_SIZE, this.byteLength)
|
|
483
|
+
const slice = this.file!.slice(start, end)
|
|
484
|
+
filereader.readAsArrayBuffer(slice)
|
|
485
|
+
} else {
|
|
486
|
+
this.file = undefined
|
|
487
|
+
if (this.onload) {
|
|
488
|
+
this.onload({ buffer: this.buffer })
|
|
489
|
+
}
|
|
490
|
+
}
|
|
491
|
+
}
|
|
492
|
+
|
|
493
|
+
get = SyncBuffer.prototype.get
|
|
494
|
+
set = SyncBuffer.prototype.set
|
|
495
|
+
get_buffer = SyncBuffer.prototype.get_buffer
|
|
496
|
+
get_state = SyncBuffer.prototype.get_state
|
|
497
|
+
set_state = SyncBuffer.prototype.set_state
|
|
498
|
+
}
|
|
499
|
+
|
|
500
|
+
// Disk image backed by a local File; slices are read on demand via
// FileReader. Writes go into the same in-memory block overlay as
// AsyncXHRBuffer (whose methods are reused below).
export class AsyncFileBuffer {
    file: File
    byteLength: number
    // Overlay of BLOCK_SIZE-byte blocks, keyed by block index.
    block_cache: Map<number, Uint8Array> = new Map()
    block_cache_is_write: Set<number> = new Set()
    onload: ((e: object) => void) | undefined = undefined
    onprogress: ((e: ProgressEvent) => void) | undefined = undefined

    constructor(file: File) {
        this.file = file
        this.byteLength = file.size
    }

    // Nothing to fetch up front; just report readiness.
    load(): void {
        if (this.onload) {
            this.onload(Object.create(null))
        }
    }

    // Read [offset, offset+len) and pass the bytes to fn; served from the
    // cache when fully covered, otherwise read from the File.
    get(offset: number, len: number, fn: (data: Uint8Array) => void): void {
        dbg_assert(offset % BLOCK_SIZE === 0)
        dbg_assert(len % BLOCK_SIZE === 0)
        dbg_assert(len > 0)

        const block = this.get_from_cache(offset, len)
        if (block) {
            fn(block)
            return
        }

        const fr = new FileReader()

        fr.onload = function (
            this: AsyncFileBuffer,
            e: ProgressEvent<FileReader>,
        ) {
            const result = e.target!.result
            if (!(result instanceof ArrayBuffer)) return
            const block = new Uint8Array(result)

            // Merge locally written blocks over the freshly read bytes.
            this.handle_read(offset, len, block)
            fn(block)
        }.bind(this)

        fr.readAsArrayBuffer(this.file.slice(offset, offset + len))
    }

    // Shared methods from AsyncXHRBuffer.
    get_from_cache = AsyncXHRBuffer.prototype.get_from_cache
    set = AsyncXHRBuffer.prototype.set
    handle_read = AsyncXHRBuffer.prototype.handle_read
    get_state = AsyncXHRBuffer.prototype.get_state
    set_state = AsyncXHRBuffer.prototype.set_state

    get_buffer(fn: (buffer?: ArrayBuffer) => void): void {
        // We must load all parts, unlikely a good idea for big files
        fn()
    }

    // Reassemble the full image as a File: stitch together unmodified
    // ranges of the original File with the locally cached blocks, in
    // ascending block order.
    get_as_file(name: string): File {
        const parts: BlobPart[] = []
        const existing_blocks = Array.from(this.block_cache.keys()).sort(
            function (x, y) {
                return x - y
            },
        )

        let current_offset = 0

        for (let i = 0; i < existing_blocks.length; i++) {
            const block_index = existing_blocks[i]
            const block = this.block_cache.get(block_index)!
            const start = block_index * BLOCK_SIZE
            dbg_assert(start >= current_offset)

            if (start !== current_offset) {
                // Gap before this cached block: take it from the File.
                parts.push(this.file.slice(current_offset, start))
                current_offset = start
            }

            parts.push(new Uint8Array(block))
            current_offset += block.length
        }

        if (current_offset !== this.file.size) {
            // Trailing region after the last cached block.
            parts.push(this.file.slice(current_offset))
        }

        const file = new File(parts, name)
        dbg_assert(file.size === this.file.size)

        return file
    }
}
|
|
593
|
+
|
|
594
|
+
export function buffer_from_object(
|
|
595
|
+
obj: {
|
|
596
|
+
buffer?: ArrayBuffer | File
|
|
597
|
+
url?: string
|
|
598
|
+
size?: number
|
|
599
|
+
fixed_chunk_size?: number
|
|
600
|
+
async?: boolean
|
|
601
|
+
use_parts?: boolean
|
|
602
|
+
},
|
|
603
|
+
|
|
604
|
+
zstd_decompress_worker?: (size: number, data: Uint8Array) => Promise<any>,
|
|
605
|
+
):
|
|
606
|
+
| SyncBuffer
|
|
607
|
+
| SyncFileBuffer
|
|
608
|
+
| AsyncFileBuffer
|
|
609
|
+
| AsyncXHRBuffer
|
|
610
|
+
| AsyncXHRPartfileBuffer
|
|
611
|
+
| undefined {
|
|
612
|
+
if (obj.buffer instanceof ArrayBuffer) {
|
|
613
|
+
return new SyncBuffer(obj.buffer)
|
|
614
|
+
} else if (typeof File !== 'undefined' && obj.buffer instanceof File) {
|
|
615
|
+
// SyncFileBuffer:
|
|
616
|
+
// - loads the whole disk image into memory, impossible for large files (more than 1GB)
|
|
617
|
+
// - can later serve get/set operations fast and synchronously
|
|
618
|
+
// - takes some time for first load, neglectable for small files (up to 100Mb)
|
|
619
|
+
//
|
|
620
|
+
// AsyncFileBuffer:
|
|
621
|
+
// - loads slices of the file asynchronously as requested
|
|
622
|
+
// - slower get/set
|
|
623
|
+
|
|
624
|
+
// Heuristics: If file is larger than or equal to 256M, use AsyncFileBuffer
|
|
625
|
+
let is_async = obj.async
|
|
626
|
+
if (is_async === undefined) {
|
|
627
|
+
is_async = obj.buffer.size >= 256 * 1024 * 1024
|
|
628
|
+
}
|
|
629
|
+
|
|
630
|
+
if (is_async) {
|
|
631
|
+
return new AsyncFileBuffer(obj.buffer)
|
|
632
|
+
} else {
|
|
633
|
+
return new SyncFileBuffer(obj.buffer)
|
|
634
|
+
}
|
|
635
|
+
} else if (obj.url) {
|
|
636
|
+
// Note: Only async for now
|
|
637
|
+
|
|
638
|
+
if (obj.use_parts) {
|
|
639
|
+
return new AsyncXHRPartfileBuffer(
|
|
640
|
+
obj.url,
|
|
641
|
+
obj.size,
|
|
642
|
+
obj.fixed_chunk_size,
|
|
643
|
+
false,
|
|
644
|
+
zstd_decompress_worker,
|
|
645
|
+
)
|
|
646
|
+
} else {
|
|
647
|
+
return new AsyncXHRBuffer(obj.url, obj.size, obj.fixed_chunk_size)
|
|
648
|
+
}
|
|
649
|
+
} else {
|
|
650
|
+
dbg_log('Ignored file: url=' + obj.url + ' buffer=' + obj.buffer)
|
|
651
|
+
}
|
|
652
|
+
}
|