@aepyornis/fastboot.ts 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/src/sparse.ts ADDED
@@ -0,0 +1,408 @@
1
+ // The MIT License (MIT)
2
+
3
+ // Copyright (c) 2021 Danny Lin <danny@kdrag0n.dev>
4
+
5
+ // Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ // of this software and associated documentation files (the "Software"), to deal
7
+ // in the Software without restriction, including without limitation the rights
8
+ // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ // copies of the Software, and to permit persons to whom the Software is
10
+ // furnished to do so, subject to the following conditions:
11
+
12
+ // The above copyright notice and this permission notice shall be included in all
13
+ // copies or substantial portions of the Software.
14
+
15
+ // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ // SOFTWARE.
22
+
23
+ function readBlobAsBuffer(blob: Blob): Promise<ArrayBuffer> {
24
+ return new Promise((resolve, reject) => {
25
+ let reader = new FileReader()
26
+ reader.onload = () => {
27
+ resolve(reader.result! as ArrayBuffer)
28
+ }
29
+ reader.onerror = () => {
30
+ reject(reader.error)
31
+ }
32
+
33
+ reader.readAsArrayBuffer(blob)
34
+ })
35
+ }
36
+
37
+ const common = {
38
+ readBlobAsBuffer: readBlobAsBuffer,
39
+ logVerbose: (...data) => console.log(...data),
40
+ logDebug: (...data) => console.log(...data),
41
+ }
42
+
43
// Magic number identifying an Android sparse image (stored little-endian).
const FILE_MAGIC = 0xed26ff3a

// Only sparse format version 1.0 is accepted by parseFileHeader.
const MAJOR_VERSION = 1
const MINOR_VERSION = 0
// Fixed on-disk sizes, in bytes, of the file header and each chunk header.
export const FILE_HEADER_SIZE = 28
const CHUNK_HEADER_SIZE = 12

// AOSP libsparse uses 64 MiB chunks
const RAW_CHUNK_SIZE = 64 * 1024 * 1024
52
+
53
+ export class ImageError extends Error {
54
+ constructor(message: string) {
55
+ super(message)
56
+ this.name = "ImageError"
57
+ }
58
+ }
59
+
60
/** One output of splitBlob: a complete sparse image and its written size. */
export interface SparseSplit {
  // Serialized sparse image for this split.
  data: ArrayBuffer
  // Number of bytes this split writes on the output device.
  bytes: number
}
64
+
65
/**
 * Chunk type tags from the sparse image format. These exact values are
 * written to / read from the binary format, so they must not change.
 */
export enum ChunkType {
  Raw = 0xcac1,
  Fill = 0xcac2,
  Skip = 0xcac3,
  Crc32 = 0xcac4,
}
71
+
72
/** Parsed sparse image file header (see parseFileHeader). */
export interface SparseHeader {
  // Block size in bytes; parseFileHeader enforces a multiple of 4.
  blockSize: number
  // Total blocks in the output image.
  blocks: number
  // Number of chunks in the image.
  chunks: number
  // Header CRC field; createImage always writes 0 here.
  crc32: number
}
78
+
79
/** A single chunk of a sparse image: parsed header plus (optional) payload. */
export interface SparseChunk {
  type: ChunkType
  /* 2: reserved, 16 bits */
  // Number of output blocks this chunk covers.
  blocks: number
  // Payload size in bytes (chunk total size minus the 12-byte header).
  dataBytes: number
  data: Blob | null // to be populated by consumer
}
86
+
87
+ class BlobBuilder {
88
+ private blob: Blob
89
+ private type: string
90
+
91
+ constructor(type: string = "") {
92
+ this.type = type
93
+ this.blob = new Blob([], { type: this.type })
94
+ }
95
+
96
+ append(blob: Blob) {
97
+ this.blob = new Blob([this.blob, blob], { type: this.type })
98
+ }
99
+
100
+ getBlob(): Blob {
101
+ return this.blob
102
+ }
103
+ }
104
+
105
+ /**
106
+ * Returns a parsed version of the sparse image file header from the given buffer.
107
+ *
108
+ * @param {ArrayBuffer} buffer - Raw file header data.
109
+ * @returns {SparseHeader} Object containing the header information.
110
+ */
111
+ export function parseFileHeader(buffer: ArrayBuffer): SparseHeader | null {
112
+ let view = new DataView(buffer)
113
+
114
+ let magic = view.getUint32(0, true)
115
+ if (magic !== FILE_MAGIC) {
116
+ return null
117
+ }
118
+
119
+ // v1.0+
120
+ let major = view.getUint16(4, true)
121
+ let minor = view.getUint16(6, true)
122
+ if (major !== MAJOR_VERSION || minor < MINOR_VERSION) {
123
+ throw new ImageError(`Unsupported sparse image version ${major}.${minor}`)
124
+ }
125
+
126
+ let fileHdrSize = view.getUint16(8, true)
127
+ let chunkHdrSize = view.getUint16(10, true)
128
+ if (fileHdrSize !== FILE_HEADER_SIZE || chunkHdrSize !== CHUNK_HEADER_SIZE) {
129
+ throw new ImageError(
130
+ `Invalid file header size ${fileHdrSize}, chunk header size ${chunkHdrSize}`,
131
+ )
132
+ }
133
+
134
+ let blockSize = view.getUint32(12, true)
135
+ if (blockSize % 4 !== 0) {
136
+ throw new ImageError(`Block size ${blockSize} is not a multiple of 4`)
137
+ }
138
+
139
+ return {
140
+ blockSize: blockSize,
141
+ blocks: view.getUint32(16, true),
142
+ chunks: view.getUint32(20, true),
143
+ crc32: view.getUint32(24, true),
144
+ }
145
+ }
146
+
147
+ function parseChunkHeader(buffer: ArrayBuffer) {
148
+ let view = new DataView(buffer)
149
+
150
+ // This isn't the same as what createImage takes.
151
+ // Further processing needs to be done on the chunks.
152
+ return {
153
+ type: view.getUint16(0, true),
154
+ /* 2: reserved, 16 bits */
155
+ blocks: view.getUint32(4, true),
156
+ dataBytes: view.getUint32(8, true) - CHUNK_HEADER_SIZE,
157
+ data: null, // to be populated by consumer
158
+ } as SparseChunk
159
+ }
160
+
161
+ function calcChunksBlockSize(chunks: Array<SparseChunk>) {
162
+ return chunks.map((chunk) => chunk.blocks).reduce((total, c) => total + c, 0)
163
+ }
164
+
165
+ function calcChunksDataSize(chunks: Array<SparseChunk>) {
166
+ return chunks
167
+ .map((chunk) => chunk.data!.size)
168
+ .reduce((total, c) => total + c, 0)
169
+ }
170
+
171
+ function calcChunksSize(chunks: Array<SparseChunk>) {
172
+ // 28-byte file header, 12-byte chunk headers
173
+ let overhead = FILE_HEADER_SIZE + CHUNK_HEADER_SIZE * chunks.length
174
+ return overhead + calcChunksDataSize(chunks)
175
+ }
176
+
177
+ async function createImage(
178
+ header: SparseHeader,
179
+ chunks: Array<SparseChunk>,
180
+ ): Promise<Blob> {
181
+ let blobBuilder = new BlobBuilder()
182
+
183
+ let buffer = new ArrayBuffer(FILE_HEADER_SIZE)
184
+ let dataView = new DataView(buffer)
185
+ let arrayView = new Uint8Array(buffer)
186
+
187
+ dataView.setUint32(0, FILE_MAGIC, true)
188
+ // v1.0
189
+ dataView.setUint16(4, MAJOR_VERSION, true)
190
+ dataView.setUint16(6, MINOR_VERSION, true)
191
+ dataView.setUint16(8, FILE_HEADER_SIZE, true)
192
+ dataView.setUint16(10, CHUNK_HEADER_SIZE, true)
193
+
194
+ // Match input parameters
195
+ dataView.setUint32(12, header.blockSize, true)
196
+ dataView.setUint32(16, header.blocks, true)
197
+ dataView.setUint32(20, chunks.length, true)
198
+
199
+ // We don't care about the CRC. AOSP docs specify that this should be a CRC32,
200
+ // but AOSP libsparse always sets 0 and puts the CRC in a final undocumented
201
+ // 0xCAC4 chunk instead.
202
+ dataView.setUint32(24, 0, true)
203
+
204
+ blobBuilder.append(new Blob([buffer]))
205
+ for (let chunk of chunks) {
206
+ buffer = new ArrayBuffer(CHUNK_HEADER_SIZE + chunk.data!.size)
207
+ dataView = new DataView(buffer)
208
+ arrayView = new Uint8Array(buffer)
209
+
210
+ dataView.setUint16(0, chunk.type, true)
211
+ dataView.setUint16(2, 0, true) // reserved
212
+ dataView.setUint32(4, chunk.blocks, true)
213
+ dataView.setUint32(8, CHUNK_HEADER_SIZE + chunk.data!.size, true)
214
+
215
+ let chunkArrayView = new Uint8Array(
216
+ await common.readBlobAsBuffer(chunk.data!),
217
+ )
218
+ arrayView.set(chunkArrayView, CHUNK_HEADER_SIZE)
219
+ blobBuilder.append(new Blob([buffer]))
220
+ }
221
+
222
+ return blobBuilder.getBlob()
223
+ }
224
+
225
+ /**
226
+ * Creates a sparse image from buffer containing raw image data.
227
+ *
228
+ * @param {Blob} blob - Blob containing the raw image data.
229
+ * @returns {Promise<Blob>} Promise that resolves the blob containing the new sparse image.
230
+ */
231
+ export async function fromRaw(blob: Blob): Promise<Blob> {
232
+ let header = {
233
+ blockSize: 4096,
234
+ blocks: blob.size / 4096,
235
+ chunks: 1,
236
+ crc32: 0,
237
+ }
238
+
239
+ let chunks = []
240
+ while (blob.size > 0) {
241
+ let chunkSize = Math.min(blob.size, RAW_CHUNK_SIZE)
242
+ chunks.push({
243
+ type: ChunkType.Raw,
244
+ blocks: chunkSize / header.blockSize,
245
+ data: blob.slice(0, chunkSize),
246
+ } as SparseChunk)
247
+ blob = blob.slice(chunkSize)
248
+ }
249
+
250
+ return createImage(header, chunks)
251
+ }
252
+
253
/**
 * Split a sparse image into smaller sparse images within the given size.
 * This takes a Blob instead of an ArrayBuffer because it may process images
 * larger than RAM.
 *
 * @param {Blob} blob - Blob containing the sparse image to split.
 * @param {number} splitSize - Maximum size per split.
 * @yields {Object} Data of the next split image and its output size in bytes.
 */
export async function* splitBlob(blob: Blob, splitSize: number) {
  common.logDebug(
    `Splitting ${blob.size}-byte sparse image into ${splitSize}-byte chunks`,
  )

  // 7/8 is a safe value for the split size, to account for extra overhead
  // AOSP source code does the same
  const safeSendValue = Math.floor(splitSize * (7 / 8))

  // Short-circuit if splitting isn't required
  if (blob.size <= splitSize) {
    common.logDebug("Blob fits in 1 payload, not splitting")
    yield {
      data: await common.readBlobAsBuffer(blob),
      bytes: blob.size,
    } as SparseSplit
    return
  }

  // Parse and validate the 28-byte file header before consuming chunks.
  let headerData = await common.readBlobAsBuffer(
    blob.slice(0, FILE_HEADER_SIZE),
  )
  let header = parseFileHeader(headerData)
  if (header === null) {
    throw new ImageError("Blob is not a sparse image")
  }

  // Remove CRC32 (if present), otherwise splitting will invalidate it
  header.crc32 = 0
  // From here on, `blob` always points at the next unread chunk header.
  blob = blob.slice(FILE_HEADER_SIZE)

  // Chunks accumulated for the split currently being built, and the number
  // of output-device bytes those chunks represent.
  let splitChunks: Array<SparseChunk> = []
  let splitDataBytes = 0
  for (let i = 0; i < header.chunks; i++) {
    // Read one chunk header, attach its payload slice, and advance `blob`.
    let chunkHeaderData = await common.readBlobAsBuffer(
      blob.slice(0, CHUNK_HEADER_SIZE),
    )
    let originalChunk = parseChunkHeader(chunkHeaderData)
    originalChunk.data = blob.slice(
      CHUNK_HEADER_SIZE,
      CHUNK_HEADER_SIZE + originalChunk.dataBytes,
    )
    blob = blob.slice(CHUNK_HEADER_SIZE + originalChunk.dataBytes)

    let chunksToProcess: SparseChunk[] = []

    // take into account cases where the chunk data is bigger than the maximum allowed download size
    if (originalChunk.dataBytes > safeSendValue) {
      common.logDebug(
        `Data of chunk ${i} is bigger than the maximum allowed download size: ${originalChunk.dataBytes} > ${safeSendValue}`,
      )

      // we should now split this chunk into multiple chunks that fit
      // NOTE(review): under strict null checks `originalData` is typed
      // Blob | null here; `.slice` below relies on the assignment above —
      // confirm this compiles with the project's strict tsconfig.
      // NOTE(review): this slicing looks correct for Raw chunks; whether a
      // Fill chunk's 4-byte payload can be split this way is not shown by
      // this code — verify against callers/format docs.
      let originalDataBytes = originalChunk.dataBytes
      let originalData = originalChunk.data

      while (originalDataBytes > 0) {
        const toSend = Math.min(safeSendValue, originalDataBytes)

        chunksToProcess.push({
          type: originalChunk.type,
          dataBytes: toSend,
          data: originalData.slice(0, toSend),
          // NOTE(review): `header` is non-null here (checked above), so the
          // optional chain is redundant; also `toSend / blockSize` can be
          // fractional if toSend isn't block-aligned — TODO confirm.
          blocks: toSend / header?.blockSize,
        })

        originalData = originalData.slice(toSend)
        originalDataBytes -= toSend
      }

      common.logDebug("chunksToProcess", chunksToProcess)
    } else {
      chunksToProcess.push(originalChunk)
    }

    for (const chunk of chunksToProcess) {
      // Space left in the current split, accounting for all header overhead.
      let bytesRemaining = splitSize - calcChunksSize(splitChunks)
      common.logVerbose(
        ` Chunk ${i}: type ${chunk.type}, ${chunk.dataBytes} bytes / ${chunk.blocks} blocks, ${bytesRemaining} bytes remaining`,
      )

      if (bytesRemaining >= chunk.dataBytes) {
        // Read the chunk and add it
        common.logVerbose(" Space is available, adding chunk")
        splitChunks.push(chunk)
        // Track amount of data written on the output device, in bytes
        splitDataBytes += chunk.blocks * header.blockSize
      } else {
        // Out of space, finish this split
        // Blocks need to be calculated from chunk headers instead of going by size
        // because FILL and SKIP chunks cover more blocks than the data they contain.
        let splitBlocks = calcChunksBlockSize(splitChunks)
        // Pad the split out to the full partition size with a Skip chunk so
        // each split stands alone as a complete image.
        splitChunks.push({
          type: ChunkType.Skip,
          blocks: header.blocks - splitBlocks,
          data: new Blob([]),
          dataBytes: 0,
        })
        common.logVerbose(
          `Partition is ${header.blocks} blocks, used ${splitBlocks}, padded with ${
            header.blocks - splitBlocks
          }, finishing split with ${calcChunksBlockSize(splitChunks)} blocks`,
        )
        let splitImage = await createImage(header, splitChunks)
        common.logDebug(
          `Finished ${splitImage.size}-byte split with ${splitChunks.length} chunks`,
        )
        yield {
          data: await common.readBlobAsBuffer(splitImage),
          bytes: splitDataBytes,
        } as SparseSplit

        // Start a new split. Every split is considered a full image by the
        // bootloader, so we need to skip the *total* written blocks.
        common.logVerbose(
          `Starting new split: skipping first ${splitBlocks} blocks and adding chunk`,
        )
        splitChunks = [
          {
            type: ChunkType.Skip,
            blocks: splitBlocks,
            data: new Blob([]),
            dataBytes: 0,
          },
          chunk,
        ]

        splitDataBytes = chunk.dataBytes
      }
    }
  }

  // Finish the final split if necessary
  // (skip it only when the leftover is nothing but a single Skip chunk).
  if (
    splitChunks.length > 0 &&
    (splitChunks.length > 1 || splitChunks[0].type !== ChunkType.Skip)
  ) {
    let splitImage = await createImage(header, splitChunks)
    common.logDebug(
      `Finishing final ${splitImage.size}-byte split with ${splitChunks.length} chunks`,
    )
    yield {
      data: await common.readBlobAsBuffer(splitImage),
      bytes: splitDataBytes,
    } as SparseSplit
  }
}
package/tsconfig.json ADDED
@@ -0,0 +1,15 @@
1
+ {
2
+ "compilerOptions": {
3
+ "target": "es6",
4
+ "lib": ["es6", "dom"],
5
+ "module": "es6",
6
+ "sourceMap": true,
7
+ "allowJs": false,
8
+ "esModuleInterop": true,
9
+ "forceConsistentCasingInFileNames": true,
10
+ "strict": true,
11
+ "noEmit": true
12
+ },
13
+ "include": ["src/**/*"],
14
+ "exclude": []
15
+ }