@mapcatch/util 1.0.7 → 1.0.8-b
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +5 -1
- package/dist/catchUtil.min.esm.js +5419 -4864
- package/dist/catchUtil.min.js +23 -21
- package/package.json +1 -1
- package/src/exif/index.js +26 -9
- package/src/exif/parse_image.js +8 -16
- package/src/image-size/detector.js +24 -0
- package/src/image-size/fromFile.js +55 -0
- package/src/image-size/index.js +2 -0
- package/src/image-size/lookup.js +37 -0
- package/src/image-size/types/bmp.js +10 -0
- package/src/image-size/types/cur.js +16 -0
- package/src/image-size/types/dds.js +10 -0
- package/src/image-size/types/gif.js +11 -0
- package/src/image-size/types/heif.js +35 -0
- package/src/image-size/types/icns.js +112 -0
- package/src/image-size/types/ico.js +74 -0
- package/src/image-size/types/index.js +43 -0
- package/src/image-size/types/j2c.js +11 -0
- package/src/image-size/types/jp2.js +22 -0
- package/src/image-size/types/jpg.js +157 -0
- package/src/image-size/types/ktx.js +18 -0
- package/src/image-size/types/png.js +36 -0
- package/src/image-size/types/pnm.js +74 -0
- package/src/image-size/types/psd.js +10 -0
- package/src/image-size/types/svg.js +100 -0
- package/src/image-size/types/tga.js +14 -0
- package/src/image-size/types/tiff.js +92 -0
- package/src/image-size/types/utils.js +83 -0
- package/src/image-size/types/webp.js +67 -0
- package/src/util.js +3 -7
|
@@ -0,0 +1,157 @@
|
|
|
1
|
+
// NOTE: we only support baseline and progressive JPGs here
// due to the structure of the loader class, we only get a buffer
// with a maximum size of 4096 bytes. so if the SOF marker is outside
// of this range we can't detect the file size correctly.
import { readUInt, readUInt16BE, toHexString } from './utils'

// 'Exif' in ASCII, hex-encoded — identifies an EXIF APP1 segment.
const EXIF_MARKER = '45786966'
const APP1_DATA_SIZE_BYTES = 2
const EXIF_HEADER_BYTES = 6
const TIFF_BYTE_ALIGN_BYTES = 2
const BIG_ENDIAN_BYTE_ALIGN = '4d4d' // 'MM' (Motorola byte order)
const LITTLE_ENDIAN_BYTE_ALIGN = '4949' // 'II' (Intel byte order)

// Each entry is exactly 12 bytes
const IDF_ENTRY_BYTES = 12
const NUM_DIRECTORY_ENTRIES_BYTES = 2

// True when the current block carries the 'Exif' marker at bytes 2-5.
function isEXIF(input) {
  return toHexString(input, 2, 6) === EXIF_MARKER
}

// Read the SOF dimensions: big-endian u16 height at `index`,
// big-endian u16 width right after it.
function extractSize(input, index) {
  return {
    height: readUInt16BE(input, index),
    width: readUInt16BE(input, index + 2),
  }
}

// Walk the EXIF IFD entries looking for the orientation tag (0x0112).
// Returns the orientation value or undefined when absent/corrupt.
function extractOrientation(exifBlock, isBigEndian) {
  // TODO: assert that this contains 0x002A
  // let STATIC_MOTOROLA_TIFF_HEADER_BYTES = 2
  // let TIFF_IMAGE_FILE_DIRECTORY_BYTES = 4

  // TODO: derive from TIFF_IMAGE_FILE_DIRECTORY_BYTES
  const idfOffset = 8

  // IFD offset works from right after the header bytes
  // (so the offset includes the tiff byte align)
  const offset = EXIF_HEADER_BYTES + idfOffset

  const idfDirectoryEntries = readUInt(exifBlock, 16, offset, isBigEndian)

  for (
    let directoryEntryNumber = 0;
    directoryEntryNumber < idfDirectoryEntries;
    directoryEntryNumber++
  ) {
    const start =
      offset +
      NUM_DIRECTORY_ENTRIES_BYTES +
      directoryEntryNumber * IDF_ENTRY_BYTES
    const end = start + IDF_ENTRY_BYTES

    // Skip on corrupt EXIF blocks
    if (start > exifBlock.length) {
      return
    }

    const block = exifBlock.slice(start, end)
    const tagNumber = readUInt(block, 16, 0, isBigEndian)

    // 0x0112 (decimal: 274) is the `orientation` tag ID
    if (tagNumber === 274) {
      // Data format 3 = unsigned short; anything else is unexpected here.
      const dataFormat = readUInt(block, 16, 2, isBigEndian)
      if (dataFormat !== 3) {
        return
      }

      // unsigned int has 2 bytes per component
      // if there would be more than 4 bytes in total it's a pointer
      const numberOfComponents = readUInt(block, 32, 4, isBigEndian)
      if (numberOfComponents !== 1) {
        return
      }

      return readUInt(block, 16, 8, isBigEndian)
    }
  }
}

// Check an APP1/EXIF block's TIFF byte alignment and, when it is one of
// the two known alignments, extract the orientation from it.
function validateExifBlock(input, index) {
  // Skip APP1 Data Size
  const exifBlock = input.slice(APP1_DATA_SIZE_BYTES, index)

  // Consider byte alignment
  const byteAlign = toHexString(
    exifBlock,
    EXIF_HEADER_BYTES,
    EXIF_HEADER_BYTES + TIFF_BYTE_ALIGN_BYTES,
  )

  // Ignore Empty EXIF. Validate byte alignment
  const isBigEndian = byteAlign === BIG_ENDIAN_BYTE_ALIGN
  const isLittleEndian = byteAlign === LITTLE_ENDIAN_BYTE_ALIGN

  if (isBigEndian || isLittleEndian) {
    return extractOrientation(exifBlock, isBigEndian)
  }
}

// Throw when `index` escapes the buffer or does not point at a JPEG marker.
function validateInput(input, index) {
  // index should be within buffer limits
  if (index > input.length) {
    throw new TypeError('Corrupt JPG, exceeded buffer limits')
  }
  // Every JPEG block must begin with a 0xFF
  if (input[index] !== 0xff) {
    throw new TypeError('Invalid JPG, marker table corrupted')
  }
}

export const JPG = {
  // 0xFFD8 is the JPEG SOI (start-of-image) marker.
  validate: (input) => toHexString(input, 0, 2) === 'ffd8',

  calculate(input) {
    // Skip 4 chars, they are for signature
    input = input.slice(4)

    let orientation
    let next
    while (input.length) {
      // read length of the next block
      const i = readUInt16BE(input, 0)

      // EXIF blocks may carry the orientation; remember it for the result.
      if (isEXIF(input)) {
        orientation = validateExifBlock(input, i)
      }

      // ensure correct format
      validateInput(input, i)

      // 0xFFC0 is baseline standard(SOF)
      // 0xFFC1 is baseline optimized(SOF)
      // 0xFFC2 is progressive(SOF2)
      next = input[i + 1]
      if (next === 0xc0 || next === 0xc1 || next === 0xc2) {
        const size = extractSize(input, i + 5)

        // TODO: is orientation=0 a valid answer here?
        if (!orientation) {
          return size
        }

        return {
          height: size.height,
          orientation,
          width: size.width,
        }
      }

      // move to the next block
      input = input.slice(i + 2)
    }

    throw new TypeError('Invalid JPG, no size found')
  },
}
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
import { toUTF8String, readUInt32LE } from './utils'

/**
 * KTX / KTX2 texture container support.
 * Both versions start with a fixed 12-byte signature; bytes 1-6 spell
 * out the container version ('KTX 11' or 'KTX 20').
 */
export const KTX = {
  validate(input) {
    return ['KTX 11', 'KTX 20'].includes(toUTF8String(input, 1, 7))
  },

  calculate(input) {
    // Byte 5 is '1' (0x31) for classic KTX; otherwise treat it as KTX2.
    const isKtx1 = input[5] === 0x31
    const type = isKtx1 ? 'ktx' : 'ktx2'
    // Pixel dimensions sit at a fixed, version-specific header offset.
    const offset = isKtx1 ? 36 : 20
    const width = readUInt32LE(input, offset)
    const height = readUInt32LE(input, offset + 4)
    return { height, width, type }
  },
}
|
|
@@ -0,0 +1,36 @@
|
|
|
1
|
+
import { toUTF8String, readUInt32BE } from './utils'

const pngSignature = 'PNG\r\n\x1a\n'
const pngImageHeaderChunkName = 'IHDR'

// Used to detect "fried" png's: http://www.jongware.com/pngdefry.html
const pngFriedChunkName = 'CgBI'

export const PNG = {
  validate(input) {
    // Bail out early when the 8-byte PNG signature is absent.
    if (toUTF8String(input, 1, 8) !== pngSignature) {
      return false
    }
    // Apple-"fried" PNGs insert a CgBI chunk ahead of IHDR.
    let chunkName = toUTF8String(input, 12, 16)
    if (chunkName === pngFriedChunkName) {
      chunkName = toUTF8String(input, 28, 32)
    }
    if (chunkName !== pngImageHeaderChunkName) {
      throw new TypeError('Invalid PNG')
    }
    return true
  },

  calculate(input) {
    // Fried PNGs carry IHDR 16 bytes further into the file.
    const isFried = toUTF8String(input, 12, 16) === pngFriedChunkName
    const base = isFried ? 32 : 16
    return {
      height: readUInt32BE(input, base + 4),
      width: readUInt32BE(input, base),
    }
  },
}
|
|
@@ -0,0 +1,74 @@
|
|
|
1
|
+
import { toUTF8String } from './utils'

// Magic-number → netpbm flavour.
const PNMTypes = {
  P1: 'pbm/ascii',
  P2: 'pgm/ascii',
  P3: 'ppm/ascii',
  P4: 'pbm',
  P5: 'pgm',
  P6: 'ppm',
  P7: 'pam',
  PF: 'pfm',
}

const handlers = {
  // P1-P6/PF: the first non-comment header line holds "<width> <height>".
  default: (lines) => {
    let dimensions = []

    for (const line of lines) {
      // '#' lines are comments; skip them.
      if (line[0] === '#') {
        continue
      }
      dimensions = line.split(' ')
      break
    }

    if (dimensions.length !== 2) {
      throw new TypeError('Invalid PNM')
    }
    return {
      height: parseInt(dimensions[1], 10),
      width: parseInt(dimensions[0], 10),
    }
  },

  // P7 (PAM): header is "KEY value" pairs, e.g. "WIDTH 640" / "HEIGHT 480".
  pam: (lines) => {
    const size = {}
    for (const line of lines) {
      // Skip lines that look like binary payload rather than ASCII header.
      if (line.length > 16 || line.charCodeAt(0) > 128) {
        continue
      }
      const [key, value] = line.split(' ')
      if (key && value) {
        size[key.toLowerCase()] = parseInt(value, 10)
      }
      if (size.height && size.width) {
        break
      }
    }

    if (!size.height || !size.width) {
      throw new TypeError('Invalid PAM')
    }
    return {
      height: size.height,
      width: size.width,
    }
  },
}

export const PNM = {
  validate: (input) => toUTF8String(input, 0, 2) in PNMTypes,

  calculate(input) {
    const type = PNMTypes[toUTF8String(input, 0, 2)]
    // TODO: this probably generates garbage. move to a stream based parser
    const lines = toUTF8String(input, 3).split(/[\r\n]+/)
    return (handlers[type] || handlers.default)(lines)
  },
}
|
|
@@ -0,0 +1,100 @@
|
|
|
1
|
+
import { toUTF8String } from './utils'

// Matches an opening <svg ...> tag, tolerating '>' characters that
// appear inside quoted attribute values.
const svgReg = /<svg\s([^>"']|"[^"]*"|'[^']*')*>/

const extractorRegExps = {
  // [^%] rejects percentage sizes — they are relative, not absolute.
  height: /\sheight=(['"])([^%]+?)\1/,
  root: svgReg,
  viewbox: /\sviewBox=(['"])(.+?)\1/i,
  width: /\swidth=(['"])([^%]+?)\1/,
}

// CSS absolute-length conversion factors, expressed in px per unit.
const INCH_CM = 2.54
const units = {
  in: 96,
  cm: 96 / INCH_CM,
  em: 16,
  ex: 8,
  m: (96 / INCH_CM) * 100,
  mm: 96 / INCH_CM / 10,
  pc: 96 / 72 / 12,
  pt: 96 / 72,
  px: 1,
}

// "<number><optional unit>", e.g. "12", "1.5e2cm".
const unitsReg = new RegExp(
  `^([0-9.]+(?:e\\d+)?)(${Object.keys(units).join('|')})?$`,
)

// Parse a length attribute into rounded pixels; undefined when unparsable.
function parseLength(len) {
  const m = unitsReg.exec(len)
  if (!m) {
    return undefined
  }
  // A missing unit falls back to px (factor 1).
  return Math.round(Number(m[1]) * (units[m[2]] || 1))
}

// "min-x min-y width height" → pixel width/height.
function parseViewbox(viewbox) {
  const bounds = viewbox.split(' ')
  return {
    height: parseLength(bounds[3]),
    width: parseLength(bounds[2]),
  }
}

// Pull width/height/viewBox out of the root <svg> tag's text.
function parseAttributes(root) {
  const width = root.match(extractorRegExps.width)
  const height = root.match(extractorRegExps.height)
  const viewbox = root.match(extractorRegExps.viewbox)
  return {
    height: height && (parseLength(height[2])),
    viewbox: viewbox && (parseViewbox(viewbox[2])),
    width: width && (parseLength(width[2])),
  }
}

// Explicit width + height attributes win outright.
function calculateByDimensions(attrs) {
  return {
    height: attrs.height,
    width: attrs.width,
  }
}

// Derive missing dimension(s) from the viewBox aspect ratio; fall back
// to the viewBox size when neither attribute is present.
function calculateByViewbox(attrs, viewbox) {
  const ratio = (viewbox.width) / (viewbox.height)
  if (attrs.width) {
    return {
      height: Math.floor(attrs.width / ratio),
      width: attrs.width,
    }
  }
  if (attrs.height) {
    return {
      height: attrs.height,
      width: Math.floor(attrs.height * ratio),
    }
  }
  return {
    height: viewbox.height,
    width: viewbox.width,
  }
}

export const SVG = {
  // Scan only the first kilo-byte to speed up the check on larger files
  validate: (input) => svgReg.test(toUTF8String(input, 0, 1000)),

  calculate(input) {
    const root = toUTF8String(input).match(extractorRegExps.root)
    if (root) {
      const attrs = parseAttributes(root[0])
      if (attrs.width && attrs.height) {
        return calculateByDimensions(attrs)
      }
      if (attrs.viewbox) {
        return calculateByViewbox(attrs, attrs.viewbox)
      }
    }
    throw new TypeError('Invalid SVG')
  },
}
|
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
import { readUInt16LE } from './utils'

/**
 * Truevision TGA support. The header carries little-endian u16 width at
 * byte 12 and height at byte 14; validation checks that the first and
 * third header shorts are zero.
 */
export const TGA = {
  validate: (input) =>
    readUInt16LE(input, 0) === 0 && readUInt16LE(input, 4) === 0,

  calculate: (input) => ({
    height: readUInt16LE(input, 14),
    width: readUInt16LE(input, 12),
  }),
}
|
|
@@ -0,0 +1,92 @@
|
|
|
1
|
+
// based on http://www.compix.com/fileformattif.htm
// TO-DO: support big-endian as well
import { readUInt, toHexString, toUTF8String } from './utils'

// Read IFD (image-file-directory) into a buffer
function readIFD(input, isBigEndian) {
  // Bytes 4-7 of the TIFF header hold the offset of the first IFD.
  const ifdOffset = readUInt(input, 32, 4, isBigEndian)
  // +2 skips the entry-count field so the slice starts at the first tag.
  return input.slice(ifdOffset + 2)
}

// TIFF values seem to be messed up on Big-Endian, this helps
function readValue(input, isBigEndian) {
  const low = readUInt(input, 16, 8, isBigEndian)
  const high = readUInt(input, 16, 10, isBigEndian)
  return (high << 16) + low
}

// move to the next tag; each tag record is 12 bytes, undefined once
// fewer than two full records remain
function nextTag(input) {
  if (input.length > 24) {
    return input.slice(12)
  }
}

// Extract IFD tags from TIFF metadata
function extractTags(input, isBigEndian) {
  const tags = {}

  let temp = input
  while (temp && temp.length) {
    const code = readUInt(temp, 16, 0, isBigEndian)
    const type = readUInt(temp, 16, 2, isBigEndian)
    const length = readUInt(temp, 32, 4, isBigEndian)

    // 0 means end of IFD
    if (code === 0) {
      break
    } else {
      // 256 is width, 257 is height
      // if (code === 256 || code === 257) {
      // Only single-value SHORT (3) / LONG (4) entries are stored inline;
      // anything larger would be a pointer into the file.
      if (length === 1 && (type === 3 || type === 4)) {
        tags[code] = readValue(temp, isBigEndian)
      }

      // move to the next tag
      temp = nextTag(temp)
    }
  }

  return tags
}

// Test if the TIFF is Big Endian or Little Endian
function determineEndianness(input) {
  const signature = toUTF8String(input, 0, 2)
  if ('II' === signature) {
    return 'LE'
  } else if ('MM' === signature) {
    return 'BE'
  }
}

const signatures = [
  // '492049', // currently not supported
  '49492a00', // Little endian
  '4d4d002a', // Big Endian
  // '4d4d002a', // BigTIFF > 4GB. currently not supported
]

export const TIFF = {
  validate: (input) => signatures.includes(toHexString(input, 0, 4)),

  calculate(input) {
    // Determine BE/LE
    const isBigEndian = determineEndianness(input) === 'BE'

    // read the IFD
    const ifdBuffer = readIFD(input, isBigEndian)

    // extract the tags from the IFD
    const tags = extractTags(ifdBuffer, isBigEndian)

    // Tag 256 = ImageWidth, tag 257 = ImageLength (height).
    const width = tags[256]
    const height = tags[257]

    if (!width || !height) {
      throw new TypeError('Invalid Tiff. Missing tags')
    }

    return { height, width }
  },
}
|
|
@@ -0,0 +1,83 @@
|
|
|
1
|
+
const decoder = new TextDecoder()

/** Decode the byte range [start, end) of `input` as a UTF-8 string. */
export const toUTF8String = (input, start = 0, end = input.length) =>
  decoder.decode(input.slice(start, end))

/** Render the byte range [start, end) as lowercase hex, two digits per byte. */
export const toHexString = (input, start = 0, end = input.length) => {
  let hex = ''
  for (const byte of input.slice(start, end)) {
    hex += ('0' + byte.toString(16)).slice(-2)
  }
  return hex
}

/** Big-endian unsigned 16-bit read. */
export const readUInt16BE = (input, offset = 0) =>
  input[offset] * 0x100 + input[offset + 1]

/** Little-endian unsigned 16-bit read. */
export const readUInt16LE = (input, offset = 0) =>
  input[offset] + input[offset + 1] * 0x100

/** Little-endian signed 16-bit read. */
export const readInt16LE = (input, offset = 0) => {
  const val = input[offset] + input[offset + 1] * 0x100
  // Sign-extend bit 15 across the upper bits (two's-complement trick).
  return val | ((val & 0x8000) * 0x1fffe)
}

/** Little-endian unsigned 24-bit read. */
export const readUInt24LE = (input, offset = 0) =>
  input[offset] + input[offset + 1] * 0x100 + input[offset + 2] * 0x10000

/** Little-endian signed 32-bit read (top byte shifted, so bit 31 signs). */
export const readInt32LE = (input, offset = 0) =>
  input[offset] +
  input[offset + 1] * 0x100 +
  input[offset + 2] * 0x10000 +
  (input[offset + 3] << 24)

/** Big-endian unsigned 32-bit read. */
export const readUInt32BE = (input, offset = 0) =>
  input[offset] * 0x1000000 +
  input[offset + 1] * 0x10000 +
  input[offset + 2] * 0x100 +
  input[offset + 3]

/** Little-endian unsigned 32-bit read. */
export const readUInt32LE = (input, offset = 0) =>
  input[offset] +
  input[offset + 1] * 0x100 +
  input[offset + 2] * 0x10000 +
  input[offset + 3] * 0x1000000

// Dispatch table for readUInt: bit width + endianness → concrete reader.
const uintReaders = {
  readUInt16BE,
  readUInt16LE,
  readUInt32BE,
  readUInt32LE,
}

/**
 * Read an unsigned integer of `bits` (16 or 32) at `offset`,
 * big-endian when `isBigEndian` is truthy, little-endian otherwise.
 */
export function readUInt(input, bits, offset, isBigEndian) {
  const suffix = isBigEndian ? 'BE' : 'LE'
  return uintReaders['readUInt' + bits + suffix](input, offset || 0)
}

// Parse one box header at `offset` (big-endian u32 size + 4-char name);
// undefined when the declared box does not fit inside the buffer.
function readBox(buffer, offset) {
  if (buffer.length - offset < 4) return
  const size = readUInt32BE(buffer, offset)
  if (buffer.length - offset < size) return
  return {
    name: toUTF8String(buffer, 4 + offset, 8 + offset),
    offset,
    size,
  }
}

/** Scan consecutive boxes for one named `boxName`; undefined when absent. */
export function findBox(buffer, boxName, offset) {
  for (
    let box = readBox(buffer, offset);
    box;
    box = readBox(buffer, offset)
  ) {
    if (box.name === boxName) return box
    offset += box.size
  }
}
|
|
@@ -0,0 +1,67 @@
|
|
|
1
|
+
// based on https://developers.google.com/speed/webp/docs/riff_container
import { toHexString, toUTF8String, readInt16LE, readUInt24LE } from './utils'

// VP8X (extended) chunk: 24-bit little-endian width/height, stored minus one.
function calculateExtended(input) {
  return {
    height: 1 + readUInt24LE(input, 7),
    width: 1 + readUInt24LE(input, 4),
  }
}

// VP8L (lossless) bitstream: 14-bit dimensions bit-packed across bytes 1-4.
function calculateLossless(input) {
  return {
    height:
      1 +
      (((input[4] & 0xf) << 10) | (input[3] << 2) | ((input[2] & 0xc0) >> 6)),
    width: 1 + (((input[2] & 0x3f) << 8) | input[1]),
  }
}

// VP8 (lossy) bitstream: dimensions as two little-endian 16-bit values.
function calculateLossy(input) {
  // `& 0x3fff` returns the last 14 bits
  // TO-DO: include webp scaling in the calculations
  return {
    height: readInt16LE(input, 8) & 0x3fff,
    width: readInt16LE(input, 6) & 0x3fff,
  }
}

export const WEBP = {
  validate(input) {
    const riffHeader = 'RIFF' === toUTF8String(input, 0, 4)
    const webpHeader = 'WEBP' === toUTF8String(input, 8, 12)
    // 'VP8' prefix covers all three chunk types: 'VP8 ', 'VP8L', 'VP8X'.
    const vp8Header = 'VP8' === toUTF8String(input, 12, 15)
    return riffHeader && webpHeader && vp8Header
  },

  calculate(input) {
    const chunkHeader = toUTF8String(input, 12, 16)
    // Keep only the chunk payload bytes the parsers below index into.
    input = input.slice(20, 30)

    // Extended webp stream signature
    if (chunkHeader === 'VP8X') {
      const extendedHeader = input[0]
      const validStart = (extendedHeader & 0xc0) === 0
      const validEnd = (extendedHeader & 0x01) === 0
      if (validStart && validEnd) {
        return calculateExtended(input)
      } else {
        // TODO: breaking change
        throw new TypeError('Invalid WebP')
      }
    }

    // Lossy (VP8) webp stream signature
    // NOTE(review): the original comment labelled this branch "Lossless",
    // but it dispatches to calculateLossy — labels were swapped.
    if (chunkHeader === 'VP8 ' && input[0] !== 0x2f) {
      return calculateLossy(input)
    }

    // Lossless (VP8L) webp stream signature
    const signature = toHexString(input, 3, 6)
    if (chunkHeader === 'VP8L' && signature !== '9d012a') {
      return calculateLossless(input)
    }

    throw new TypeError('Invalid WebP')
  },
}
|
package/src/util.js
CHANGED
|
@@ -32,9 +32,7 @@ export function formatFileSize (value) { // 文件大小格式化,自动换算
|
|
|
32
32
|
}
|
|
33
33
|
}
|
|
34
34
|
export async function getPhotoInfo(file) { // 获取照片的经纬度
|
|
35
|
-
let
|
|
36
|
-
sliceFile.name = file.name
|
|
37
|
-
let exif = await getPhotoTags(sliceFile)
|
|
35
|
+
let exif = await getPhotoTags(file)
|
|
38
36
|
let {GPSAltitude, GPSLatitude, GPSLatitudeRef, GPSLongitude, GPSLongitudeRef, DateTimeOriginal, Model, PixelXDimension, PixelYDimension} = exif
|
|
39
37
|
let position = null
|
|
40
38
|
if (GPSLongitude && GPSLatitude) {
|
|
@@ -56,15 +54,13 @@ export async function getPhotoInfo(file) { // 获取照片的经纬度
|
|
|
56
54
|
export function getPhotoMeta(file) { // 获取照片的元数据,包括exif和md5,为了提高计算效率,只取前64k进行计算
|
|
57
55
|
return new Promise((resolve, reject) => {
|
|
58
56
|
let md5, exif
|
|
59
|
-
|
|
60
|
-
sliceFile.name = file.name
|
|
61
|
-
getPhotoTags(sliceFile).then(data => {
|
|
57
|
+
getPhotoTags(file).then(data => {
|
|
62
58
|
exif = data
|
|
63
59
|
done()
|
|
64
60
|
})
|
|
65
61
|
let fileReader = new FileReader()
|
|
66
62
|
let spark = new SparkMD5.ArrayBuffer()
|
|
67
|
-
fileReader.readAsArrayBuffer(
|
|
63
|
+
fileReader.readAsArrayBuffer(file)
|
|
68
64
|
fileReader.onload = e => {
|
|
69
65
|
spark.append(e.target.result)
|
|
70
66
|
md5 = spark.end()
|