@mapcatch/util 1.0.7 → 1.0.8-a

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,18 @@
1
+ import { toUTF8String, readUInt32LE } from './utils'
2
+
3
export const KTX = {
  // A KTX container opens with «0xAB 'K' 'T' 'X' ' ' v v 0xBB …»; bytes 1-6
  // therefore spell "KTX 11" (KTX) or "KTX 20" (KTX2).
  validate(input) {
    const magic = toUTF8String(input, 1, 7)
    return magic === 'KTX 11' || magic === 'KTX 20'
  },

  // Width/height sit at a fixed header offset that differs between the two
  // container versions; byte 5 is '1' (0x31) for classic KTX.
  calculate(input) {
    const isKtx1 = input[5] === 0x31
    const headerOffset = isKtx1 ? 36 : 20
    return {
      height: readUInt32LE(input, headerOffset + 4),
      type: isKtx1 ? 'ktx' : 'ktx2',
      width: readUInt32LE(input, headerOffset),
    }
  },
}
@@ -0,0 +1,36 @@
1
+ import { toUTF8String, readUInt32BE } from './utils'
2
+
3
const pngSignature = 'PNG\r\n\x1a\n'
const pngImageHeaderChunkName = 'IHDR'

// Used to detect "fried" png's: http://www.jongware.com/pngdefry.html
const pngFriedChunkName = 'CgBI'

export const PNG = {
  validate(input) {
    // Bytes 1-7 of a PNG carry the fixed signature (byte 0 is 0x89).
    if (toUTF8String(input, 1, 8) !== pngSignature) {
      return false
    }
    let chunkName = toUTF8String(input, 12, 16)
    if (chunkName === pngFriedChunkName) {
      // A CgBI chunk precedes IHDR, so look one chunk further along.
      chunkName = toUTF8String(input, 28, 32)
    }
    if (chunkName !== pngImageHeaderChunkName) {
      throw new TypeError('Invalid PNG')
    }
    return true
  },

  calculate(input) {
    // "Fried" files shift the IHDR dimension fields 16 bytes deeper.
    const fried = toUTF8String(input, 12, 16) === pngFriedChunkName
    const base = fried ? 32 : 16
    return {
      height: readUInt32BE(input, base + 4),
      width: readUInt32BE(input, base),
    }
  },
}
@@ -0,0 +1,74 @@
1
+ import { toUTF8String } from './utils'
2
+
3
// Netpbm magic numbers → format names.
const PNMTypes = {
  P1: 'pbm/ascii',
  P2: 'pgm/ascii',
  P3: 'ppm/ascii',
  P4: 'pbm',
  P5: 'pgm',
  P6: 'ppm',
  P7: 'pam',
  PF: 'pfm',
}

const handlers = {
  // Every format except PAM: the first non-comment, non-blank line holds
  // "<width> <height>".
  default: (lines) => {
    let dimensions = []

    while (lines.length > 0) {
      const line = lines.shift()
      // '#' starts a comment line; comments may precede the dimensions.
      if (line[0] === '#') {
        continue
      }
      // The Netpbm spec allows any whitespace between tokens, so split on
      // whitespace runs and drop empty tokens (handles tabs and repeated
      // spaces, which a plain split(' ') would reject).
      dimensions = line.trim().split(/\s+/).filter(Boolean)
      if (dimensions.length === 0) {
        continue // blank line — keep scanning
      }
      break
    }

    if (dimensions.length === 2) {
      return {
        height: parseInt(dimensions[1], 10),
        width: parseInt(dimensions[0], 10),
      }
    } else {
      throw new TypeError('Invalid PNM')
    }
  },
  // PAM: a "KEY VALUE" header block terminated by ENDHDR.
  pam: (lines) => {
    const size = {}
    while (lines.length > 0) {
      const line = lines.shift()
      // Skip lines that are clearly binary payload: overly long, or starting
      // with a non-ASCII byte.
      if (line.length > 16 || line.charCodeAt(0) > 128) {
        continue
      }
      const [key, value] = line.split(' ')
      if (key && value) {
        size[key.toLowerCase()] = parseInt(value, 10)
      }
      if (size.height && size.width) {
        break
      }
    }

    if (size.height && size.width) {
      return {
        height: size.height,
        width: size.width,
      }
    } else {
      throw new TypeError('Invalid PAM')
    }
  },
}

export const PNM = {
  validate: (input) => toUTF8String(input, 0, 2) in PNMTypes,

  calculate(input) {
    const signature = toUTF8String(input, 0, 2)
    const type = PNMTypes[signature]
    // TODO: this probably generates garbage. move to a stream based parser
    const lines = toUTF8String(input, 3).split(/[\r\n]+/)
    const handler = handlers[type] || handlers.default
    return handler(lines)
  },
}
@@ -0,0 +1,10 @@
1
+ import { toUTF8String, readUInt32BE } from './utils'
2
+
3
export const PSD = {
  // Photoshop documents open with the ASCII signature "8BPS".
  validate: (input) => toUTF8String(input, 0, 4) === '8BPS',

  // The file header stores height (rows) at byte 14 and width (columns) at
  // byte 18, both as big-endian 32-bit integers.
  calculate(input) {
    const height = readUInt32BE(input, 14)
    const width = readUInt32BE(input, 18)
    return { height, width }
  },
}
@@ -0,0 +1,100 @@
1
+ import { toUTF8String } from './utils'
2
+
3
// Matches the opening <svg …> tag, tolerating quoted attribute values that
// may themselves contain '>'.
const svgReg = /<svg\s([^>"']|"[^"]*"|'[^']*')*>/

const extractorRegExps = {
  height: /\sheight=(['"])([^%]+?)\1/,
  root: svgReg,
  viewbox: /\sviewBox=(['"])(.+?)\1/i,
  width: /\swidth=(['"])([^%]+?)\1/,
}

// CSS absolute lengths expressed in pixels (96 px per inch).
const INCH_CM = 2.54
const units = {
  in: 96,
  cm: 96 / INCH_CM,
  em: 16,
  ex: 8,
  m: (96 / INCH_CM) * 100,
  mm: 96 / INCH_CM / 10,
  pc: 96 / 72 / 12,
  pt: 96 / 72,
  px: 1,
}

const unitsReg = new RegExp(
  `^([0-9.]+(?:e\\d+)?)(${Object.keys(units).join('|')})?$`,
)

// "12.5cm" → pixels, rounded; undefined for values we cannot parse
// (e.g. percentages, which the width/height regexes already exclude).
function parseLength(len) {
  const match = unitsReg.exec(len)
  return match
    ? Math.round(Number(match[1]) * (units[match[2]] || 1))
    : undefined
}

// viewBox = "min-x min-y width height"
function parseViewbox(viewbox) {
  const [, , width, height] = viewbox.split(' ')
  return {
    height: parseLength(height),
    width: parseLength(width),
  }
}

// Pull width/height/viewBox out of the opening <svg> tag text.
function parseAttributes(root) {
  const height = root.match(extractorRegExps.height)
  const viewbox = root.match(extractorRegExps.viewbox)
  const width = root.match(extractorRegExps.width)
  return {
    height: height && parseLength(height[2]),
    viewbox: viewbox && parseViewbox(viewbox[2]),
    width: width && parseLength(width[2]),
  }
}

function calculateByDimensions(attrs) {
  return {
    height: attrs.height,
    width: attrs.width,
  }
}

// Derive the missing dimension from the viewBox aspect ratio; fall back to
// the viewBox size when neither width nor height is given.
function calculateByViewbox(attrs, viewbox) {
  const ratio = viewbox.width / viewbox.height
  if (attrs.width) {
    return {
      height: Math.floor(attrs.width / ratio),
      width: attrs.width,
    }
  }
  if (attrs.height) {
    return {
      height: attrs.height,
      width: Math.floor(attrs.height * ratio),
    }
  }
  return {
    height: viewbox.height,
    width: viewbox.width,
  }
}

export const SVG = {
  // Scan only the first kilo-byte to speed up the check on larger files
  validate: (input) => svgReg.test(toUTF8String(input, 0, 1000)),

  calculate(input) {
    const root = toUTF8String(input).match(extractorRegExps.root)
    if (root) {
      const attrs = parseAttributes(root[0])
      if (attrs.width && attrs.height) {
        return calculateByDimensions(attrs)
      }
      if (attrs.viewbox) {
        return calculateByViewbox(attrs, attrs.viewbox)
      }
    }
    throw new TypeError('Invalid SVG')
  },
}
@@ -0,0 +1,14 @@
1
+ import { readUInt16LE } from './utils'
2
+
3
export const TGA = {
  // TGA has no magic number; heuristically require header bytes 0-1 and the
  // bytes at offsets 4-5 (part of the colour-map spec) to all be zero.
  validate: (input) =>
    readUInt16LE(input, 0) === 0 && readUInt16LE(input, 4) === 0,

  // Image width and height are little-endian 16-bit fields at bytes 12 / 14.
  calculate: (input) => ({
    height: readUInt16LE(input, 14),
    width: readUInt16LE(input, 12),
  }),
}
@@ -0,0 +1,92 @@
1
+ // based on http://www.compix.com/fileformattif.htm
2
+ // TO-DO: support big-endian as well
3
+ import { readUInt, toHexString, toUTF8String } from './utils'
4
+
5
// Read the first IFD (image-file-directory) into a buffer, skipping the
// 2-byte entry count at its head.
function readIFD(input, isBigEndian) {
  const ifdOffset = readUInt(input, 32, 4, isBigEndian)
  return input.slice(ifdOffset + 2)
}

// TIFF values seem to be messed up on Big-Endian, this helps
function readValue(input, isBigEndian) {
  const low = readUInt(input, 16, 8, isBigEndian)
  const high = readUInt(input, 16, 10, isBigEndian)
  return (high << 16) + low
}

// Advance to the next 12-byte IFD entry; undefined once exhausted.
function nextTag(input) {
  if (input.length > 24) {
    return input.slice(12)
  }
}

// Extract IFD tags from TIFF metadata
function extractTags(input, isBigEndian) {
  const tags = {}

  let temp = input
  while (temp && temp.length) {
    const code = readUInt(temp, 16, 0, isBigEndian)
    const type = readUInt(temp, 16, 2, isBigEndian)
    const length = readUInt(temp, 32, 4, isBigEndian)

    // 0 means end of IFD
    if (code === 0) {
      break
    } else {
      // Record single-count SHORT (3) / LONG (4) values; tags 256 and 257
      // (ImageWidth / ImageLength) are the ones the caller reads.
      if (length === 1 && (type === 3 || type === 4)) {
        tags[code] = readValue(temp, isBigEndian)
      }

      // move to the next tag
      temp = nextTag(temp)
    }
  }

  return tags
}

// Test if the TIFF is Big Endian or Little Endian
function determineEndianness(input) {
  const signature = toUTF8String(input, 0, 2)
  if ('II' === signature) {
    return 'LE'
  } else if ('MM' === signature) {
    return 'BE'
  }
}

const signatures = [
  // '492049', // currently not supported
  '49492a00', // Little endian
  '4d4d002a', // Big Endian
  // '49492b00' / '4d4d002b', // BigTIFF (version 43, > 4GB) — currently not supported
]

export const TIFF = {
  validate: (input) => signatures.includes(toHexString(input, 0, 4)),

  calculate(input) {
    // Determine BE/LE
    const isBigEndian = determineEndianness(input) === 'BE'

    // read the IFD
    const ifdBuffer = readIFD(input, isBigEndian)

    // extract the tags from the IFD
    const tags = extractTags(ifdBuffer, isBigEndian)

    const width = tags[256]
    const height = tags[257]

    if (!width || !height) {
      throw new TypeError('Invalid Tiff. Missing tags')
    }

    return { height, width }
  },
}
@@ -0,0 +1,83 @@
1
const decoder = new TextDecoder()

// Decode a byte range of `input` as UTF-8 text.
export const toUTF8String = (
  input,
  start = 0,
  end = input.length,
) => decoder.decode(input.slice(start, end))

// Render a byte range as a lowercase hex string.
export const toHexString = (input, start = 0, end = input.length) =>
  input
    .slice(start, end)
    .reduce((memo, i) => memo + ('0' + i.toString(16)).slice(-2), '')

// Little-endian signed 16-bit read; the multiply-or trick sign-extends the
// top bit across bits 16-31.
export const readInt16LE = (input, offset = 0) => {
  const val = input[offset] + input[offset + 1] * 2 ** 8
  return val | ((val & (2 ** 15)) * 0x1fffe)
}

export const readUInt16BE = (input, offset = 0) =>
  input[offset] * 2 ** 8 + input[offset + 1]

export const readUInt16LE = (input, offset = 0) =>
  input[offset] + input[offset + 1] * 2 ** 8

export const readUInt24LE = (input, offset = 0) =>
  input[offset] + input[offset + 1] * 2 ** 8 + input[offset + 2] * 2 ** 16

// Little-endian signed 32-bit read; the final byte uses `<<` so the result
// carries the sign bit.
export const readInt32LE = (input, offset = 0) =>
  input[offset] +
  input[offset + 1] * 2 ** 8 +
  input[offset + 2] * 2 ** 16 +
  (input[offset + 3] << 24)

export const readUInt32BE = (input, offset = 0) =>
  input[offset] * 2 ** 24 +
  input[offset + 1] * 2 ** 16 +
  input[offset + 2] * 2 ** 8 +
  input[offset + 3]

export const readUInt32LE = (input, offset = 0) =>
  input[offset] +
  input[offset + 1] * 2 ** 8 +
  input[offset + 2] * 2 ** 16 +
  input[offset + 3] * 2 ** 24

// Abstract reading multi-byte unsigned integers
const methods = {
  readUInt16BE,
  readUInt16LE,
  readUInt32BE,
  readUInt32LE,
}

// Dispatch to the 16/32-bit BE/LE reader selected by `bits` / `isBigEndian`.
export function readUInt(
  input,
  bits,
  offset,
  isBigEndian,
) {
  offset = offset || 0
  const endian = isBigEndian ? 'BE' : 'LE'
  const methodName = 'readUInt' + bits + endian
  return methods[methodName](input, offset)
}

// Parse one ISO-BMFF box header at `offset`; undefined when the remaining
// bytes cannot hold the declared box.
function readBox(buffer, offset) {
  if (buffer.length - offset < 4) return
  const boxSize = readUInt32BE(buffer, offset)
  if (buffer.length - offset < boxSize) return
  return {
    name: toUTF8String(buffer, 4 + offset, 8 + offset),
    offset,
    size: boxSize,
  }
}

// Walk sibling boxes from `offset` until one named `boxName` is found.
export function findBox(buffer, boxName, offset) {
  while (offset < buffer.length) {
    const box = readBox(buffer, offset)
    if (!box) break
    if (box.name === boxName) return box
    // A declared size of 0 would never advance `offset`, spinning forever on
    // malformed input — bail out instead.
    if (box.size === 0) break
    offset += box.size
  }
}
@@ -0,0 +1,67 @@
1
+ // based on https://developers.google.com/speed/webp/docs/riff_container
2
+ import { toHexString, toUTF8String, readInt16LE, readUInt24LE } from './utils'
3
+
4
// Extended (VP8X) chunk: 24-bit little-endian canvas width/height minus one.
function calculateExtended(input) {
  return {
    height: 1 + readUInt24LE(input, 7),
    width: 1 + readUInt24LE(input, 4),
  }
}

// Lossless (VP8L) chunk: 14-bit width/height packed across bytes 1-4.
function calculateLossless(input) {
  return {
    height:
      1 +
      (((input[4] & 0xf) << 10) | (input[3] << 2) | ((input[2] & 0xc0) >> 6)),
    width: 1 + (((input[2] & 0x3f) << 8) | input[1]),
  }
}

// Lossy (VP8) chunk.
function calculateLossy(input) {
  // `& 0x3fff` returns the last 14 bits
  // TO-DO: include webp scaling in the calculations
  return {
    height: readInt16LE(input, 8) & 0x3fff,
    width: readInt16LE(input, 6) & 0x3fff,
  }
}

export const WEBP = {
  validate(input) {
    const riffHeader = 'RIFF' === toUTF8String(input, 0, 4)
    const webpHeader = 'WEBP' === toUTF8String(input, 8, 12)
    const vp8Header = 'VP8' === toUTF8String(input, 12, 15)
    return riffHeader && webpHeader && vp8Header
  },

  calculate(input) {
    const chunkHeader = toUTF8String(input, 12, 16)
    // Keep just the 10 payload bytes the per-chunk parsers need.
    input = input.slice(20, 30)

    // Extended webp stream signature
    if (chunkHeader === 'VP8X') {
      const extendedHeader = input[0]
      const validStart = (extendedHeader & 0xc0) === 0
      const validEnd = (extendedHeader & 0x01) === 0
      if (validStart && validEnd) {
        return calculateExtended(input)
      } else {
        // TODO: breaking change
        throw new TypeError('Invalid WebP')
      }
    }

    // Lossy webp stream signature ('VP8 '); the original comments had
    // lossy/lossless swapped.
    if (chunkHeader === 'VP8 ' && input[0] !== 0x2f) {
      return calculateLossy(input)
    }

    // Lossless webp stream signature ('VP8L')
    const signature = toHexString(input, 3, 6)
    if (chunkHeader === 'VP8L' && signature !== '9d012a') {
      return calculateLossless(input)
    }

    throw new TypeError('Invalid WebP')
  },
}