@netlify/headers-parser 7.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/LICENSE ADDED
@@ -0,0 +1,22 @@
1
+ Copyright (c) 2021 Netlify <team@netlify.com>
2
+
3
+ MIT License
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining
6
+ a copy of this software and associated documentation files (the
7
+ "Software"), to deal in the Software without restriction, including
8
+ without limitation the rights to use, copy, modify, merge, publish,
9
+ distribute, sublicense, and/or sell copies of the Software, and to
10
+ permit persons to whom the Software is furnished to do so, subject to
11
+ the following conditions:
12
+
13
+ The above copyright notice and this permission notice shall be
14
+ included in all copies or substantial portions of the Software.
15
+
16
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
17
+ EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
18
+ MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
19
+ NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
20
+ LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
21
+ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
22
+ WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
package/README.md ADDED
@@ -0,0 +1,3 @@
1
+ # Netlify Headers Parser
2
+
3
+ Parses header rules from both `_headers` and `netlify.toml` and normalizes them to an array of objects.
package/lib/all.d.ts ADDED
@@ -0,0 +1,9 @@
1
+ export declare const parseAllHeaders: ({ headersFiles, netlifyConfigPath, configHeaders, minimal, }: {
2
+ headersFiles?: never[] | undefined;
3
+ netlifyConfigPath: any;
4
+ configHeaders?: never[] | undefined;
5
+ minimal?: boolean | undefined;
6
+ }) => Promise<{
7
+ headers: import("./types.js").Header[];
8
+ errors: Error[];
9
+ }>;
package/lib/all.js ADDED
@@ -0,0 +1,36 @@
1
import { parseFileHeaders } from './line_parser.js';
import { mergeHeaders } from './merge.js';
import { parseConfigHeaders } from './netlify_config_parser.js';
import { normalizeHeaders } from './normalize.js';
import { splitResults, concatResults } from './results.js';

// Parse all headers from `netlify.toml` and `_headers` files, then normalize
// and validate them. Problems are collected into the returned `errors` array
// rather than thrown, so one bad rule does not discard the rest.
export const parseAllHeaders = async function ({ headersFiles = [], netlifyConfigPath, configHeaders = [], minimal = false, }) {
    // `_headers` files and `netlify.toml` are independent inputs, so read
    // and parse them concurrently.
    const [fileResults, configResults] = await Promise.all([
        getFileHeaders(headersFiles),
        getConfigHeaders(netlifyConfigPath),
    ]);
    const normalizedFile = normalizeHeaders(fileResults.headers, minimal);
    const normalizedParsedConfig = normalizeHeaders(configResults.headers, minimal);
    const normalizedConfig = normalizeHeaders(configHeaders, minimal);
    // Config headers (parsed from file + passed-in) come after file headers,
    // so they win on conflicts during merging.
    const merged = mergeHeaders({
        fileHeaders: normalizedFile.headers,
        configHeaders: [...normalizedParsedConfig.headers, ...normalizedConfig.headers],
    });
    const errors = [
        ...fileResults.errors,
        ...normalizedFile.errors,
        ...configResults.errors,
        ...normalizedParsedConfig.errors,
        ...normalizedConfig.errors,
        ...merged.errors,
    ];
    return { headers: merged.headers, errors };
};

// Parse every `_headers` file and concatenate the per-file results.
const getFileHeaders = async function (headersFiles) {
    const resultsArrays = await Promise.all(headersFiles.map(parseFileHeaders));
    return concatResults(resultsArrays);
};

// Parse `netlify.toml` headers, or return empty results when no
// configuration path was provided.
const getConfigHeaders = async function (netlifyConfigPath) {
    if (netlifyConfigPath === undefined) {
        return splitResults([]);
    }
    return await parseConfigHeaders(netlifyConfigPath);
};
@@ -0,0 +1 @@
1
+ export function getForRegExp(forPath: any): RegExp;
@@ -0,0 +1,29 @@
1
import escapeStringRegExp from 'escape-string-regexp';

// Build the `RegExp` used to match a header rule's `for` path against
// request paths. Matching is case-insensitive (`i` flag) and tolerates an
// optional trailing slash.
export const getForRegExp = function (forPath) {
    const pattern = forPath
        .split('/')
        .map((segment) => segment.trimEnd())
        .filter(Boolean)
        .map(segmentToPattern)
        .join('/');
    return new RegExp(`^/${pattern}/?$`, 'iu');
};

// Translate one path segment into a regular expression fragment.
const segmentToPattern = function (segment) {
    // Placeholder like `/segment/:placeholder/test`.
    // Matches everything up to a `/`.
    if (segment.startsWith(':')) {
        return '([^/]+)';
    }
    // Standalone catch-all wildcard like `/segment/*`.
    // Unlike `:placeholder`, the whole segment is optional.
    if (segment === '*') {
        return '?(.*)';
    }
    // Non-standalone catch-all wildcard like `/segment/hello*world/test`.
    if (segment.includes('*')) {
        // @todo use `segment.replaceAll('*', ...)` after dropping support for
        // Node <15.0.0
        return segment.replace(CATCH_ALL_CHAR_REGEXP, '(.*)');
    }
    // Literal segment: escape any regexp metacharacters it contains.
    return escapeStringRegExp(segment);
};

const CATCH_ALL_CHAR_REGEXP = /\*/g;
package/lib/index.d.ts ADDED
@@ -0,0 +1 @@
1
+ export { parseAllHeaders } from './all.js';
package/lib/index.js ADDED
@@ -0,0 +1 @@
1
+ export { parseAllHeaders } from './all.js';
@@ -0,0 +1,4 @@
1
+ export declare const parseFileHeaders: (headersFile: string) => Promise<{
2
+ headers: any;
3
+ errors: any[];
4
+ }>;
@@ -0,0 +1,84 @@
1
import { promises as fs } from 'fs';
import { pathExists } from 'path-exists';
import { splitResults } from './results.js';

// Parse a `_headers` file to an array of objects following the same syntax
// as the `headers` property in `netlify.toml`. Parse problems are collected
// into `errors` instead of being thrown.
export const parseFileHeaders = async function (headersFile) {
    const results = await parseHeaders(headersFile);
    const { headers, errors: parseErrors } = splitResults(results);
    const { headers: reducedHeaders, errors: reducedErrors } = headers.reduce(reduceLine, { headers: [], errors: [] });
    return { headers: reducedHeaders, errors: [...parseErrors, ...reducedErrors] };
};

// Read the file and parse each meaningful line. A missing file yields no
// results; a read failure yields a single-element `Error` array.
const parseHeaders = async function (headersFile) {
    if (!(await pathExists(headersFile))) {
        return [];
    }
    const text = await readHeadersFile(headersFile);
    if (typeof text !== 'string') {
        // `readHeadersFile()` returned an `Error`
        return [text];
    }
    return text
        .split('\n')
        .map((line, index) => ({ line: line.trim(), index }))
        .filter(hasHeader)
        .map(parseLine)
        .filter(Boolean);
};

// Read the file, converting I/O failures into a returned `Error`.
const readHeadersFile = async function (headersFile) {
    try {
        return await fs.readFile(headersFile, 'utf-8');
    }
    catch {
        return new Error(`Could not read headers file: ${headersFile}`);
    }
};

// Keep only meaningful lines: skip blank lines and `#` comments.
const hasHeader = function ({ line }) {
    return line !== '' && !line.startsWith('#');
};

// Parse one line, converting thrown syntax errors into returned `Error`
// values so the remaining lines are still parsed.
const parseLine = function ({ line, index }) {
    try {
        return parseHeaderLine(line);
    }
    catch (error) {
        return new Error(`Could not parse header line ${index + 1}:
${line}
${error.message}`);
    }
};

// Parse a single header line: either a path line (starts with `/`) or a
// `Name: value` pair. Lines without a `:` are silently ignored.
const parseHeaderLine = function (line) {
    if (line.startsWith('/')) {
        return { path: line };
    }
    const separatorIndex = line.indexOf(HEADER_SEPARATOR);
    if (separatorIndex === -1) {
        return;
    }
    // Only the first `:` separates name from value, so values may contain `:`.
    const name = line.slice(0, separatorIndex).trim();
    if (name === '') {
        throw new Error(`Missing header name`);
    }
    const value = line.slice(separatorIndex + 1).trim();
    if (value === '') {
        throw new Error(`Missing header value`);
    }
    return { name, value };
};

const HEADER_SEPARATOR = ':';

// Accumulate parsed lines into header objects: a path line starts a new
// `{ for, values }` entry, and each `Name: value` line attaches to the most
// recent one. A repeated name for the same path is concatenated as a
// comma-separated list. A value before any path is an error.
const reduceLine = function ({ headers, errors }, { path, name, value }) {
    if (path !== undefined) {
        return { headers: [...headers, { for: path, values: {} }], errors };
    }
    if (headers.length === 0) {
        const error = new Error(`Path should come before header "${name}"`);
        return { headers, errors: [...errors, error] };
    }
    const previousHeaders = headers.slice(0, -1);
    const currentHeader = headers[headers.length - 1];
    const { values } = currentHeader;
    const newValue = values[name] === undefined ? value : `${values[name]}, ${value}`;
    return {
        headers: [...previousHeaders, { ...currentHeader, values: { ...values, [name]: newValue } }],
        errors,
    };
};
package/lib/merge.d.ts ADDED
@@ -0,0 +1,8 @@
1
+ import type { Header } from './types.js';
2
+ export declare const mergeHeaders: ({ fileHeaders, configHeaders, }: {
3
+ fileHeaders: (Error | Header)[];
4
+ configHeaders: (Error | Header)[];
5
+ }) => {
6
+ headers: Header[];
7
+ errors: Error[];
8
+ };
package/lib/merge.js ADDED
@@ -0,0 +1,50 @@
1
import stringify from 'fast-safe-stringify';
import { splitResults } from './results.js';

// Merge headers from `_headers` with the ones from `netlify.toml`.
// When:
//  - Both `netlify.toml` headers and `_headers` are specified, paths are
//    merged. If the same path is specified in both, their headers are merged.
//    If the same path and header is specified both in `netlify.toml` and
//    `_headers`, the one in `netlify.toml` is used (i.e. overrides, does not
//    concatenate).
//  - The same path is specified twice in `netlify.toml` headers, their
//    headers are merged. If the same header is specified twice in different
//    places for the same path, the later one overrides (does not concatenate)
//    any earlier ones. If the same header is specified twice in the same
//    place for the same path, it is concatenated as a comma-separated list
//    string.
//  - The same path is specified twice in `_headers`, the behavior is the same
//    as `netlify.toml` headers.
export const mergeHeaders = function ({ fileHeaders, configHeaders, }) {
    const { headers, errors } = splitResults([...fileHeaders, ...configHeaders]);
    return { headers: removeDuplicates(headers), errors };
};

// Remove duplicates. This is especially likely considering `fileHeaders`
// might have been previously merged to `configHeaders`, which happens when
// `netlifyConfig.headers` is modified by plugins.
// The latest duplicate value is the one kept, hence iterating the array
// backwards and reversing the result at the end.
const removeDuplicates = function (headers) {
    const seenKeys = new Set();
    const deduplicated = [];
    for (let index = headers.length - 1; index >= 0; index--) {
        const header = headers[index];
        const key = generateHeaderKey(header);
        if (!seenKeys.has(key)) {
            seenKeys.add(key);
            deduplicated.push(header);
        }
    }
    // Restore the original (forward) ordering.
    return deduplicated.reverse();
};

// Generate a unique header key based on stable JSON stringification.
// Because some properties can be regexes, those are replaced by their
// `toString()` representation, given the default would be an empty object.
const generateHeaderKey = function (header) {
    return stringify.default.stableStringify(header, (_, value) => (value instanceof RegExp ? value.toString() : value));
};
@@ -0,0 +1,4 @@
1
+ export declare const parseConfigHeaders: (netlifyConfigPath: string) => Promise<{
2
+ headers: any[];
3
+ errors: Error[];
4
+ }>;
@@ -0,0 +1,30 @@
1
import { promises as fs } from 'fs';
import { parse as loadToml } from '@iarna/toml';
import { pathExists } from 'path-exists';
import { splitResults } from './results.js';

// Parse the `headers` field in "netlify.toml" to an array of objects.
// This field is already an array of objects, so this only validates and
// normalizes it. A missing file yields empty results.
export const parseConfigHeaders = async function (netlifyConfigPath) {
    if (!(await pathExists(netlifyConfigPath))) {
        return splitResults([]);
    }
    return splitResults(await parseConfig(netlifyConfigPath));
};

// Load and parse the TOML configuration file. Failures are returned as a
// single-element `Error` array instead of being thrown.
const parseConfig = async function (configPath) {
    try {
        const source = await fs.readFile(configPath, 'utf8');
        const parsed = loadToml(source);
        // Round-trip through JSON to convert `null`-prototype objects
        // (as produced by the TOML parser) into normal plain objects.
        const { headers = [] } = JSON.parse(JSON.stringify(parsed));
        if (!Array.isArray(headers)) {
            throw new TypeError(`"headers" must be an array`);
        }
        return headers;
    }
    catch (error) {
        return [new Error(`Could not parse configuration file: ${error}`)];
    }
};
@@ -0,0 +1,8 @@
1
+ import type { Header } from './types.js';
2
+ export declare const normalizeHeaders: (headers: any, minimal: boolean) => {
3
+ headers: Header[];
4
+ errors: Error[];
5
+ } | {
6
+ headers: TypeError[];
7
+ errors: Error[];
8
+ };
@@ -0,0 +1,112 @@
1
import isPlainObj from 'is-plain-obj';
import mapObj from 'map-obj';
import { getForRegExp } from './for_regexp.js';
import { splitResults } from './results.js';

// Validate and normalize an array of `headers` objects.
// This step is performed after `headers` have been parsed from either
// `netlify.toml` or `_headers`. Invalid entries become `Error`s in the
// result so the valid ones are still kept.
export const normalizeHeaders = function (headers, minimal) {
    if (!Array.isArray(headers)) {
        return splitResults([new TypeError(`Headers must be an array not: ${headers}`)]);
    }
    const results = headers.map((header, index) => parseHeader(header, index, minimal)).filter(Boolean);
    return splitResults(results);
};

// Validate one `headers` entry, converting thrown validation errors into
// returned `Error` values.
const parseHeader = function (header, index, minimal) {
    if (!isPlainObj(header)) {
        return new TypeError(`Header must be an object not: ${header}`);
    }
    try {
        return parseHeaderObject(header, minimal);
    }
    catch (error) {
        return new Error(`Could not parse header number ${index + 1}:
${JSON.stringify(header)}
${error.message}`);
    }
};

// Parse a single `headers` object. Entries with no `values` (or only empty
// ones) are skipped by returning `undefined`. The `for` path is validated
// first, so a missing path is reported even when `values` is absent.
const parseHeaderObject = function ({ for: rawPath, values: rawValues }, minimal) {
    const forPath = normalizePath(rawPath);
    if (rawValues === undefined) {
        return;
    }
    const values = normalizeValues(rawValues);
    if (Object.keys(values).length === 0) {
        return;
    }
    const header = { for: forPath, values };
    // `forRegExp` is derived data; `minimal` mode skips computing it.
    if (!minimal) {
        header.forRegExp = getForRegExp(forPath);
    }
    return header;
};

// Normalize and validate the `for` field.
const normalizePath = function (rawPath) {
    if (rawPath === undefined) {
        throw new TypeError('Missing "for" field');
    }
    if (typeof rawPath !== 'string') {
        throw new TypeError(`"for" must be a string not: ${rawPath}`);
    }
    return rawPath.trim();
};

// Normalize and validate the `values` field.
const normalizeValues = function (rawValues) {
    if (!isPlainObj(rawValues)) {
        throw new TypeError(`"values" must be an object not: ${rawValues}`);
    }
    return mapObj(rawValues, normalizeValue);
};

// Normalize and validate a single `[name, value]` pair from `values`.
const normalizeValue = function (rawKey, rawValue) {
    const key = rawKey.trim();
    // NOTE(review): the literal string 'undefined' presumably guards against
    // keys serialized from a missing name upstream — confirm with callers.
    if (key === '' || key === 'undefined') {
        throw new Error('Empty header name');
    }
    return [key, normalizeRawValue(key, rawValue)];
};

// A value may be a single string or an array of strings; arrays are joined
// into a comma-separated list. Anything else is rejected.
const normalizeRawValue = function (key, rawValue) {
    if (typeof rawValue === 'string') {
        return normalizeMultipleValues(normalizeStringValue(rawValue));
    }
    if (Array.isArray(rawValue)) {
        return rawValue
            .map((singleValue, index) => normalizeArrayItemValue(`${key}[${index}]`, singleValue))
            .join(', ');
    }
    throw new TypeError(`Header "${key}" value must be a string not: ${rawValue}`);
};

// Multiple values can be specified by using whitespace and commas.
// For example:
//   [[headers]]
//   for = "/*"
//     [headers.values]
//     cache-control = '''
//     max-age=0,
//     no-cache,
//     no-store,
//     must-revalidate'''
// Is normalized to:
//   [[headers]]
//   for = "/*"
//     [headers.values]
//     cache-control = "max-age=0, no-cache, no-store, must-revalidate"
const normalizeMultipleValues = function (value) {
    return value.split(MULTIPLE_VALUES_REGEXP).join(', ');
};

const MULTIPLE_VALUES_REGEXP = /\s*,\s*/g;

// Validate and trim one element of an array-valued header.
const normalizeArrayItemValue = function (key, singleValue) {
    if (typeof singleValue !== 'string') {
        throw new TypeError(`Header "${key}" value must be a string not: ${singleValue}`);
    }
    return normalizeStringValue(singleValue);
};

const normalizeStringValue = function (stringValue) {
    return stringValue.trim();
};
@@ -0,0 +1,12 @@
1
+ import type { Header } from './types.js';
2
+ export declare function splitResults<Type>(results: (Error | Type)[]): {
3
+ headers: Type[];
4
+ errors: Error[];
5
+ };
6
+ export declare const concatResults: (resultsArrays: {
7
+ headers: Header[];
8
+ errors: Error[];
9
+ }[]) => {
10
+ headers: Header[];
11
+ errors: Error[];
12
+ };
package/lib/results.js ADDED
@@ -0,0 +1,15 @@
1
// If one header fails to parse, we still try to return the other ones:
// partition `results` into successfully parsed headers and `Error`s.
export function splitResults(results) {
    const errors = results.filter((result) => result instanceof Error);
    const headers = results.filter((result) => !(result instanceof Error));
    return { headers, errors };
}

// Concatenate an array of `{ headers, errors }` into a single one,
// preserving order.
export const concatResults = function (resultsArrays) {
    return {
        headers: resultsArrays.flatMap(({ headers }) => headers),
        errors: resultsArrays.flatMap(({ errors }) => errors),
    };
};
package/lib/types.d.ts ADDED
@@ -0,0 +1,7 @@
1
+ export type Header = {
2
+ for: string;
3
+ forRegExp?: RegExp;
4
+ values: {
5
+ [key: string]: string;
6
+ };
7
+ };
package/lib/types.js ADDED
@@ -0,0 +1 @@
1
+ export {};
package/package.json ADDED
@@ -0,0 +1,51 @@
1
+ {
2
+ "name": "@netlify/headers-parser",
3
+ "version": "7.2.0",
4
+ "description": "Parses Netlify headers into a JavaScript object representation",
5
+ "type": "module",
6
+ "exports": "./lib/index.js",
7
+ "main": "./lib/index.js",
8
+ "types": "./lib/index.d.js",
9
+ "files": [
10
+ "lib/**/*"
11
+ ],
12
+ "scripts": {
13
+ "prebuild": "rm -rf lib",
14
+ "build": "tsc",
15
+ "test": "vitest run",
16
+ "test:bench": "vitest bench",
17
+ "test:dev": "vitest",
18
+ "test:ci": "vitest run --reporter=default && vitest bench --run --passWithNoTests"
19
+ },
20
+ "keywords": [
21
+ "netlify"
22
+ ],
23
+ "engines": {
24
+ "node": "^14.16.0 || >=16.0.0"
25
+ },
26
+ "author": "Netlify",
27
+ "license": "MIT",
28
+ "dependencies": {
29
+ "@iarna/toml": "^2.2.5",
30
+ "escape-string-regexp": "^5.0.0",
31
+ "fast-safe-stringify": "^2.0.7",
32
+ "is-plain-obj": "^4.0.0",
33
+ "map-obj": "^5.0.0",
34
+ "path-exists": "^5.0.0"
35
+ },
36
+ "devDependencies": {
37
+ "@types/node": "^14.18.53",
38
+ "typescript": "^5.0.0",
39
+ "vitest": "^0.34.0"
40
+ },
41
+ "repository": {
42
+ "type": "git",
43
+ "url": "https://github.com/netlify/build.git",
44
+ "directory": "packages/headers-parser"
45
+ },
46
+ "bugs": {
47
+ "url": "https://github.com/netlify/build/issues"
48
+ },
49
+ "homepage": "https://github.com/netlify/build#readme",
50
+ "gitHead": "131a644bfde5205f730f3369b778d8914c7c0382"
51
+ }