@weborigami/origami 0.3.0 → 0.3.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +6 -4
- package/src/dev/crawler/audit.js +85 -0
- package/src/{site → dev}/crawler/crawl.js +3 -66
- package/src/{site → dev}/crawler/crawlResources.js +44 -18
- package/src/dev/crawler/findPaths.js +90 -0
- package/src/dev/crawler/pathsInCss.js +51 -0
- package/src/dev/crawler/pathsInHtml.js +161 -0
- package/src/dev/crawler/pathsInImageMap.js +25 -0
- package/src/dev/crawler/pathsInJs.js +140 -0
- package/src/dev/crawler/pathsInRobotsTxt.js +20 -0
- package/src/dev/crawler/pathsInSitemap.js +20 -0
- package/src/dev/crawler/utilities.js +125 -0
- package/src/dev/dev.js +2 -0
- package/src/handlers/handlers.js +2 -0
- package/src/handlers/ts.handler.js +1 -0
- package/src/help/help.yaml +6 -6
- package/src/origami/csv.js +6 -1
- package/src/site/site.js +0 -2
- package/src/text/htmlDom.js +6 -0
- package/src/text/text.js +1 -0
- package/src/tree/map.js +7 -29
- package/src/site/audit.js +0 -19
- package/src/site/crawler/findPaths.js +0 -266
- package/src/site/crawler/utilities.js +0 -37
- package/src/tree/parseExtensions.js +0 -44
--- package/src/site/crawler/findPaths.js
+++ /dev/null
@@ -1,266 +0,0 @@
-import { extension, toString } from "@weborigami/async-tree";
-import { isCrawlableHref, normalizeHref } from "./utilities.js";
-
-// Filter the paths to those that are local to the site.
-function filterPaths(paths, baseUrl, localPath) {
-  // Convert paths to absolute URLs.
-  const localUrl = new URL(localPath, baseUrl);
-  const basePathname = baseUrl.pathname;
-  // @ts-ignore
-  const absoluteUrls = paths.map((path) => new URL(path, localUrl));
-
-  // Convert the absolute URLs to paths relative to the baseHref. If the URL
-  // points outside the tree rooted at the baseHref, the relative path will be
-  // null. We ignore the protocol in this test, because in practice sites often
-  // fumble the use of http and https, treating them interchangeably.
-  const relativePaths = absoluteUrls.map((url) => {
-    if (url.host === baseUrl.host && url.pathname.startsWith(basePathname)) {
-      const path = url.pathname.slice(basePathname.length);
-      // The process of creating the URLs will have escaped characters. We
-      // remove them. This has the side-effect of removing them if they existed
-      // in the original path; it would be better if we avoided that.
-      return decodeURIComponent(path);
-    } else {
-      return null;
-    }
-  });
-
-  // Filter out the null paths.
-  /** @type {string[]} */
-  // @ts-ignore
-  const filteredPaths = relativePaths.filter((path) => path);
-  return filteredPaths;
-}
-
-/**
- * Given a value retrieved from a site using a given key (name), determine what
- * kind of file it is and, based on that, find the paths it references.
- */
-export default function findPaths(value, key, baseUrl, localPath) {
-  const text = toString(value);
-
-  // We guess the value is HTML is if its key has an .html extension or
-  // doesn't have an extension, or the value starts with `<`.
-  const ext = key ? extension.extname(key).toLowerCase() : "";
-  let foundPaths;
-  if (ext === ".html" || ext === ".htm" || ext === ".xhtml") {
-    foundPaths = findPathsInHtml(text);
-  } else if (ext === ".css") {
-    foundPaths = findPathsInCss(text);
-  } else if (ext === ".js") {
-    foundPaths = findPathsInJs(text);
-  } else if (ext === ".map") {
-    foundPaths = findPathsInImageMap(text);
-  } else if (key === "robots.txt") {
-    foundPaths = findPathsInRobotsTxt(text);
-  } else if (key === "sitemap.xml") {
-    foundPaths = findPathsInSitemapXml(text);
-  } else if (ext === "" && text?.trim().startsWith("<")) {
-    // Probably HTML
-    foundPaths = findPathsInHtml(text);
-  } else {
-    // Doesn't have an extension we want to process
-    return {
-      crawlablePaths: [],
-      resourcePaths: [],
-    };
-  }
-
-  const crawlablePaths = filterPaths(
-    foundPaths.crawlablePaths,
-    baseUrl,
-    localPath
-  );
-
-  const resourcePaths = filterPaths(
-    foundPaths.resourcePaths,
-    baseUrl,
-    localPath
-  );
-
-  return {
-    crawlablePaths,
-    resourcePaths,
-  };
-}
-
-function findPathsInCss(css) {
-  const resourcePaths = [];
-  let match;
-
-  // Find `url()` functions.
-  const urlRegex = /url\(["']?(?<href>[^"')]*?)["']?\)/g;
-  while ((match = urlRegex.exec(css))) {
-    const href = normalizeHref(match.groups?.href);
-    if (href) {
-      resourcePaths.push(href);
-    }
-  }
-
-  return {
-    crawlablePaths: [],
-    resourcePaths,
-  };
-}
-
-// These are ancient server-side image maps. They're so old that it's hard to
-// find documentation on them, but they're used on the reference Space Jam
-// website we use for testing the crawler. Example:
-// https://www.spacejam.com/1996/bin/bball.map
-function findPathsInImageMap(imageMap) {
-  const resourcePaths = [];
-  let match;
-
-  // Find hrefs as the second column in each line.
-  const hrefRegex = /^\w+ (?<href>\S+)(\s*$| [\d, ]+$)/gm;
-  while ((match = hrefRegex.exec(imageMap))) {
-    const href = normalizeHref(match.groups?.href);
-    if (href) {
-      resourcePaths.push(href);
-    }
-  }
-
-  return {
-    crawlablePaths: [],
-    resourcePaths,
-  };
-}
-
-function findPathsInJs(js) {
-  const crawlablePaths = [];
-  let match;
-
-  // Find `import` statements.
-  const importRegex = /import [\s\S]+?from\s+["'](?<import>[^"']*)["'];/g;
-  while ((match = importRegex.exec(js))) {
-    const href = normalizeHref(match.groups?.import);
-    if (href) {
-      crawlablePaths.push(href);
-    }
-  }
-
-  return {
-    crawlablePaths,
-    resourcePaths: [],
-  };
-}
-
-function findPathsInHtml(html) {
-  const crawlablePaths = [];
-  const resourcePaths = [];
-  let match;
-
-  // Find `href` attributes in anchor and link tags.
-  const linkRegex =
-    /<(?:a|A|link|LINK)[\s][^>]*?(?:href|HREF)=["'](?<link>[^>]*?)["'][^>]*>/g;
-  while ((match = linkRegex.exec(html))) {
-    // Links can point to be other crawlable paths and resource paths.
-    // We guess the type based on the extension.
-    const href = normalizeHref(match.groups?.link);
-    if (href) {
-      if (isCrawlableHref(href)) {
-        crawlablePaths.push(href);
-      } else {
-        resourcePaths.push(href);
-      }
-    }
-  }
-
-  // Find `src` attributes in img and script tags.
-  const srcRegex =
-    /<(?<tag>img|IMG|script|SCRIPT)[\s][^>]*?(?:src|SRC)=["'](?<src>[^>]*?)["'][^>]*>/g;
-  while ((match = srcRegex.exec(html))) {
-    const tag = match.groups?.tag;
-    const src = normalizeHref(match.groups?.src);
-    if (src) {
-      if (tag === "script" || tag === "SCRIPT") {
-        crawlablePaths.push(src);
-      } else {
-        resourcePaths.push(src);
-      }
-    }
-  }
-
-  // Find `url()` functions in CSS.
-  const urlRegex = /url\(["']?(?<href>[^"')]*?)["']?\)/g;
-  while ((match = urlRegex.exec(html))) {
-    const href = normalizeHref(match.groups?.href);
-    if (href) {
-      resourcePaths.push(href);
-    }
-  }
-
-  // Find `src` attribute on frame tags.
-  const frameRegex =
-    /<(?:frame|FRAME)[\s][^>]*?(?:src|SRC)=["'](?<href>[^>]*?)["'][^>]*>/g;
-  while ((match = frameRegex.exec(html))) {
-    const href = normalizeHref(match.groups?.href);
-    if (href) {
-      crawlablePaths.push(href);
-    }
-  }
-
-  // Find ancient `background` attribute on body tag.
-  const backgroundRegex =
-    /<(?:body|BODY)[\s][^>]*?(?:background|BACKGROUND)=["'](?<href>[^>]*?)["'][^>]*>/g;
-  while ((match = backgroundRegex.exec(html))) {
-    const href = normalizeHref(match.groups?.href);
-    if (href) {
-      resourcePaths.push(href);
-    }
-  }
-
-  // Find `href` attribute on area tags.
-  const areaRegex =
-    /<(?:area|AREA)[\s][^>]*?(?:href|HREF)=["'](?<href>[^>]*?)["'][^>]*>/g;
-  while ((match = areaRegex.exec(html))) {
-    const href = normalizeHref(match.groups?.href);
-    if (href) {
-      crawlablePaths.push(href);
-    }
-  }
-
-  // Also look for JS `import` statements that might be in <script type="module"> tags.
-  const jsResults = findPathsInJs(html);
-  crawlablePaths.push(...jsResults.crawlablePaths);
-
-  return { crawlablePaths, resourcePaths };
-}
-
-function findPathsInRobotsTxt(txt) {
-  const crawlablePaths = [];
-  let match;
-
-  // Find `Sitemap` directives.
-  const sitemapRegex = /Sitemap:\s*(?<href>[^\s]*)/g;
-  while ((match = sitemapRegex.exec(txt))) {
-    const href = normalizeHref(match.groups?.href);
-    if (href) {
-      crawlablePaths.push(href);
-    }
-  }
-
-  return {
-    crawlablePaths,
-    resourcePaths: [],
-  };
-}
-
-function findPathsInSitemapXml(xml) {
-  const crawlablePaths = [];
-  let match;
-
-  // Find `loc` elements.
-  const locRegex = /<loc>(?<href>[^<]*)<\/loc>/g;
-  while ((match = locRegex.exec(xml))) {
-    const href = normalizeHref(match.groups?.href);
-    if (href) {
-      crawlablePaths.push(href);
-    }
-  }
-
-  return {
-    crawlablePaths,
-    resourcePaths: [],
-  };
-}

--- package/src/site/crawler/utilities.js
+++ /dev/null
@@ -1,37 +0,0 @@
-import { extension, trailingSlash } from "@weborigami/async-tree";
-
-// A fake base URL used to handle cases where an href is relative and must be
-// treated relative to some base URL.
-const fakeBaseUrl = new URL("https://fake");
-
-export function isCrawlableHref(href) {
-  // Use a fake base URL to cover the case where the href is relative.
-  const url = new URL(href, fakeBaseUrl);
-  const pathname = url.pathname;
-  const lastKey = pathname.split("/").pop() ?? "";
-  if (lastKey === "robots.txt" || lastKey === "sitemap.xml") {
-    return true;
-  }
-  const ext = extension.extname(lastKey);
-  // We assume an empty extension is HTML.
-  const crawlableExtensions = [".html", ".css", ".js", ".map", ".xhtml", ""];
-  return crawlableExtensions.includes(ext);
-}
-
-// Remove any search parameters or hash from the href. Preserve absolute or
-// relative nature of URL. If the URL only has a search or hash, return null.
-export function normalizeHref(href) {
-  // Remove everything after a `#` or `?` character.
-  const normalized = href.split(/[?#]/)[0];
-  return normalized === "" ? null : normalized;
-}
-
-// For indexing and storage purposes, treat a path that ends in a trailing slash
-// as if it ends in index.html.
-export function normalizeKeys(keys) {
-  const normalized = keys.slice();
-  if (normalized.length === 0 || trailingSlash.has(normalized.at(-1))) {
-    normalized.push("index.html");
-  }
-  return normalized;
-}

--- package/src/tree/parseExtensions.js
+++ /dev/null
@@ -1,44 +0,0 @@
-/**
- * Given a string specifying an extension or a mapping of one extension to another,
- * return the source and result extensions.
- *
- * Syntax:
- *   .foo         source and result extension are the same
- *   .foo→.bar    Unicode Rightwards Arrow
- *   .foo→        Unicode Rightwards Arrow, no result extension
- *   →.bar        Unicode Rightwards Arrow, no source extension
- *   .foo->.bar   hyphen and greater-than sign
- *
- * @param {string} specifier
- */
-export default function parseExtensions(specifier) {
-  const lowercase = specifier?.toLowerCase() ?? "";
-  const extensionRegex =
-    /^((?<sourceExtension>\/|\.\S*)?\s*(→|->)\s*(?<resultExtension>\/|\.\S*)?)|(?<extension>\/|\.\S*)$/;
-  const match = lowercase.match(extensionRegex);
-  if (!match) {
-    throw new Error(`Invalid file extension specifier "${specifier}".`);
-  }
-
-  // @ts-ignore
-  let { extension, resultExtension, sourceExtension } = match.groups;
-  if (extension) {
-    // foo
-    return {
-      resultExtension: extension,
-      sourceExtension: extension,
-    };
-  } else {
-    // foo→bar
-
-    if (resultExtension === undefined && sourceExtension === undefined) {
-      throw new Error(
-        `A file extension mapping must indicate a source or result extension: "${specifier}".`
-      );
-    }
-
-    resultExtension ??= "";
-    sourceExtension ??= "";
-    return { resultExtension, sourceExtension };
-  }
-}