@vercel/agent-readability 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +227 -0
- package/dist/cli/index.cjs +228 -0
- package/dist/index.cjs +175 -0
- package/dist/index.cjs.map +1 -0
- package/dist/index.d.cts +82 -0
- package/dist/index.d.ts +82 -0
- package/dist/index.js +141 -0
- package/dist/index.js.map +1 -0
- package/dist/next/index.cjs +166 -0
- package/dist/next/index.cjs.map +1 -0
- package/dist/next/index.d.cts +35 -0
- package/dist/next/index.d.ts +35 -0
- package/dist/next/index.js +140 -0
- package/dist/next/index.js.map +1 -0
- package/package.json +91 -0
- package/skill/SKILL.md +171 -0
|
@@ -0,0 +1,166 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __defProp = Object.defineProperty;
|
|
3
|
+
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
|
4
|
+
var __getOwnPropNames = Object.getOwnPropertyNames;
|
|
5
|
+
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
|
6
|
+
var __export = (target, all) => {
|
|
7
|
+
for (var name in all)
|
|
8
|
+
__defProp(target, name, { get: all[name], enumerable: true });
|
|
9
|
+
};
|
|
10
|
+
var __copyProps = (to, from, except, desc) => {
|
|
11
|
+
if (from && typeof from === "object" || typeof from === "function") {
|
|
12
|
+
for (let key of __getOwnPropNames(from))
|
|
13
|
+
if (!__hasOwnProp.call(to, key) && key !== except)
|
|
14
|
+
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
|
15
|
+
}
|
|
16
|
+
return to;
|
|
17
|
+
};
|
|
18
|
+
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
|
19
|
+
|
|
20
|
+
// src/next/index.ts
// Register the public API as lazy getters, then expose it as the CJS
// module namespace. Getter indirection lets the hoisted declarations
// below be referenced before they are evaluated.
var next_exports = {};
var exportMap = {
  agentReadabilityMatcher: () => agentReadabilityMatcher,
  withAgentReadability: () => withAgentReadability
};
__export(next_exports, exportMap);
module.exports = __toCommonJS(next_exports);
var import_server = require("next/server");
|
|
28
|
+
|
|
29
|
+
// src/patterns.ts
// Layer 1: user-agent substrings of known AI agents (all lowercase).
var AI_AGENT_UA_PATTERNS = [
  // Anthropic — https://support.claude.com/en/articles/8896518
  "claudebot", "claude-searchbot", "claude-user", "anthropic-ai", "claude-web",
  // OpenAI — https://platform.openai.com/docs/bots
  "chatgpt", "gptbot", "oai-searchbot", "openai",
  // Google AI
  "gemini", "bard", "google-cloudvertexbot", "google-extended",
  // Meta
  "meta-externalagent", "meta-externalfetcher", "meta-webindexer",
  // Search/Research AI
  "perplexity", "youbot", "you.com", "deepseekbot",
  // Coding assistants
  "cursor", "github-copilot", "codeium", "tabnine", "sourcegraph",
  // Other AI agents / data scrapers
  "cohere-ai", "bytespider", "amazonbot", "ai2bot", "diffbot", "omgili", "omgilibot"
];
// Layer 2: domains recognized in the Signature-Agent header (RFC 9421).
var SIGNATURE_AGENT_DOMAINS = ["chatgpt.com"];
// Layer 3 exclusion list: traditional crawlers, social-preview fetchers and
// uptime monitors that must NOT trip the bot-like heuristic below.
var TRADITIONAL_BOT_PATTERNS = [
  "googlebot", "bingbot", "yandexbot", "baiduspider", "duckduckbot",
  "slurp", "msnbot", "facebot", "twitterbot", "linkedinbot",
  "whatsapp", "telegrambot", "pingdom", "uptimerobot", "newrelic",
  "datadog", "statuspage", "site24x7", "applebot"
];
// Broad bot-ish keyword regex; deliberately without word boundaries, since
// these fragments usually appear inside compound product names.
var BOT_LIKE_REGEX = /bot|agent|fetch|crawl|spider|search/i;

// src/detection.ts
// Classify a request as coming from an AI agent using three ordered layers:
//   1. known UA substring      -> { detected: true, method: "ua-match" }
//   2. Signature-Agent domain  -> { detected: true, method: "signature-agent" }
//   3. bot-like UA lacking sec-fetch-mode, and not a traditional bot
//                              -> { detected: true, method: "heuristic" }
// Anything else               -> { detected: false, method: null }
function isAIAgent(request) {
  const lowerUA = (request.headers.get("user-agent") ?? "").toLowerCase();

  // Layer 1: definitive user-agent match.
  const knownAgent = lowerUA !== "" &&
    AI_AGENT_UA_PATTERNS.some((needle) => lowerUA.includes(needle));
  if (knownAgent) {
    return { detected: true, method: "ua-match" };
  }

  // Layer 2: Signature-Agent header (RFC 9421, sent by ChatGPT agent).
  const signatureAgent = request.headers.get("signature-agent");
  if (signatureAgent) {
    const sig = signatureAgent.toLowerCase();
    if (SIGNATURE_AGENT_DOMAINS.some((domain) => sig.includes(domain))) {
      return { detected: true, method: "signature-agent" };
    }
  }

  // Layer 3: modern browsers send sec-fetch-mode on navigations, so its
  // absence plus a bot-like UA suggests a programmatic client — unless the
  // UA belongs to a known traditional (non-AI) bot.
  if (!request.headers.get("sec-fetch-mode") && lowerUA && BOT_LIKE_REGEX.test(lowerUA)) {
    const traditional = TRADITIONAL_BOT_PATTERNS.some((needle) => lowerUA.includes(needle));
    if (!traditional) {
      return { detected: true, method: "heuristic" };
    }
  }

  return { detected: false, method: null };
}
|
|
118
|
+
|
|
119
|
+
// src/negotiation.ts
// Media types that count as "wants markdown" by default.
var DEFAULT_MARKDOWN_TYPES = ["text/markdown", "text/x-markdown"];
// True when the request's Accept header mentions any markdown media type.
// `options.mediaTypes`, when provided, replaces the default list entirely.
function acceptsMarkdown(request, options) {
  const acceptHeader = request.headers.get("accept");
  if (!acceptHeader) return false;
  const lowered = acceptHeader.toLowerCase();
  const wanted = options?.mediaTypes ?? DEFAULT_MARKDOWN_TYPES;
  return wanted.some((mediaType) => lowered.includes(mediaType));
}
|
|
128
|
+
|
|
129
|
+
// src/next/index.ts
// Higher-order middleware wrapper: requests under the docs prefix that come
// from a detected AI agent (or that explicitly accept markdown) are rewritten
// to the markdown route produced by options.rewrite(); all other requests
// fall through to the optional inner handler, or to NextResponse.next().
function withAgentReadability(options, handler) {
  return async (req, event) => {
    const { pathname } = req.nextUrl;
    const docsPrefix = options.docsPrefix ?? "/docs";
    if (pathname.startsWith(docsPrefix)) {
      const detection = isAIAgent(req);
      const wantsMarkdown = acceptsMarkdown(req);
      if (detection.detected || wantsMarkdown) {
        if (options.onDetection) {
          const callbackResult = options.onDetection({
            path: pathname,
            method: detection.detected ? detection.method : "accept-header",
            userAgent: req.headers.get("user-agent")
          });
          // Async callbacks are handed to the platform so they can finish
          // after the response is returned (fire-and-forget).
          if (callbackResult instanceof Promise) {
            event.waitUntil(callbackResult);
          }
        }
        const target = req.nextUrl.clone();
        target.pathname = options.rewrite(pathname);
        return import_server.NextResponse.rewrite(target);
      }
    }
    if (handler) return handler(req, event);
    return import_server.NextResponse.next();
  };
}
|
|
158
|
+
// Recommended middleware matcher: a negative lookahead that skips Next.js
// internals (_next), API routes, any path containing a file extension, and
// common well-known endpoints (favicon/manifest/robots/health/status).
var agentReadabilityMatcher = [
  "/((?!_next|api|.*\\..*|favicon|manifest|robots|health|status).*)"
];
// Annotate the CommonJS export names for ESM import in node:
// NOTE: the `0 &&` expression never executes; Node's cjs-module-lexer reads
// it textually to discover named exports, so its exact shape must be kept.
0 && (module.exports = {
  agentReadabilityMatcher,
  withAgentReadability
});
//# sourceMappingURL=index.cjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../../src/next/index.ts","../../src/patterns.ts","../../src/detection.ts","../../src/negotiation.ts"],"sourcesContent":["import type { NextFetchEvent, NextRequest } from \"next/server\";\nimport { NextResponse } from \"next/server\";\nimport { isAIAgent } from \"../detection\";\nimport { acceptsMarkdown } from \"../negotiation\";\nimport type { DetectionMethod } from \"../types\";\n\nexport interface AgentReadabilityOptions {\n\t/** URL prefix to intercept. Default: '/docs' */\n\tdocsPrefix?: string;\n\t/** Maps request path to markdown route */\n\trewrite: (pathname: string) => string;\n\t/** Fire-and-forget callback. Async returns are passed to event.waitUntil(). */\n\tonDetection?: (info: {\n\t\tpath: string;\n\t\tmethod: DetectionMethod | \"accept-header\";\n\t\tuserAgent: string | null;\n\t}) => void | Promise<void>;\n}\n\nexport type MiddlewareHandler = (\n\treq: NextRequest,\n\tevent: NextFetchEvent,\n) => NextResponse | Response | Promise<NextResponse | Response>;\n\n/**\n * Higher-order function that wraps Next.js middleware with AI agent detection.\n * Detected AI agents or requests with Accept: text/markdown are rewritten\n * to the markdown route returned by `options.rewrite()`.\n *\n * Composes with existing middleware:\n * ```ts\n * export default withAgentReadability(\n * { rewrite: (p) => `/md${p}` },\n * (req, event) => existingMiddleware(req, event),\n * )\n * ```\n */\nexport function withAgentReadability(\n\toptions: AgentReadabilityOptions,\n\thandler?: MiddlewareHandler,\n): MiddlewareHandler {\n\treturn async (req: NextRequest, event: NextFetchEvent) => {\n\t\tconst { pathname } = req.nextUrl;\n\t\tconst prefix = options.docsPrefix ?? \"/docs\";\n\n\t\tif (pathname.startsWith(prefix)) {\n\t\t\tconst result = isAIAgent(req);\n\t\t\tconst markdownAccepted = acceptsMarkdown(req);\n\n\t\t\tif (result.detected || markdownAccepted) {\n\t\t\t\tif (options.onDetection) {\n\t\t\t\t\tconst method = result.detected ? 
result.method : \"accept-header\";\n\t\t\t\t\tconst detection = options.onDetection({\n\t\t\t\t\t\tpath: pathname,\n\t\t\t\t\t\tmethod,\n\t\t\t\t\t\tuserAgent: req.headers.get(\"user-agent\"),\n\t\t\t\t\t});\n\t\t\t\t\tif (detection instanceof Promise) {\n\t\t\t\t\t\tevent.waitUntil(detection);\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\tconst rewriteUrl = req.nextUrl.clone();\n\t\t\t\trewriteUrl.pathname = options.rewrite(pathname);\n\t\t\t\treturn NextResponse.rewrite(rewriteUrl);\n\t\t\t}\n\t\t}\n\n\t\tif (handler) return handler(req, event);\n\t\treturn NextResponse.next();\n\t};\n}\n\n/** Recommended matcher that excludes Next.js internals and static files. */\nexport const agentReadabilityMatcher = [\n\t\"/((?!_next|api|.*\\\\..*|favicon|manifest|robots|health|status).*)\",\n] as const;\n","/**\n * Layer 1: Known AI agent UA substrings (lowercase).\n * Curated from https://bots.fyi/?tags=ai_assistant + official vendor docs.\n * Last reviewed: 2026-03-20\n */\nexport const AI_AGENT_UA_PATTERNS: readonly string[] = [\n\t// Anthropic — https://support.claude.com/en/articles/8896518\n\t\"claudebot\",\n\t\"claude-searchbot\",\n\t\"claude-user\",\n\t\"anthropic-ai\",\n\t\"claude-web\",\n\n\t// OpenAI — https://platform.openai.com/docs/bots\n\t\"chatgpt\",\n\t\"gptbot\",\n\t\"oai-searchbot\",\n\t\"openai\",\n\n\t// Google AI\n\t\"gemini\",\n\t\"bard\",\n\t\"google-cloudvertexbot\",\n\t\"google-extended\",\n\n\t// Meta\n\t\"meta-externalagent\",\n\t\"meta-externalfetcher\",\n\t\"meta-webindexer\",\n\n\t// Search/Research AI\n\t\"perplexity\",\n\t\"youbot\",\n\t\"you.com\",\n\t\"deepseekbot\",\n\n\t// Coding assistants\n\t\"cursor\",\n\t\"github-copilot\",\n\t\"codeium\",\n\t\"tabnine\",\n\t\"sourcegraph\",\n\n\t// Other AI agents / data scrapers\n\t\"cohere-ai\",\n\t\"bytespider\",\n\t\"amazonbot\",\n\t\"ai2bot\",\n\t\"diffbot\",\n\t\"omgili\",\n\t\"omgilibot\",\n];\n\n/**\n * Layer 2: Known AI service URLs in Signature-Agent header (RFC 9421).\n */\nexport const 
SIGNATURE_AGENT_DOMAINS: readonly string[] = [\"chatgpt.com\"];\n\n/**\n * Layer 3: Traditional bot exclusion list. Bots that should NOT trigger the\n * heuristic layer (search engine crawlers, social previews, monitoring tools).\n */\nexport const TRADITIONAL_BOT_PATTERNS: readonly string[] = [\n\t\"googlebot\",\n\t\"bingbot\",\n\t\"yandexbot\",\n\t\"baiduspider\",\n\t\"duckduckbot\",\n\t\"slurp\",\n\t\"msnbot\",\n\t\"facebot\",\n\t\"twitterbot\",\n\t\"linkedinbot\",\n\t\"whatsapp\",\n\t\"telegrambot\",\n\t\"pingdom\",\n\t\"uptimerobot\",\n\t\"newrelic\",\n\t\"datadog\",\n\t\"statuspage\",\n\t\"site24x7\",\n\t\"applebot\",\n];\n\n/**\n * Broad regex for bot-like UA strings (used only in Layer 3 heuristic).\n * No word boundaries — keywords commonly appear in compound names.\n */\nexport const BOT_LIKE_REGEX: RegExp = /bot|agent|fetch|crawl|spider|search/i;\n","import {\n\tAI_AGENT_UA_PATTERNS,\n\tBOT_LIKE_REGEX,\n\tSIGNATURE_AGENT_DOMAINS,\n\tTRADITIONAL_BOT_PATTERNS,\n} from \"./patterns\";\nimport type { DetectionResult, MinimalRequest } from \"./types\";\n\n/**\n * Detects AI agents from HTTP request headers.\n *\n * Three detection layers (checked in order):\n * 1. Known UA patterns (definitive)\n * 2. Signature-Agent header (definitive, RFC 9421)\n * 3. Missing sec-fetch-mode heuristic (catches unknown bots)\n *\n * Optimizes for recall over precision: serving markdown to a non-AI bot\n * is low-harm; missing an AI agent means a worse experience.\n */\nexport function isAIAgent(request: MinimalRequest): DetectionResult {\n\tconst userAgent = request.headers.get(\"user-agent\");\n\tconst lowerUA = userAgent?.toLowerCase() ?? 
\"\";\n\n\t// Layer 1: Known UA pattern match\n\tif (lowerUA && AI_AGENT_UA_PATTERNS.some((pattern) => lowerUA.includes(pattern))) {\n\t\treturn { detected: true, method: \"ua-match\" };\n\t}\n\n\t// Layer 2: Signature-Agent header (RFC 9421, used by ChatGPT agent)\n\tconst signatureAgent = request.headers.get(\"signature-agent\");\n\tif (signatureAgent) {\n\t\tconst lowerSig = signatureAgent.toLowerCase();\n\t\tif (SIGNATURE_AGENT_DOMAINS.some((domain) => lowerSig.includes(domain))) {\n\t\t\treturn { detected: true, method: \"signature-agent\" };\n\t\t}\n\t}\n\n\t// Layer 3: Missing browser fingerprint heuristic\n\t// Real browsers (Chrome 76+, Firefox 90+, Safari 16.4+) send sec-fetch-mode\n\t// on navigation requests. Its absence signals a programmatic client.\n\tconst secFetchMode = request.headers.get(\"sec-fetch-mode\");\n\tif (!secFetchMode && lowerUA && BOT_LIKE_REGEX.test(lowerUA)) {\n\t\tconst isTraditionalBot = TRADITIONAL_BOT_PATTERNS.some((pattern) => lowerUA.includes(pattern));\n\t\tif (!isTraditionalBot) {\n\t\t\treturn { detected: true, method: \"heuristic\" };\n\t\t}\n\t}\n\n\treturn { detected: false, method: null };\n}\n","import { isAIAgent } from \"./detection\";\nimport type { DetectionResult, MinimalRequest } from \"./types\";\n\nconst DEFAULT_MARKDOWN_TYPES = [\"text/markdown\", \"text/x-markdown\"];\n\nexport interface AcceptMarkdownOptions {\n\tmediaTypes?: string[];\n}\n\n/**\n * Check if the request prefers markdown via the Accept header.\n */\nexport function acceptsMarkdown(request: MinimalRequest, options?: AcceptMarkdownOptions): boolean {\n\tconst accept = request.headers.get(\"accept\");\n\tif (!accept) return false;\n\n\tconst types = options?.mediaTypes ?? 
DEFAULT_MARKDOWN_TYPES;\n\tconst lowerAccept = accept.toLowerCase();\n\treturn types.some((type) => lowerAccept.includes(type));\n}\n\nexport interface ShouldServeMarkdownResult {\n\tserve: boolean;\n\treason: \"agent\" | \"accept-header\" | null;\n\tdetection: DetectionResult;\n}\n\n/**\n * Combines agent detection and content negotiation into one call.\n * Returns whether to serve markdown and why.\n */\nexport function shouldServeMarkdown(\n\trequest: MinimalRequest,\n\toptions?: AcceptMarkdownOptions,\n): ShouldServeMarkdownResult {\n\tconst detection = isAIAgent(request);\n\tif (detection.detected) {\n\t\treturn { serve: true, reason: \"agent\", detection };\n\t}\n\n\tif (acceptsMarkdown(request, options)) {\n\t\treturn { serve: true, reason: \"accept-header\", detection };\n\t}\n\n\treturn { serve: false, reason: null, detection };\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AACA,oBAA6B;;;ACItB,IAAM,uBAA0C;AAAA;AAAA,EAEtD;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EAGA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EAGA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EAGA;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EAGA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EAGA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EAGA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACD;AAKO,IAAM,0BAA6C,CAAC,aAAa;AAMjE,IAAM,2BAA8C;AAAA,EAC1D;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACD;AAMO,IAAM,iBAAyB;;;ACrE/B,SAAS,UAAU,SAA0C;AACnE,QAAM,YAAY,QAAQ,QAAQ,IAAI,YAAY;AAClD,QAAM,UAAU,WAAW,YAAY,KAAK;AAG5C,MAAI,WAAW,qBAAqB,KAAK,CAAC,YAAY,QAAQ,SAAS,OAAO,CAAC,GAAG;AACjF,WAAO,EAAE,UAAU,MAAM,QAAQ,WAAW;AAAA,EAC7C;AAGA,QAAM,iBAAiB,QAAQ,QAAQ,IAAI,iBAAiB;AAC5D,MAAI,gBAAgB;AACnB,UAAM,WAAW,eAAe,YAAY;AAC5C,QAAI,wBAAwB,KAAK,CAAC,WAAW,SAAS,SAAS,MAAM,CAAC,GAAG;AACxE,aAAO,EAAE,UAAU,MAAM,QAAQ,kBAAkB;AAAA,IACpD;AAAA,EACD;AAKA,QAAM,eAAe,QAAQ,QAAQ,IAA
I,gBAAgB;AACzD,MAAI,CAAC,gBAAgB,WAAW,eAAe,KAAK,OAAO,GAAG;AAC7D,UAAM,mBAAmB,yBAAyB,KAAK,CAAC,YAAY,QAAQ,SAAS,OAAO,CAAC;AAC7F,QAAI,CAAC,kBAAkB;AACtB,aAAO,EAAE,UAAU,MAAM,QAAQ,YAAY;AAAA,IAC9C;AAAA,EACD;AAEA,SAAO,EAAE,UAAU,OAAO,QAAQ,KAAK;AACxC;;;AC9CA,IAAM,yBAAyB,CAAC,iBAAiB,iBAAiB;AAS3D,SAAS,gBAAgB,SAAyB,SAA0C;AAClG,QAAM,SAAS,QAAQ,QAAQ,IAAI,QAAQ;AAC3C,MAAI,CAAC,OAAQ,QAAO;AAEpB,QAAM,QAAQ,SAAS,cAAc;AACrC,QAAM,cAAc,OAAO,YAAY;AACvC,SAAO,MAAM,KAAK,CAAC,SAAS,YAAY,SAAS,IAAI,CAAC;AACvD;;;AHkBO,SAAS,qBACf,SACA,SACoB;AACpB,SAAO,OAAO,KAAkB,UAA0B;AACzD,UAAM,EAAE,SAAS,IAAI,IAAI;AACzB,UAAM,SAAS,QAAQ,cAAc;AAErC,QAAI,SAAS,WAAW,MAAM,GAAG;AAChC,YAAM,SAAS,UAAU,GAAG;AAC5B,YAAM,mBAAmB,gBAAgB,GAAG;AAE5C,UAAI,OAAO,YAAY,kBAAkB;AACxC,YAAI,QAAQ,aAAa;AACxB,gBAAM,SAAS,OAAO,WAAW,OAAO,SAAS;AACjD,gBAAM,YAAY,QAAQ,YAAY;AAAA,YACrC,MAAM;AAAA,YACN;AAAA,YACA,WAAW,IAAI,QAAQ,IAAI,YAAY;AAAA,UACxC,CAAC;AACD,cAAI,qBAAqB,SAAS;AACjC,kBAAM,UAAU,SAAS;AAAA,UAC1B;AAAA,QACD;AAEA,cAAM,aAAa,IAAI,QAAQ,MAAM;AACrC,mBAAW,WAAW,QAAQ,QAAQ,QAAQ;AAC9C,eAAO,2BAAa,QAAQ,UAAU;AAAA,MACvC;AAAA,IACD;AAEA,QAAI,QAAS,QAAO,QAAQ,KAAK,KAAK;AACtC,WAAO,2BAAa,KAAK;AAAA,EAC1B;AACD;AAGO,IAAM,0BAA0B;AAAA,EACtC;AACD;","names":[]}
|
|
@@ -0,0 +1,35 @@
|
|
|
1
|
+
import { NextRequest, NextFetchEvent, NextResponse } from 'next/server';

/** How a request was classified as an AI agent: exact UA-substring match,
 * Signature-Agent header match, or the missing-sec-fetch-mode heuristic. */
type DetectionMethod = "ua-match" | "signature-agent" | "heuristic";

interface AgentReadabilityOptions {
    /** URL prefix to intercept. Default: '/docs' */
    docsPrefix?: string;
    /** Maps request path to markdown route */
    rewrite: (pathname: string) => string;
    /** Fire-and-forget callback. Async returns are passed to event.waitUntil(). */
    onDetection?: (info: {
        path: string;
        method: DetectionMethod | "accept-header";
        userAgent: string | null;
    }) => void | Promise<void>;
}
/** Shape of a Next.js middleware function; both the wrapped handler and the
 * value returned by `withAgentReadability` conform to it. */
type MiddlewareHandler = (req: NextRequest, event: NextFetchEvent) => NextResponse | Response | Promise<NextResponse | Response>;
/**
 * Higher-order function that wraps Next.js middleware with AI agent detection.
 * Detected AI agents or requests with Accept: text/markdown are rewritten
 * to the markdown route returned by `options.rewrite()`.
 *
 * Composes with existing middleware:
 * ```ts
 * export default withAgentReadability(
 *   { rewrite: (p) => `/md${p}` },
 *   (req, event) => existingMiddleware(req, event),
 * )
 * ```
 */
declare function withAgentReadability(options: AgentReadabilityOptions, handler?: MiddlewareHandler): MiddlewareHandler;
/** Recommended matcher that excludes Next.js internals and static files. */
declare const agentReadabilityMatcher: readonly ["/((?!_next|api|.*\\..*|favicon|manifest|robots|health|status).*)"];

export { type AgentReadabilityOptions, type MiddlewareHandler, agentReadabilityMatcher, withAgentReadability };
|
|
@@ -0,0 +1,35 @@
|
|
|
1
|
+
import { NextRequest, NextFetchEvent, NextResponse } from 'next/server';

/** How a request was classified as an AI agent: exact UA-substring match,
 * Signature-Agent header match, or the missing-sec-fetch-mode heuristic. */
type DetectionMethod = "ua-match" | "signature-agent" | "heuristic";

interface AgentReadabilityOptions {
    /** URL prefix to intercept. Default: '/docs' */
    docsPrefix?: string;
    /** Maps request path to markdown route */
    rewrite: (pathname: string) => string;
    /** Fire-and-forget callback. Async returns are passed to event.waitUntil(). */
    onDetection?: (info: {
        path: string;
        method: DetectionMethod | "accept-header";
        userAgent: string | null;
    }) => void | Promise<void>;
}
/** Shape of a Next.js middleware function; both the wrapped handler and the
 * value returned by `withAgentReadability` conform to it. */
type MiddlewareHandler = (req: NextRequest, event: NextFetchEvent) => NextResponse | Response | Promise<NextResponse | Response>;
/**
 * Higher-order function that wraps Next.js middleware with AI agent detection.
 * Detected AI agents or requests with Accept: text/markdown are rewritten
 * to the markdown route returned by `options.rewrite()`.
 *
 * Composes with existing middleware:
 * ```ts
 * export default withAgentReadability(
 *   { rewrite: (p) => `/md${p}` },
 *   (req, event) => existingMiddleware(req, event),
 * )
 * ```
 */
declare function withAgentReadability(options: AgentReadabilityOptions, handler?: MiddlewareHandler): MiddlewareHandler;
/** Recommended matcher that excludes Next.js internals and static files. */
declare const agentReadabilityMatcher: readonly ["/((?!_next|api|.*\\..*|favicon|manifest|robots|health|status).*)"];

export { type AgentReadabilityOptions, type MiddlewareHandler, agentReadabilityMatcher, withAgentReadability };
|
|
@@ -0,0 +1,140 @@
|
|
|
1
|
+
// src/next/index.ts
|
|
2
|
+
import { NextResponse } from "next/server";
|
|
3
|
+
|
|
4
|
+
// src/patterns.ts
// Layer 1: user-agent substrings of known AI agents (all lowercase).
var AI_AGENT_UA_PATTERNS = [
  // Anthropic — https://support.claude.com/en/articles/8896518
  "claudebot", "claude-searchbot", "claude-user", "anthropic-ai", "claude-web",
  // OpenAI — https://platform.openai.com/docs/bots
  "chatgpt", "gptbot", "oai-searchbot", "openai",
  // Google AI
  "gemini", "bard", "google-cloudvertexbot", "google-extended",
  // Meta
  "meta-externalagent", "meta-externalfetcher", "meta-webindexer",
  // Search/Research AI
  "perplexity", "youbot", "you.com", "deepseekbot",
  // Coding assistants
  "cursor", "github-copilot", "codeium", "tabnine", "sourcegraph",
  // Other AI agents / data scrapers
  "cohere-ai", "bytespider", "amazonbot", "ai2bot", "diffbot", "omgili", "omgilibot"
];
// Layer 2: domains recognized in the Signature-Agent header (RFC 9421).
var SIGNATURE_AGENT_DOMAINS = ["chatgpt.com"];
// Layer 3 exclusion list: traditional crawlers, social-preview fetchers and
// uptime monitors that must NOT trip the bot-like heuristic below.
var TRADITIONAL_BOT_PATTERNS = [
  "googlebot", "bingbot", "yandexbot", "baiduspider", "duckduckbot",
  "slurp", "msnbot", "facebot", "twitterbot", "linkedinbot",
  "whatsapp", "telegrambot", "pingdom", "uptimerobot", "newrelic",
  "datadog", "statuspage", "site24x7", "applebot"
];
// Broad bot-ish keyword regex; deliberately without word boundaries, since
// these fragments usually appear inside compound product names.
var BOT_LIKE_REGEX = /bot|agent|fetch|crawl|spider|search/i;

// src/detection.ts
// Classify a request as coming from an AI agent using three ordered layers:
//   1. known UA substring      -> { detected: true, method: "ua-match" }
//   2. Signature-Agent domain  -> { detected: true, method: "signature-agent" }
//   3. bot-like UA lacking sec-fetch-mode, and not a traditional bot
//                              -> { detected: true, method: "heuristic" }
// Anything else               -> { detected: false, method: null }
function isAIAgent(request) {
  const lowerUA = (request.headers.get("user-agent") ?? "").toLowerCase();

  // Layer 1: definitive user-agent match.
  const knownAgent = lowerUA !== "" &&
    AI_AGENT_UA_PATTERNS.some((needle) => lowerUA.includes(needle));
  if (knownAgent) {
    return { detected: true, method: "ua-match" };
  }

  // Layer 2: Signature-Agent header (RFC 9421, sent by ChatGPT agent).
  const signatureAgent = request.headers.get("signature-agent");
  if (signatureAgent) {
    const sig = signatureAgent.toLowerCase();
    if (SIGNATURE_AGENT_DOMAINS.some((domain) => sig.includes(domain))) {
      return { detected: true, method: "signature-agent" };
    }
  }

  // Layer 3: modern browsers send sec-fetch-mode on navigations, so its
  // absence plus a bot-like UA suggests a programmatic client — unless the
  // UA belongs to a known traditional (non-AI) bot.
  if (!request.headers.get("sec-fetch-mode") && lowerUA && BOT_LIKE_REGEX.test(lowerUA)) {
    const traditional = TRADITIONAL_BOT_PATTERNS.some((needle) => lowerUA.includes(needle));
    if (!traditional) {
      return { detected: true, method: "heuristic" };
    }
  }

  return { detected: false, method: null };
}
|
|
93
|
+
|
|
94
|
+
// src/negotiation.ts
// Media types that count as "wants markdown" by default.
var DEFAULT_MARKDOWN_TYPES = ["text/markdown", "text/x-markdown"];
// True when the request's Accept header mentions any markdown media type.
// `options.mediaTypes`, when provided, replaces the default list entirely.
function acceptsMarkdown(request, options) {
  const acceptHeader = request.headers.get("accept");
  if (!acceptHeader) return false;
  const lowered = acceptHeader.toLowerCase();
  const wanted = options?.mediaTypes ?? DEFAULT_MARKDOWN_TYPES;
  return wanted.some((mediaType) => lowered.includes(mediaType));
}
|
|
103
|
+
|
|
104
|
+
// src/next/index.ts
// Higher-order middleware wrapper: requests under the docs prefix that come
// from a detected AI agent (or that explicitly accept markdown) are rewritten
// to the markdown route produced by options.rewrite(); all other requests
// fall through to the optional inner handler, or to NextResponse.next().
function withAgentReadability(options, handler) {
  return async (req, event) => {
    const { pathname } = req.nextUrl;
    const docsPrefix = options.docsPrefix ?? "/docs";
    if (pathname.startsWith(docsPrefix)) {
      const detection = isAIAgent(req);
      const wantsMarkdown = acceptsMarkdown(req);
      if (detection.detected || wantsMarkdown) {
        if (options.onDetection) {
          const callbackResult = options.onDetection({
            path: pathname,
            method: detection.detected ? detection.method : "accept-header",
            userAgent: req.headers.get("user-agent")
          });
          // Async callbacks are handed to the platform so they can finish
          // after the response is returned (fire-and-forget).
          if (callbackResult instanceof Promise) {
            event.waitUntil(callbackResult);
          }
        }
        const target = req.nextUrl.clone();
        target.pathname = options.rewrite(pathname);
        return NextResponse.rewrite(target);
      }
    }
    if (handler) return handler(req, event);
    return NextResponse.next();
  };
}
|
|
133
|
+
// Recommended middleware matcher: a negative lookahead that skips Next.js
// internals (_next), API routes, any path containing a file extension, and
// common well-known endpoints (favicon/manifest/robots/health/status).
var agentReadabilityMatcher = [
  "/((?!_next|api|.*\\..*|favicon|manifest|robots|health|status).*)"
];
export {
  agentReadabilityMatcher,
  withAgentReadability
};
//# sourceMappingURL=index.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../../src/next/index.ts","../../src/patterns.ts","../../src/detection.ts","../../src/negotiation.ts"],"sourcesContent":["import type { NextFetchEvent, NextRequest } from \"next/server\";\nimport { NextResponse } from \"next/server\";\nimport { isAIAgent } from \"../detection\";\nimport { acceptsMarkdown } from \"../negotiation\";\nimport type { DetectionMethod } from \"../types\";\n\nexport interface AgentReadabilityOptions {\n\t/** URL prefix to intercept. Default: '/docs' */\n\tdocsPrefix?: string;\n\t/** Maps request path to markdown route */\n\trewrite: (pathname: string) => string;\n\t/** Fire-and-forget callback. Async returns are passed to event.waitUntil(). */\n\tonDetection?: (info: {\n\t\tpath: string;\n\t\tmethod: DetectionMethod | \"accept-header\";\n\t\tuserAgent: string | null;\n\t}) => void | Promise<void>;\n}\n\nexport type MiddlewareHandler = (\n\treq: NextRequest,\n\tevent: NextFetchEvent,\n) => NextResponse | Response | Promise<NextResponse | Response>;\n\n/**\n * Higher-order function that wraps Next.js middleware with AI agent detection.\n * Detected AI agents or requests with Accept: text/markdown are rewritten\n * to the markdown route returned by `options.rewrite()`.\n *\n * Composes with existing middleware:\n * ```ts\n * export default withAgentReadability(\n * { rewrite: (p) => `/md${p}` },\n * (req, event) => existingMiddleware(req, event),\n * )\n * ```\n */\nexport function withAgentReadability(\n\toptions: AgentReadabilityOptions,\n\thandler?: MiddlewareHandler,\n): MiddlewareHandler {\n\treturn async (req: NextRequest, event: NextFetchEvent) => {\n\t\tconst { pathname } = req.nextUrl;\n\t\tconst prefix = options.docsPrefix ?? \"/docs\";\n\n\t\tif (pathname.startsWith(prefix)) {\n\t\t\tconst result = isAIAgent(req);\n\t\t\tconst markdownAccepted = acceptsMarkdown(req);\n\n\t\t\tif (result.detected || markdownAccepted) {\n\t\t\t\tif (options.onDetection) {\n\t\t\t\t\tconst method = result.detected ? 
result.method : \"accept-header\";\n\t\t\t\t\tconst detection = options.onDetection({\n\t\t\t\t\t\tpath: pathname,\n\t\t\t\t\t\tmethod,\n\t\t\t\t\t\tuserAgent: req.headers.get(\"user-agent\"),\n\t\t\t\t\t});\n\t\t\t\t\tif (detection instanceof Promise) {\n\t\t\t\t\t\tevent.waitUntil(detection);\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\tconst rewriteUrl = req.nextUrl.clone();\n\t\t\t\trewriteUrl.pathname = options.rewrite(pathname);\n\t\t\t\treturn NextResponse.rewrite(rewriteUrl);\n\t\t\t}\n\t\t}\n\n\t\tif (handler) return handler(req, event);\n\t\treturn NextResponse.next();\n\t};\n}\n\n/** Recommended matcher that excludes Next.js internals and static files. */\nexport const agentReadabilityMatcher = [\n\t\"/((?!_next|api|.*\\\\..*|favicon|manifest|robots|health|status).*)\",\n] as const;\n","/**\n * Layer 1: Known AI agent UA substrings (lowercase).\n * Curated from https://bots.fyi/?tags=ai_assistant + official vendor docs.\n * Last reviewed: 2026-03-20\n */\nexport const AI_AGENT_UA_PATTERNS: readonly string[] = [\n\t// Anthropic — https://support.claude.com/en/articles/8896518\n\t\"claudebot\",\n\t\"claude-searchbot\",\n\t\"claude-user\",\n\t\"anthropic-ai\",\n\t\"claude-web\",\n\n\t// OpenAI — https://platform.openai.com/docs/bots\n\t\"chatgpt\",\n\t\"gptbot\",\n\t\"oai-searchbot\",\n\t\"openai\",\n\n\t// Google AI\n\t\"gemini\",\n\t\"bard\",\n\t\"google-cloudvertexbot\",\n\t\"google-extended\",\n\n\t// Meta\n\t\"meta-externalagent\",\n\t\"meta-externalfetcher\",\n\t\"meta-webindexer\",\n\n\t// Search/Research AI\n\t\"perplexity\",\n\t\"youbot\",\n\t\"you.com\",\n\t\"deepseekbot\",\n\n\t// Coding assistants\n\t\"cursor\",\n\t\"github-copilot\",\n\t\"codeium\",\n\t\"tabnine\",\n\t\"sourcegraph\",\n\n\t// Other AI agents / data scrapers\n\t\"cohere-ai\",\n\t\"bytespider\",\n\t\"amazonbot\",\n\t\"ai2bot\",\n\t\"diffbot\",\n\t\"omgili\",\n\t\"omgilibot\",\n];\n\n/**\n * Layer 2: Known AI service URLs in Signature-Agent header (RFC 9421).\n */\nexport const 
SIGNATURE_AGENT_DOMAINS: readonly string[] = [\"chatgpt.com\"];\n\n/**\n * Layer 3: Traditional bot exclusion list. Bots that should NOT trigger the\n * heuristic layer (search engine crawlers, social previews, monitoring tools).\n */\nexport const TRADITIONAL_BOT_PATTERNS: readonly string[] = [\n\t\"googlebot\",\n\t\"bingbot\",\n\t\"yandexbot\",\n\t\"baiduspider\",\n\t\"duckduckbot\",\n\t\"slurp\",\n\t\"msnbot\",\n\t\"facebot\",\n\t\"twitterbot\",\n\t\"linkedinbot\",\n\t\"whatsapp\",\n\t\"telegrambot\",\n\t\"pingdom\",\n\t\"uptimerobot\",\n\t\"newrelic\",\n\t\"datadog\",\n\t\"statuspage\",\n\t\"site24x7\",\n\t\"applebot\",\n];\n\n/**\n * Broad regex for bot-like UA strings (used only in Layer 3 heuristic).\n * No word boundaries — keywords commonly appear in compound names.\n */\nexport const BOT_LIKE_REGEX: RegExp = /bot|agent|fetch|crawl|spider|search/i;\n","import {\n\tAI_AGENT_UA_PATTERNS,\n\tBOT_LIKE_REGEX,\n\tSIGNATURE_AGENT_DOMAINS,\n\tTRADITIONAL_BOT_PATTERNS,\n} from \"./patterns\";\nimport type { DetectionResult, MinimalRequest } from \"./types\";\n\n/**\n * Detects AI agents from HTTP request headers.\n *\n * Three detection layers (checked in order):\n * 1. Known UA patterns (definitive)\n * 2. Signature-Agent header (definitive, RFC 9421)\n * 3. Missing sec-fetch-mode heuristic (catches unknown bots)\n *\n * Optimizes for recall over precision: serving markdown to a non-AI bot\n * is low-harm; missing an AI agent means a worse experience.\n */\nexport function isAIAgent(request: MinimalRequest): DetectionResult {\n\tconst userAgent = request.headers.get(\"user-agent\");\n\tconst lowerUA = userAgent?.toLowerCase() ?? 
\"\";\n\n\t// Layer 1: Known UA pattern match\n\tif (lowerUA && AI_AGENT_UA_PATTERNS.some((pattern) => lowerUA.includes(pattern))) {\n\t\treturn { detected: true, method: \"ua-match\" };\n\t}\n\n\t// Layer 2: Signature-Agent header (RFC 9421, used by ChatGPT agent)\n\tconst signatureAgent = request.headers.get(\"signature-agent\");\n\tif (signatureAgent) {\n\t\tconst lowerSig = signatureAgent.toLowerCase();\n\t\tif (SIGNATURE_AGENT_DOMAINS.some((domain) => lowerSig.includes(domain))) {\n\t\t\treturn { detected: true, method: \"signature-agent\" };\n\t\t}\n\t}\n\n\t// Layer 3: Missing browser fingerprint heuristic\n\t// Real browsers (Chrome 76+, Firefox 90+, Safari 16.4+) send sec-fetch-mode\n\t// on navigation requests. Its absence signals a programmatic client.\n\tconst secFetchMode = request.headers.get(\"sec-fetch-mode\");\n\tif (!secFetchMode && lowerUA && BOT_LIKE_REGEX.test(lowerUA)) {\n\t\tconst isTraditionalBot = TRADITIONAL_BOT_PATTERNS.some((pattern) => lowerUA.includes(pattern));\n\t\tif (!isTraditionalBot) {\n\t\t\treturn { detected: true, method: \"heuristic\" };\n\t\t}\n\t}\n\n\treturn { detected: false, method: null };\n}\n","import { isAIAgent } from \"./detection\";\nimport type { DetectionResult, MinimalRequest } from \"./types\";\n\nconst DEFAULT_MARKDOWN_TYPES = [\"text/markdown\", \"text/x-markdown\"];\n\nexport interface AcceptMarkdownOptions {\n\tmediaTypes?: string[];\n}\n\n/**\n * Check if the request prefers markdown via the Accept header.\n */\nexport function acceptsMarkdown(request: MinimalRequest, options?: AcceptMarkdownOptions): boolean {\n\tconst accept = request.headers.get(\"accept\");\n\tif (!accept) return false;\n\n\tconst types = options?.mediaTypes ?? 
DEFAULT_MARKDOWN_TYPES;\n\tconst lowerAccept = accept.toLowerCase();\n\treturn types.some((type) => lowerAccept.includes(type));\n}\n\nexport interface ShouldServeMarkdownResult {\n\tserve: boolean;\n\treason: \"agent\" | \"accept-header\" | null;\n\tdetection: DetectionResult;\n}\n\n/**\n * Combines agent detection and content negotiation into one call.\n * Returns whether to serve markdown and why.\n */\nexport function shouldServeMarkdown(\n\trequest: MinimalRequest,\n\toptions?: AcceptMarkdownOptions,\n): ShouldServeMarkdownResult {\n\tconst detection = isAIAgent(request);\n\tif (detection.detected) {\n\t\treturn { serve: true, reason: \"agent\", detection };\n\t}\n\n\tif (acceptsMarkdown(request, options)) {\n\t\treturn { serve: true, reason: \"accept-header\", detection };\n\t}\n\n\treturn { serve: false, reason: null, detection };\n}\n"],"mappings":";AACA,SAAS,oBAAoB;;;ACItB,IAAM,uBAA0C;AAAA;AAAA,EAEtD;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EAGA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EAGA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EAGA;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EAGA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EAGA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EAGA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACD;AAKO,IAAM,0BAA6C,CAAC,aAAa;AAMjE,IAAM,2BAA8C;AAAA,EAC1D;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACD;AAMO,IAAM,iBAAyB;;;ACrE/B,SAAS,UAAU,SAA0C;AACnE,QAAM,YAAY,QAAQ,QAAQ,IAAI,YAAY;AAClD,QAAM,UAAU,WAAW,YAAY,KAAK;AAG5C,MAAI,WAAW,qBAAqB,KAAK,CAAC,YAAY,QAAQ,SAAS,OAAO,CAAC,GAAG;AACjF,WAAO,EAAE,UAAU,MAAM,QAAQ,WAAW;AAAA,EAC7C;AAGA,QAAM,iBAAiB,QAAQ,QAAQ,IAAI,iBAAiB;AAC5D,MAAI,gBAAgB;AACnB,UAAM,WAAW,eAAe,YAAY;AAC5C,QAAI,wBAAwB,KAAK,CAAC,WAAW,SAAS,SAAS,MAAM,CAAC,GAAG;AACxE,aAAO,EAAE,UAAU,MAAM,QAAQ,kBAAkB;AAAA,IACpD;AAAA,EACD;AAKA,QAAM,eAAe,QAAQ,QAAQ,IAAI,gBAAgB;AACzD,MAAI,CAAC,gBAAgB,WAAW,eAAe,KA
AK,OAAO,GAAG;AAC7D,UAAM,mBAAmB,yBAAyB,KAAK,CAAC,YAAY,QAAQ,SAAS,OAAO,CAAC;AAC7F,QAAI,CAAC,kBAAkB;AACtB,aAAO,EAAE,UAAU,MAAM,QAAQ,YAAY;AAAA,IAC9C;AAAA,EACD;AAEA,SAAO,EAAE,UAAU,OAAO,QAAQ,KAAK;AACxC;;;AC9CA,IAAM,yBAAyB,CAAC,iBAAiB,iBAAiB;AAS3D,SAAS,gBAAgB,SAAyB,SAA0C;AAClG,QAAM,SAAS,QAAQ,QAAQ,IAAI,QAAQ;AAC3C,MAAI,CAAC,OAAQ,QAAO;AAEpB,QAAM,QAAQ,SAAS,cAAc;AACrC,QAAM,cAAc,OAAO,YAAY;AACvC,SAAO,MAAM,KAAK,CAAC,SAAS,YAAY,SAAS,IAAI,CAAC;AACvD;;;AHkBO,SAAS,qBACf,SACA,SACoB;AACpB,SAAO,OAAO,KAAkB,UAA0B;AACzD,UAAM,EAAE,SAAS,IAAI,IAAI;AACzB,UAAM,SAAS,QAAQ,cAAc;AAErC,QAAI,SAAS,WAAW,MAAM,GAAG;AAChC,YAAM,SAAS,UAAU,GAAG;AAC5B,YAAM,mBAAmB,gBAAgB,GAAG;AAE5C,UAAI,OAAO,YAAY,kBAAkB;AACxC,YAAI,QAAQ,aAAa;AACxB,gBAAM,SAAS,OAAO,WAAW,OAAO,SAAS;AACjD,gBAAM,YAAY,QAAQ,YAAY;AAAA,YACrC,MAAM;AAAA,YACN;AAAA,YACA,WAAW,IAAI,QAAQ,IAAI,YAAY;AAAA,UACxC,CAAC;AACD,cAAI,qBAAqB,SAAS;AACjC,kBAAM,UAAU,SAAS;AAAA,UAC1B;AAAA,QACD;AAEA,cAAM,aAAa,IAAI,QAAQ,MAAM;AACrC,mBAAW,WAAW,QAAQ,QAAQ,QAAQ;AAC9C,eAAO,aAAa,QAAQ,UAAU;AAAA,MACvC;AAAA,IACD;AAEA,QAAI,QAAS,QAAO,QAAQ,KAAK,KAAK;AACtC,WAAO,aAAa,KAAK;AAAA,EAC1B;AACD;AAGO,IAAM,0BAA0B;AAAA,EACtC;AACD;","names":[]}
|
package/package.json
ADDED
|
@@ -0,0 +1,91 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@vercel/agent-readability",
|
|
3
|
+
"version": "0.1.0",
|
|
4
|
+
"description": "Detect AI agents. Serve them markdown. Audit your site against the Agent Readability Spec.",
|
|
5
|
+
"type": "module",
|
|
6
|
+
"main": "./dist/index.cjs",
|
|
7
|
+
"module": "./dist/index.js",
|
|
8
|
+
"types": "./dist/index.d.ts",
|
|
9
|
+
"sideEffects": false,
|
|
10
|
+
"exports": {
|
|
11
|
+
".": {
|
|
12
|
+
"import": {
|
|
13
|
+
"types": "./dist/index.d.ts",
|
|
14
|
+
"default": "./dist/index.js"
|
|
15
|
+
},
|
|
16
|
+
"require": {
|
|
17
|
+
"types": "./dist/index.d.cts",
|
|
18
|
+
"default": "./dist/index.cjs"
|
|
19
|
+
}
|
|
20
|
+
},
|
|
21
|
+
"./next": {
|
|
22
|
+
"import": {
|
|
23
|
+
"types": "./dist/next/index.d.ts",
|
|
24
|
+
"default": "./dist/next/index.js"
|
|
25
|
+
},
|
|
26
|
+
"require": {
|
|
27
|
+
"types": "./dist/next/index.d.cts",
|
|
28
|
+
"default": "./dist/next/index.cjs"
|
|
29
|
+
}
|
|
30
|
+
}
|
|
31
|
+
},
|
|
32
|
+
"bin": {
|
|
33
|
+
"agent-readability": "./dist/cli/index.cjs"
|
|
34
|
+
},
|
|
35
|
+
"files": [
|
|
36
|
+
"dist",
|
|
37
|
+
"skill"
|
|
38
|
+
],
|
|
39
|
+
"engines": {
|
|
40
|
+
"node": ">=20.0.0"
|
|
41
|
+
},
|
|
42
|
+
"peerDependencies": {
|
|
43
|
+
"next": ">=14"
|
|
44
|
+
},
|
|
45
|
+
"peerDependenciesMeta": {
|
|
46
|
+
"next": {
|
|
47
|
+
"optional": true
|
|
48
|
+
}
|
|
49
|
+
},
|
|
50
|
+
"devDependencies": {
|
|
51
|
+
"@biomejs/biome": "^1.9",
|
|
52
|
+
"@changesets/cli": "^2",
|
|
53
|
+
"@types/node": "^22",
|
|
54
|
+
"@vitest/coverage-v8": "^3",
|
|
55
|
+
"cheerio": "^1",
|
|
56
|
+
"citty": "^0.1",
|
|
57
|
+
"next": "^15",
|
|
58
|
+
"p-limit": "^6",
|
|
59
|
+
"picocolors": "^1",
|
|
60
|
+
"tsup": "^8",
|
|
61
|
+
"typescript": "^5.8",
|
|
62
|
+
"vitest": "^3"
|
|
63
|
+
},
|
|
64
|
+
"publishConfig": {
|
|
65
|
+
"access": "restricted"
|
|
66
|
+
},
|
|
67
|
+
"license": "MIT",
|
|
68
|
+
"repository": {
|
|
69
|
+
"type": "git",
|
|
70
|
+
"url": "https://github.com/vercel-labs/agent-readability.git"
|
|
71
|
+
},
|
|
72
|
+
"keywords": [
|
|
73
|
+
"ai",
|
|
74
|
+
"agent",
|
|
75
|
+
"readability",
|
|
76
|
+
"markdown",
|
|
77
|
+
"detection",
|
|
78
|
+
"next.js",
|
|
79
|
+
"middleware"
|
|
80
|
+
],
|
|
81
|
+
"scripts": {
|
|
82
|
+
"build": "rm -rf dist && tsup",
|
|
83
|
+
"test": "vitest",
|
|
84
|
+
"test:ci": "vitest run --coverage",
|
|
85
|
+
"lint": "biome check .",
|
|
86
|
+
"lint:fix": "biome check --write .",
|
|
87
|
+
"typecheck": "tsc --noEmit",
|
|
88
|
+
"changeset": "changeset",
|
|
89
|
+
"release": "changeset publish"
|
|
90
|
+
}
|
|
91
|
+
}
|