nodebench-mcp 2.28.0 → 2.31.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +86 -0
- package/dist/db.js +69 -0
- package/dist/db.js.map +1 -1
- package/dist/engine/conformance.d.ts +31 -0
- package/dist/engine/conformance.js +81 -0
- package/dist/engine/conformance.js.map +1 -0
- package/dist/engine/contextBridge.d.ts +67 -0
- package/dist/engine/contextBridge.js +392 -0
- package/dist/engine/contextBridge.js.map +1 -0
- package/dist/engine/server.d.ts +23 -0
- package/dist/engine/server.js +481 -0
- package/dist/engine/server.js.map +1 -0
- package/dist/engine/session.d.ts +55 -0
- package/dist/engine/session.js +139 -0
- package/dist/engine/session.js.map +1 -0
- package/dist/index.js +113 -11
- package/dist/index.js.map +1 -1
- package/dist/sandboxApi.d.ts +20 -0
- package/dist/sandboxApi.js +99 -0
- package/dist/sandboxApi.js.map +1 -0
- package/dist/tools/contextSandboxTools.d.ts +15 -0
- package/dist/tools/contextSandboxTools.js +469 -0
- package/dist/tools/contextSandboxTools.js.map +1 -0
- package/dist/tools/contextTools.d.ts +11 -0
- package/dist/tools/contextTools.js +175 -0
- package/dist/tools/contextTools.js.map +1 -0
- package/dist/tools/designGovernanceTools.d.ts +20 -0
- package/dist/tools/designGovernanceTools.js +872 -0
- package/dist/tools/designGovernanceTools.js.map +1 -0
- package/dist/tools/openclawTools.d.ts +1 -0
- package/dist/tools/openclawTools.js +780 -0
- package/dist/tools/openclawTools.js.map +1 -1
- package/dist/tools/progressiveDiscoveryTools.js +3 -3
- package/dist/tools/progressiveDiscoveryTools.js.map +1 -1
- package/dist/tools/researchOptimizerTools.d.ts +17 -0
- package/dist/tools/researchOptimizerTools.js +454 -0
- package/dist/tools/researchOptimizerTools.js.map +1 -0
- package/dist/tools/scraplingTools.d.ts +15 -0
- package/dist/tools/scraplingTools.js +278 -0
- package/dist/tools/scraplingTools.js.map +1 -0
- package/dist/tools/thompsonProtocolTools.d.ts +58 -0
- package/dist/tools/thompsonProtocolTools.js +864 -0
- package/dist/tools/thompsonProtocolTools.js.map +1 -0
- package/dist/tools/toolRegistry.js +625 -0
- package/dist/tools/toolRegistry.js.map +1 -1
- package/dist/toolsetRegistry.js +14 -0
- package/dist/toolsetRegistry.js.map +1 -1
- package/package.json +6 -2
|
@@ -0,0 +1,278 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Scrapling tools — Adaptive web scraping via Scrapling Python bridge server.
|
|
3
|
+
*
|
|
4
|
+
* Calls the scrapling_bridge FastAPI server over HTTP (port 8008).
|
|
5
|
+
* Requires SCRAPLING_SERVER_URL env var (default: http://localhost:8008).
|
|
6
|
+
*
|
|
7
|
+
* Tiers:
|
|
8
|
+
* http — Basic HTTP fetch with stealthy headers
|
|
9
|
+
* stealth — Anti-bot bypass (Cloudflare, TLS fingerprinting)
|
|
10
|
+
* dynamic — Full browser rendering (Playwright)
|
|
11
|
+
*
|
|
12
|
+
* Note: Web scraping should comply with target site ToS. User responsibility.
|
|
13
|
+
*/
|
|
14
|
+
// ─── HTTP helpers ─────────────────────────────────────────────────────────────

// Fallback base URL for the Scrapling bridge when SCRAPLING_SERVER_URL is unset.
const DEFAULT_SERVER = "http://localhost:8008";

/**
 * Resolve the Scrapling bridge base URL.
 *
 * Reads SCRAPLING_SERVER_URL from the environment (falling back to the
 * default) and strips a single trailing slash so endpoint paths can be
 * concatenated without producing a double slash.
 */
function getServerUrl() {
    const base = process.env.SCRAPLING_SERVER_URL || DEFAULT_SERVER;
    return base.endsWith("/") ? base.slice(0, -1) : base;
}
|
|
19
|
+
/**
 * Send a JSON request to the Scrapling bridge server.
 *
 * @param {string} endpoint - Path (optionally with query string) appended to the base URL.
 * @param {string} method - HTTP method ("GET" or "POST").
 * @param {object} [body] - Optional JSON-serializable payload; when present the
 *   request is sent with a JSON content type.
 * @returns {Promise<object>} The parsed JSON response, or an
 *   { error, message, suggestion } object when the bridge is unreachable or
 *   the response body is not valid JSON.
 */
async function scraplingRequest(endpoint, method, body) {
    const url = `${getServerUrl()}${endpoint}`;
    try {
        const res = await fetch(url, {
            method,
            headers: body ? { "Content-Type": "application/json" } : undefined,
            body: body ? JSON.stringify(body) : undefined,
        });
        return await res.json();
    }
    catch (e) {
        // fetch may reject with a non-Error value; guard before reading .message
        // so the error-reporting path itself cannot throw.
        const detail = e instanceof Error ? e.message : String(e);
        return {
            error: true,
            message: `Scrapling bridge unreachable at ${url}: ${detail}`,
            suggestion: "Ensure the Scrapling bridge server is running. Start with: cd python-mcp-servers/scrapling_bridge && uvicorn server:app --port 8008",
        };
    }
}
|
|
37
|
+
// ─── Tools ────────────────────────────────────────────────────────────────────
|
|
38
|
+
export const scraplingTools = [
|
|
39
|
+
// 1. scrapling_fetch
|
|
40
|
+
{
|
|
41
|
+
name: "scrapling_fetch",
|
|
42
|
+
description: "Fetch a URL with adaptive scraping. Auto-selects fetcher tier: 'http' for public pages, 'stealth' for anti-bot bypass (Cloudflare, TLS fingerprinting), 'dynamic' for JS-rendered pages. Returns page title, text preview, status code. Optionally extract data inline with CSS/XPath selectors. Requires Scrapling bridge server (SCRAPLING_SERVER_URL, default localhost:8008). Web scraping must comply with target site ToS.",
|
|
43
|
+
inputSchema: {
|
|
44
|
+
type: "object",
|
|
45
|
+
properties: {
|
|
46
|
+
url: { type: "string", description: "URL to fetch" },
|
|
47
|
+
tier: {
|
|
48
|
+
type: "string",
|
|
49
|
+
enum: ["http", "stealth", "dynamic"],
|
|
50
|
+
description: "Fetcher tier (default: http). Use 'stealth' for Cloudflare sites, 'dynamic' for JS-rendered pages.",
|
|
51
|
+
},
|
|
52
|
+
impersonate: {
|
|
53
|
+
type: "string",
|
|
54
|
+
description: "Browser TLS fingerprint to impersonate (e.g. 'chrome')",
|
|
55
|
+
},
|
|
56
|
+
extract: {
|
|
57
|
+
type: "object",
|
|
58
|
+
properties: {
|
|
59
|
+
selectors: {
|
|
60
|
+
type: "object",
|
|
61
|
+
additionalProperties: { type: "string" },
|
|
62
|
+
description: "Map of name -> CSS/XPath selector to extract",
|
|
63
|
+
},
|
|
64
|
+
},
|
|
65
|
+
description: "Optional inline extraction selectors",
|
|
66
|
+
},
|
|
67
|
+
proxy: { type: "string", description: "Optional proxy URL" },
|
|
68
|
+
timeout: { type: "number", description: "Request timeout in seconds (default: 30)" },
|
|
69
|
+
},
|
|
70
|
+
required: ["url"],
|
|
71
|
+
},
|
|
72
|
+
handler: async (params) => {
|
|
73
|
+
return await scraplingRequest("/fetch", "POST", {
|
|
74
|
+
url: params.url,
|
|
75
|
+
tier: params.tier || "http",
|
|
76
|
+
impersonate: params.impersonate,
|
|
77
|
+
extract: params.extract,
|
|
78
|
+
proxy: params.proxy,
|
|
79
|
+
timeout: params.timeout || 30,
|
|
80
|
+
stealthy_headers: true,
|
|
81
|
+
});
|
|
82
|
+
},
|
|
83
|
+
},
|
|
84
|
+
// 2. scrapling_extract
|
|
85
|
+
{
|
|
86
|
+
name: "scrapling_extract",
|
|
87
|
+
description: "Extract structured data from a URL using CSS or XPath selectors. Zero LLM tokens — deterministic extraction. Use CSS selectors like 'h1::text', '.price::text', 'a[href]::attr(href)'. Use XPath for complex queries starting with '//'. Falls back to fetch_url + LLM extraction if selectors fail. Requires Scrapling bridge server.",
|
|
88
|
+
inputSchema: {
|
|
89
|
+
type: "object",
|
|
90
|
+
properties: {
|
|
91
|
+
url: { type: "string", description: "URL to extract from" },
|
|
92
|
+
selectors: {
|
|
93
|
+
type: "object",
|
|
94
|
+
additionalProperties: { type: "string" },
|
|
95
|
+
description: "Map of field name -> CSS or XPath selector. E.g. { title: 'h1::text', prices: '.price::text' }",
|
|
96
|
+
},
|
|
97
|
+
tier: {
|
|
98
|
+
type: "string",
|
|
99
|
+
enum: ["http", "stealth", "dynamic"],
|
|
100
|
+
description: "Fetcher tier (default: http)",
|
|
101
|
+
},
|
|
102
|
+
impersonate: { type: "string", description: "Browser fingerprint" },
|
|
103
|
+
proxy: { type: "string", description: "Optional proxy URL" },
|
|
104
|
+
},
|
|
105
|
+
required: ["url", "selectors"],
|
|
106
|
+
},
|
|
107
|
+
handler: async (params) => {
|
|
108
|
+
return await scraplingRequest("/extract", "POST", {
|
|
109
|
+
url: params.url,
|
|
110
|
+
selectors: params.selectors,
|
|
111
|
+
tier: params.tier || "http",
|
|
112
|
+
impersonate: params.impersonate,
|
|
113
|
+
proxy: params.proxy,
|
|
114
|
+
timeout: params.timeout || 30,
|
|
115
|
+
});
|
|
116
|
+
},
|
|
117
|
+
},
|
|
118
|
+
// 3. scrapling_batch_fetch
|
|
119
|
+
{
|
|
120
|
+
name: "scrapling_batch_fetch",
|
|
121
|
+
description: "Fetch multiple URLs in parallel with configurable concurrency. Use for competitive analysis, multi-source research, or batch data collection. Up to 20 URLs, 1-10 concurrent fetches. Each URL gets the same tier/proxy config. Returns per-URL results with success/failure status. Requires Scrapling bridge server.",
|
|
122
|
+
inputSchema: {
|
|
123
|
+
type: "object",
|
|
124
|
+
properties: {
|
|
125
|
+
urls: {
|
|
126
|
+
type: "array",
|
|
127
|
+
items: { type: "string" },
|
|
128
|
+
description: "URLs to fetch (1-20)",
|
|
129
|
+
},
|
|
130
|
+
tier: {
|
|
131
|
+
type: "string",
|
|
132
|
+
enum: ["http", "stealth", "dynamic"],
|
|
133
|
+
description: "Fetcher tier for all URLs (default: http)",
|
|
134
|
+
},
|
|
135
|
+
concurrency: {
|
|
136
|
+
type: "number",
|
|
137
|
+
description: "Max parallel fetches (default: 5, max: 10)",
|
|
138
|
+
},
|
|
139
|
+
extract: {
|
|
140
|
+
type: "object",
|
|
141
|
+
properties: {
|
|
142
|
+
selectors: {
|
|
143
|
+
type: "object",
|
|
144
|
+
additionalProperties: { type: "string" },
|
|
145
|
+
},
|
|
146
|
+
},
|
|
147
|
+
description: "Optional extraction selectors applied to all URLs",
|
|
148
|
+
},
|
|
149
|
+
impersonate: { type: "string" },
|
|
150
|
+
proxy: { type: "string" },
|
|
151
|
+
timeout: { type: "number" },
|
|
152
|
+
},
|
|
153
|
+
required: ["urls"],
|
|
154
|
+
},
|
|
155
|
+
handler: async (params) => {
|
|
156
|
+
return await scraplingRequest("/fetch/batch", "POST", {
|
|
157
|
+
urls: params.urls,
|
|
158
|
+
tier: params.tier || "http",
|
|
159
|
+
concurrency: params.concurrency || 5,
|
|
160
|
+
extract: params.extract,
|
|
161
|
+
impersonate: params.impersonate,
|
|
162
|
+
proxy: params.proxy,
|
|
163
|
+
timeout: params.timeout || 30,
|
|
164
|
+
stealthy_headers: true,
|
|
165
|
+
});
|
|
166
|
+
},
|
|
167
|
+
},
|
|
168
|
+
// 4. scrapling_track_element
|
|
169
|
+
{
|
|
170
|
+
name: "scrapling_track_element",
|
|
171
|
+
description: "Track an element across page versions using Scrapling's adaptive element relocation. Survives CSS class renames, DOM restructuring, and layout changes. Use for price monitoring, content change detection, or element stability checks. Returns element tag, text, attributes, and HTML. Requires Scrapling bridge server.",
|
|
172
|
+
inputSchema: {
|
|
173
|
+
type: "object",
|
|
174
|
+
properties: {
|
|
175
|
+
url: { type: "string", description: "URL of the page" },
|
|
176
|
+
selector: { type: "string", description: "CSS selector for the element to track" },
|
|
177
|
+
tier: {
|
|
178
|
+
type: "string",
|
|
179
|
+
enum: ["http", "stealth", "dynamic"],
|
|
180
|
+
description: "Fetcher tier (default: http)",
|
|
181
|
+
},
|
|
182
|
+
impersonate: { type: "string" },
|
|
183
|
+
proxy: { type: "string" },
|
|
184
|
+
},
|
|
185
|
+
required: ["url", "selector"],
|
|
186
|
+
},
|
|
187
|
+
handler: async (params) => {
|
|
188
|
+
return await scraplingRequest("/track", "POST", {
|
|
189
|
+
url: params.url,
|
|
190
|
+
selector: params.selector,
|
|
191
|
+
tier: params.tier || "http",
|
|
192
|
+
impersonate: params.impersonate,
|
|
193
|
+
proxy: params.proxy,
|
|
194
|
+
});
|
|
195
|
+
},
|
|
196
|
+
},
|
|
197
|
+
// 5. scrapling_crawl
|
|
198
|
+
{
|
|
199
|
+
name: "scrapling_crawl",
|
|
200
|
+
description: "Start a multi-page spider crawl with extraction. Crawls from start URLs, follows links matching a CSS selector, extracts data per page. Returns a session_id to poll with scrapling_crawl_status. Max 500 pages, 1-20 concurrent. Domain whitelist enforced. Use for SEC filing crawls, news aggregation, or site-wide data collection. Requires Scrapling bridge server.",
|
|
201
|
+
inputSchema: {
|
|
202
|
+
type: "object",
|
|
203
|
+
properties: {
|
|
204
|
+
start_urls: {
|
|
205
|
+
type: "array",
|
|
206
|
+
items: { type: "string" },
|
|
207
|
+
description: "URLs to start crawling from (1-10)",
|
|
208
|
+
},
|
|
209
|
+
max_pages: {
|
|
210
|
+
type: "number",
|
|
211
|
+
description: "Max pages to crawl (default: 50, max: 500)",
|
|
212
|
+
},
|
|
213
|
+
concurrency: {
|
|
214
|
+
type: "number",
|
|
215
|
+
description: "Concurrent fetches (default: 5, max: 20)",
|
|
216
|
+
},
|
|
217
|
+
selectors: {
|
|
218
|
+
type: "object",
|
|
219
|
+
additionalProperties: { type: "string" },
|
|
220
|
+
description: "CSS/XPath selectors to extract from each page",
|
|
221
|
+
},
|
|
222
|
+
follow_links: {
|
|
223
|
+
type: "string",
|
|
224
|
+
description: "CSS selector for links to follow (e.g. '.pagination a')",
|
|
225
|
+
},
|
|
226
|
+
domain_whitelist: {
|
|
227
|
+
type: "array",
|
|
228
|
+
items: { type: "string" },
|
|
229
|
+
description: "Only follow links to these domains",
|
|
230
|
+
},
|
|
231
|
+
},
|
|
232
|
+
required: ["start_urls"],
|
|
233
|
+
},
|
|
234
|
+
handler: async (params) => {
|
|
235
|
+
return await scraplingRequest("/crawl/start", "POST", {
|
|
236
|
+
start_urls: params.start_urls,
|
|
237
|
+
max_pages: params.max_pages || 50,
|
|
238
|
+
concurrency: params.concurrency || 5,
|
|
239
|
+
selectors: params.selectors || {},
|
|
240
|
+
follow_links: params.follow_links,
|
|
241
|
+
domain_whitelist: params.domain_whitelist || [],
|
|
242
|
+
});
|
|
243
|
+
},
|
|
244
|
+
},
|
|
245
|
+
// 6. scrapling_crawl_status
|
|
246
|
+
{
|
|
247
|
+
name: "scrapling_crawl_status",
|
|
248
|
+
description: "Check crawl progress and get collected items. Pass the session_id from scrapling_crawl. Returns status (running/completed/stopped), pages crawled, items with extracted data, and errors. Poll periodically until status is 'completed'. Requires Scrapling bridge server.",
|
|
249
|
+
inputSchema: {
|
|
250
|
+
type: "object",
|
|
251
|
+
properties: {
|
|
252
|
+
session_id: { type: "string", description: "Crawl session ID from scrapling_crawl" },
|
|
253
|
+
},
|
|
254
|
+
required: ["session_id"],
|
|
255
|
+
},
|
|
256
|
+
handler: async (params) => {
|
|
257
|
+
return await scraplingRequest(`/crawl/status?session_id=${encodeURIComponent(params.session_id)}`, "GET");
|
|
258
|
+
},
|
|
259
|
+
},
|
|
260
|
+
// 7. scrapling_crawl_stop
|
|
261
|
+
{
|
|
262
|
+
name: "scrapling_crawl_stop",
|
|
263
|
+
description: "Stop a running crawl session. Pass the session_id from scrapling_crawl. Items collected so far are preserved. Use when you have enough data or need to abort. Requires Scrapling bridge server.",
|
|
264
|
+
inputSchema: {
|
|
265
|
+
type: "object",
|
|
266
|
+
properties: {
|
|
267
|
+
session_id: { type: "string", description: "Crawl session ID to stop" },
|
|
268
|
+
},
|
|
269
|
+
required: ["session_id"],
|
|
270
|
+
},
|
|
271
|
+
handler: async (params) => {
|
|
272
|
+
return await scraplingRequest("/crawl/stop", "POST", {
|
|
273
|
+
session_id: params.session_id,
|
|
274
|
+
});
|
|
275
|
+
},
|
|
276
|
+
},
|
|
277
|
+
];
|
|
278
|
+
//# sourceMappingURL=scraplingTools.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"scraplingTools.js","sourceRoot":"","sources":["../../src/tools/scraplingTools.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;GAYG;AAIH,iFAAiF;AAEjF,MAAM,cAAc,GAAG,uBAAuB,CAAC;AAE/C,SAAS,YAAY;IACnB,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,IAAI,cAAc,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,EAAE,CAAC,CAAC;AACjF,CAAC;AAED,KAAK,UAAU,gBAAgB,CAC7B,QAAgB,EAChB,MAAsB,EACtB,IAA8B;IAE9B,MAAM,GAAG,GAAG,GAAG,YAAY,EAAE,GAAG,QAAQ,EAAE,CAAC;IAC3C,IAAI,CAAC;QACH,MAAM,GAAG,GAAG,MAAM,KAAK,CAAC,GAAG,EAAE;YAC3B,MAAM;YACN,OAAO,EAAE,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,EAAE,kBAAkB,EAAE,CAAC,CAAC,CAAC,SAAS;YAClE,IAAI,EAAE,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,SAAS;SAC9C,CAAC,CAAC;QACH,OAAO,MAAM,GAAG,CAAC,IAAI,EAAE,CAAC;IAC1B,CAAC;IAAC,OAAO,CAAM,EAAE,CAAC;QAChB,OAAO;YACL,KAAK,EAAE,IAAI;YACX,OAAO,EAAE,mCAAmC,GAAG,KAAK,CAAC,CAAC,OAAO,EAAE;YAC/D,UAAU,EACR,qIAAqI;SACxI,CAAC;IACJ,CAAC;AACH,CAAC;AAED,iFAAiF;AAEjF,MAAM,CAAC,MAAM,cAAc,GAAc;IACvC,qBAAqB;IACrB;QACE,IAAI,EAAE,iBAAiB;QACvB,WAAW,EACT,kaAAka;QACpa,WAAW,EAAE;YACX,IAAI,EAAE,QAAQ;YACd,UAAU,EAAE;gBACV,GAAG,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,WAAW,EAAE,cAAc,EAAE;gBACpD,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;oBACd,IAAI,EAAE,CAAC,MAAM,EAAE,SAAS,EAAE,SAAS,CAAC;oBACpC,WAAW,EAAE,oGAAoG;iBAClH;gBACD,WAAW,EAAE;oBACX,IAAI,EAAE,QAAQ;oBACd,WAAW,EAAE,wDAAwD;iBACtE;gBACD,OAAO,EAAE;oBACP,IAAI,EAAE,QAAQ;oBACd,UAAU,EAAE;wBACV,SAAS,EAAE;4BACT,IAAI,EAAE,QAAQ;4BACd,oBAAoB,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE;4BACxC,WAAW,EAAE,8CAA8C;yBAC5D;qBACF;oBACD,WAAW,EAAE,sCAAsC;iBACpD;gBACD,KAAK,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,WAAW,EAAE,oBAAoB,EAAE;gBAC5D,OAAO,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,WAAW,EAAE,0CAA0C,EAAE;aACrF;YACD,QAAQ,EAAE,CAAC,KAAK,CAAC;SAClB;QACD,OAAO,EAAE,KAAK,EAAE,MAAW,EAAE,EAAE;YAC7B,OAAO,MAAM,gBAAgB,CAAC,QAAQ,EAAE,MAAM,EAAE;gBAC9C,GAAG,EAAE,MAAM,CAAC,GAAG;gBACf,IAAI,EAAE,MAAM,CAAC,IAAI,IAAI,MAAM;gBAC3B,WAAW,EAAE,MAAM,CAAC,WAAW;gBAC/B,OAAO,EAAE,MAAM,CAAC,OAAO;gBACvB,KAAK,EAAE,MAAM,CAAC,KAAK;gBACnB,OAAO,EAAE,MAAM,CAAC,OAAO,IAAI,EAAE;gBAC7B,gBAAgB,EAAE,IAAI;a
ACvB,CAAC,CAAC;QACL,CAAC;KACF;IAED,uBAAuB;IACvB;QACE,IAAI,EAAE,mBAAmB;QACzB,WAAW,EACT,wUAAwU;QAC1U,WAAW,EAAE;YACX,IAAI,EAAE,QAAQ;YACd,UAAU,EAAE;gBACV,GAAG,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,WAAW,EAAE,qBAAqB,EAAE;gBAC3D,SAAS,EAAE;oBACT,IAAI,EAAE,QAAQ;oBACd,oBAAoB,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE;oBACxC,WAAW,EAAE,gGAAgG;iBAC9G;gBACD,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;oBACd,IAAI,EAAE,CAAC,MAAM,EAAE,SAAS,EAAE,SAAS,CAAC;oBACpC,WAAW,EAAE,8BAA8B;iBAC5C;gBACD,WAAW,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,WAAW,EAAE,qBAAqB,EAAE;gBACnE,KAAK,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,WAAW,EAAE,oBAAoB,EAAE;aAC7D;YACD,QAAQ,EAAE,CAAC,KAAK,EAAE,WAAW,CAAC;SAC/B;QACD,OAAO,EAAE,KAAK,EAAE,MAAW,EAAE,EAAE;YAC7B,OAAO,MAAM,gBAAgB,CAAC,UAAU,EAAE,MAAM,EAAE;gBAChD,GAAG,EAAE,MAAM,CAAC,GAAG;gBACf,SAAS,EAAE,MAAM,CAAC,SAAS;gBAC3B,IAAI,EAAE,MAAM,CAAC,IAAI,IAAI,MAAM;gBAC3B,WAAW,EAAE,MAAM,CAAC,WAAW;gBAC/B,KAAK,EAAE,MAAM,CAAC,KAAK;gBACnB,OAAO,EAAE,MAAM,CAAC,OAAO,IAAI,EAAE;aAC9B,CAAC,CAAC;QACL,CAAC;KACF;IAED,2BAA2B;IAC3B;QACE,IAAI,EAAE,uBAAuB;QAC7B,WAAW,EACT,wTAAwT;QAC1T,WAAW,EAAE;YACX,IAAI,EAAE,QAAQ;YACd,UAAU,EAAE;gBACV,IAAI,EAAE;oBACJ,IAAI,EAAE,OAAO;oBACb,KAAK,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE;oBACzB,WAAW,EAAE,sBAAsB;iBACpC;gBACD,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;oBACd,IAAI,EAAE,CAAC,MAAM,EAAE,SAAS,EAAE,SAAS,CAAC;oBACpC,WAAW,EAAE,2CAA2C;iBACzD;gBACD,WAAW,EAAE;oBACX,IAAI,EAAE,QAAQ;oBACd,WAAW,EAAE,4CAA4C;iBAC1D;gBACD,OAAO,EAAE;oBACP,IAAI,EAAE,QAAQ;oBACd,UAAU,EAAE;wBACV,SAAS,EAAE;4BACT,IAAI,EAAE,QAAQ;4BACd,oBAAoB,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE;yBACzC;qBACF;oBACD,WAAW,EAAE,mDAAmD;iBACjE;gBACD,WAAW,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE;gBAC/B,KAAK,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE;gBACzB,OAAO,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE;aAC5B;YACD,QAAQ,EAAE,CAAC,MAAM,CAAC;SACnB;QACD,OAAO,EAAE,KAAK,EAAE,MAAW,EAAE,EAAE;YAC7B,OAAO,MAAM,gBAAgB,CAAC,cAAc,EAAE,MAAM,EAAE;gBACpD,IAAI,EAAE,MAAM,CAAC,IAAI;gBACjB,IAAI,EAAE,MAAM,CAAC,IAAI,IAAI,MAAM;gBAC3B,WAAW,EAAE,MAAM,CAAC,WAAW,IAAI,CAAC;gBACpC,OAAO,EAAE,MAAM,CAAC,OAAO;gBACvB,WAAW,EAAE,MAAM,CAAC,WAAW;gBAC/B,KAAK,EAAE,MAAM,CAAC,KAAK;g
BACnB,OAAO,EAAE,MAAM,CAAC,OAAO,IAAI,EAAE;gBAC7B,gBAAgB,EAAE,IAAI;aACvB,CAAC,CAAC;QACL,CAAC;KACF;IAED,6BAA6B;IAC7B;QACE,IAAI,EAAE,yBAAyB;QAC/B,WAAW,EACT,6TAA6T;QAC/T,WAAW,EAAE;YACX,IAAI,EAAE,QAAQ;YACd,UAAU,EAAE;gBACV,GAAG,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,WAAW,EAAE,iBAAiB,EAAE;gBACvD,QAAQ,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,WAAW,EAAE,uCAAuC,EAAE;gBAClF,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;oBACd,IAAI,EAAE,CAAC,MAAM,EAAE,SAAS,EAAE,SAAS,CAAC;oBACpC,WAAW,EAAE,8BAA8B;iBAC5C;gBACD,WAAW,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE;gBAC/B,KAAK,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE;aAC1B;YACD,QAAQ,EAAE,CAAC,KAAK,EAAE,UAAU,CAAC;SAC9B;QACD,OAAO,EAAE,KAAK,EAAE,MAAW,EAAE,EAAE;YAC7B,OAAO,MAAM,gBAAgB,CAAC,QAAQ,EAAE,MAAM,EAAE;gBAC9C,GAAG,EAAE,MAAM,CAAC,GAAG;gBACf,QAAQ,EAAE,MAAM,CAAC,QAAQ;gBACzB,IAAI,EAAE,MAAM,CAAC,IAAI,IAAI,MAAM;gBAC3B,WAAW,EAAE,MAAM,CAAC,WAAW;gBAC/B,KAAK,EAAE,MAAM,CAAC,KAAK;aACpB,CAAC,CAAC;QACL,CAAC;KACF;IAED,qBAAqB;IACrB;QACE,IAAI,EAAE,iBAAiB;QACvB,WAAW,EACT,2WAA2W;QAC7W,WAAW,EAAE;YACX,IAAI,EAAE,QAAQ;YACd,UAAU,EAAE;gBACV,UAAU,EAAE;oBACV,IAAI,EAAE,OAAO;oBACb,KAAK,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE;oBACzB,WAAW,EAAE,oCAAoC;iBAClD;gBACD,SAAS,EAAE;oBACT,IAAI,EAAE,QAAQ;oBACd,WAAW,EAAE,4CAA4C;iBAC1D;gBACD,WAAW,EAAE;oBACX,IAAI,EAAE,QAAQ;oBACd,WAAW,EAAE,0CAA0C;iBACxD;gBACD,SAAS,EAAE;oBACT,IAAI,EAAE,QAAQ;oBACd,oBAAoB,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE;oBACxC,WAAW,EAAE,+CAA+C;iBAC7D;gBACD,YAAY,EAAE;oBACZ,IAAI,EAAE,QAAQ;oBACd,WAAW,EAAE,yDAAyD;iBACvE;gBACD,gBAAgB,EAAE;oBAChB,IAAI,EAAE,OAAO;oBACb,KAAK,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE;oBACzB,WAAW,EAAE,oCAAoC;iBAClD;aACF;YACD,QAAQ,EAAE,CAAC,YAAY,CAAC;SACzB;QACD,OAAO,EAAE,KAAK,EAAE,MAAW,EAAE,EAAE;YAC7B,OAAO,MAAM,gBAAgB,CAAC,cAAc,EAAE,MAAM,EAAE;gBACpD,UAAU,EAAE,MAAM,CAAC,UAAU;gBAC7B,SAAS,EAAE,MAAM,CAAC,SAAS,IAAI,EAAE;gBACjC,WAAW,EAAE,MAAM,CAAC,WAAW,IAAI,CAAC;gBACpC,SAAS,EAAE,MAAM,CAAC,SAAS,IAAI,EAAE;gBACjC,YAAY,EAAE,MAAM,CAAC,YAAY;gBACjC,gBAAgB,EAAE,MAAM,CAAC,gBAAgB,IAAI,EAAE;aAChD,CAAC,CAAC;QACL,CAAC;KACF;IAED,4BAA4B;IAC5B;QACE,IAAI,EAAE,wBAAwB;QAC9B,WAAW,EACT,4QAA4Q;QAC9Q,WAAW,EAA
E;YACX,IAAI,EAAE,QAAQ;YACd,UAAU,EAAE;gBACV,UAAU,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,WAAW,EAAE,uCAAuC,EAAE;aACrF;YACD,QAAQ,EAAE,CAAC,YAAY,CAAC;SACzB;QACD,OAAO,EAAE,KAAK,EAAE,MAAW,EAAE,EAAE;YAC7B,OAAO,MAAM,gBAAgB,CAAC,4BAA4B,kBAAkB,CAAC,MAAM,CAAC,UAAU,CAAC,EAAE,EAAE,KAAK,CAAC,CAAC;QAC5G,CAAC;KACF;IAED,0BAA0B;IAC1B;QACE,IAAI,EAAE,sBAAsB;QAC5B,WAAW,EACT,iMAAiM;QACnM,WAAW,EAAE;YACX,IAAI,EAAE,QAAQ;YACd,UAAU,EAAE;gBACV,UAAU,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,WAAW,EAAE,0BAA0B,EAAE;aACxE;YACD,QAAQ,EAAE,CAAC,YAAY,CAAC;SACzB;QACD,OAAO,EAAE,KAAK,EAAE,MAAW,EAAE,EAAE;YAC7B,OAAO,MAAM,gBAAgB,CAAC,aAAa,EAAE,MAAM,EAAE;gBACnD,UAAU,EAAE,MAAM,CAAC,UAAU;aAC9B,CAAC,CAAC;QACL,CAAC;KACF;CACF,CAAC"}
|
|
@@ -0,0 +1,58 @@
|
|
|
1
|
+
/**
 * Thompson Protocol Tools — "Calculus Made Easy" approach to AI content
 *
 * 4-agent pipeline:
 * 1. Thompson Writer — Plain English mandate, intuition-before-mechanics
 * 2. Feynman Editor — Skeptical Beginner rejection loop
 * 3. Visual Metaphor Mapper — 1:1 analogy→visual prompt generation
 * 4. Anti-Elitism Linter — Mechanical ban list + readability scoring
 *
 * Plus orchestration:
 * 5. thompson_pipeline — End-to-end orchestrator
 * 6. thompson_quality_gate — Deterministic pass/fail checklist
 */
import type { McpTool } from "../types.js";
/** System prompt text for each of the four pipeline agents, keyed by agent role. */
export declare const THOMPSON_SYSTEM_PROMPTS: {
    writer: string;
    feynman_editor: string;
    visual_mapper: string;
    anti_elitism_linter: string;
};
/**
 * Independent pass/fail flags for each quality check applied to content.
 * An overall grade is derived from these flags by {@link deriveThompsonGrade}.
 */
export interface ThompsonQualityChecklist {
    hasPlainEnglishTranslations: boolean;
    hasAnalogyPerConcept: boolean;
    hasDifficultyAcknowledgment: boolean;
    hasIntuitionBeforeMechanics: boolean;
    passesFeynmanEdit: boolean;
    passesAntiElitismLint: boolean;
    hasVisualMetaphors: boolean;
    fleschKincaidUnder10: boolean;
    noBannedPhrases: boolean;
    hasProgressiveComplexity: boolean;
}
/** Collapse a checklist of flags into a single ordinal grade. */
export declare function deriveThompsonGrade(checklist: ThompsonQualityChecklist): "exemplary" | "passing" | "needs_work" | "failing";
/**
 * Scan text for banned phrases; each hit reports its category, a suggested
 * replacement, and its position in the input.
 */
declare function lintBannedPhrases(text: string): Array<{
    phrase: string;
    category: string;
    replacement: string;
    position: number;
}>;
/**
 * Compute readability statistics for a body of text: Flesch-Kincaid grade
 * level, passive-voice percentage, average sentence length, and jargon density.
 */
declare function computeReadabilityMetrics(text: string): {
    fleschKincaidGrade: number;
    passiveVoicePct: number;
    avgSentenceLength: number;
    jargonDensity: number;
};
/** Approximate syllable count for a single word (used by the readability metrics). */
declare function countSyllables(word: string): number;
/** Build the full set of Thompson Protocol MCP tools. */
export declare function createThompsonProtocolTools(): McpTool[];
/** Internals exposed for unit testing only — not part of the public tool API. */
export declare const _testExports: {
    lintBannedPhrases: typeof lintBannedPhrases;
    computeReadabilityMetrics: typeof computeReadabilityMetrics;
    countSyllables: typeof countSyllables;
    BANNED_PHRASES: {
        phrase: string;
        category: string;
        replacement: string;
    }[];
};
export {};
|