@xiedada/nodemw-mcp-server 0.0.4 → 0.0.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -26,12 +26,2144 @@
26
26
  * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
27
27
  * POSSIBILITY OF SUCH DAMAGE.
28
28
  */
29
- import{StdioServerTransport as Ze}from"@modelcontextprotocol/sdk/server/stdio.js";import{parseArgs as to}from"util";import{McpServer as ne}from"@modelcontextprotocol/sdk/server/mcp.js";var K="nodemw-mcp-server/1.0";function Z(e,t=!1){let o,r=t?" Write operations are available.":" Running in guest mode \u2014 only read operations are available.";return e?o=`Connected to ${e.sitename} (${e.base}). Running ${e.generator}.${r} When connecting for the first time, call the get-site-info tool for full site details.`:o=`When connecting to this server for the first time, call the get-site-info tool to understand the target MediaWiki site (version, namespaces, extensions, etc.) before using other tools.${r}`,new ne({name:"nodemw-mcp-server",version:"1.0.0",description:o},{capabilities:{tools:{}}})}import{z as q}from"zod";import se from"nodemw";var F=null,ie=null,tt=!1;function et(e){ie=e}function ot(e){let{server:t,path:o,protocol:r,port:a,proxy:c,userAgent:m,concurrency:p,debug:u,username:g,password:M,domain:U,dryRun:D}=e;return new se({server:t,protocol:r||"https",port:a,path:o,proxy:c,userAgent:m||K,concurrency:p,debug:u,username:g||void 0,password:M||void 0,domain:U,dryRun:D})}async function le(e){try{return await s(e,"getSiteInfo",["general"]),!0}catch{return!1}}async function rt(e){let t=["/w",""];for(let o of t){let r={...e,path:o},a=ot(r);if(await le(a))return o}throw new Error('Could not auto-detect MediaWiki API path. Tried /w/api.php and /api.php. Please specify --path explicitly (e.g., --path /w or --path "" for root).')}async function nt(e){F=ot(e);let{username:t,password:o}=e;return t&&o&&await new Promise((r,a)=>{F.logIn(c=>{c?a(new Error(`Login failed for user '${t}': ${c.message}`)):(tt=!0,r())})}),F}function n(){if(!F)throw new Error("Bot not initialized. 
Server must be started first.");return F}function st(){return tt}function s(e,t,...o){return new Promise((r,a)=>{let c=(m,p)=>{m?a(m):r(p)};e[t](...o,c)})}function it(e){return e.tool("get-article","Retrieve the content of a wiki article",{title:q.string().describe("Article title"),followRedirect:q.boolean().optional().default(!0).describe("Follow redirects"),redirectInfo:q.boolean().optional().default(!1).describe("Include information about redirects")},{title:"Get article",readOnlyHint:!0,destructiveHint:!1},async({title:t,followRedirect:o,redirectInfo:r})=>ae(t,o,r))}async function ae(e,t,o){try{let r=await n();if(o){let a=await new Promise((u,g)=>{let M=(U,D,re)=>{U?g(U):u([D,re])};r.getArticle(e,t,M)}),[c,m]=a;return c==null?{content:[{type:"text",text:`Page "${e}" not found or has no content.`}],isError:!0}:{content:[{type:"text",text:m?`Content:
30
29
 
31
- ${c}
30
+ // src/index.ts
31
+ import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
32
+ import { parseArgs } from "util";
33
+
34
+ // src/server.ts
35
+ import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js";
36
+
37
+ // package.json
38
+ var package_default = {
39
+ name: "@xiedada/nodemw-mcp-server",
40
+ version: "0.0.6",
41
+ description: "MCP server for nodemw - MediaWiki API client",
42
+ type: "module",
43
+ main: "dist/index.js",
44
+ bin: {
45
+ "nodemw-mcp-server": "./dist/index.js"
46
+ },
47
+ scripts: {
48
+ build: "node scripts/build.js",
49
+ dev: "node scripts/build.js --watch",
50
+ lint: "eslint src/**/*.ts",
51
+ test: "vitest",
52
+ prepare: "npm run build",
53
+ clean: "rimraf dist"
54
+ },
55
+ keywords: [
56
+ "mcp",
57
+ "mediawiki",
58
+ "nodemw",
59
+ "wiki"
60
+ ],
61
+ author: "",
62
+ license: "BSD-2-Clause",
63
+ dependencies: {
64
+ "@modelcontextprotocol/sdk": "^1.0.0",
65
+ nodemw: "^0.26.0",
66
+ zod: "^3.23.8"
67
+ },
68
+ devDependencies: {
69
+ "@types/node": "^22.0.0",
70
+ "@typescript-eslint/eslint-plugin": "^8.0.0",
71
+ "@typescript-eslint/parser": "^8.0.0",
72
+ esbuild: "^0.24.0",
73
+ eslint: "^9.0.0",
74
+ rimraf: "^5.0.5",
75
+ typescript: "^5.5.0",
76
+ vitest: "^2.0.0"
77
+ },
78
+ files: [
79
+ "dist"
80
+ ],
81
+ engines: {
82
+ node: ">=18.0.0"
83
+ }
84
+ };
85
+
86
+ // src/server.ts
87
+ var USER_AGENT = "nodemw-mcp-server/1.0";
88
+ function createServer(siteInfo, authenticated2 = false) {
89
+ let description;
90
+ const authSuffix = authenticated2 ? " Write operations are available." : " Running in guest mode \u2014 only read operations are available.";
91
+ if (siteInfo) {
92
+ description = `Connected to ${siteInfo.sitename} (${siteInfo.base}). Running ${siteInfo.generator}.${authSuffix} When connecting for the first time, call the get-site-info tool for full site details.`;
93
+ } else {
94
+ description = `When connecting to this server for the first time, call the get-site-info tool to understand the target MediaWiki site (version, namespaces, extensions, etc.) before using other tools.${authSuffix}`;
95
+ }
96
+ return new McpServer(
97
+ {
98
+ name: "nodemw-mcp-server",
99
+ version: package_default.version,
100
+ description
101
+ },
102
+ { capabilities: { tools: {} } }
103
+ );
104
+ }
105
+
106
+ // src/tools/ro/get-article.ts
107
+ import { z } from "zod";
108
+
109
+ // src/common/nodemwBot.ts
110
+ import Bot from "nodemw";
111
+ var botInstance = null;
112
+ var serverConfig = null;
113
+ var authenticated = false;
114
+ function initServerConfig(config) {
115
+ serverConfig = config;
116
+ }
117
+ function createBotFromConfig(config) {
118
+ const {
119
+ server,
120
+ path,
121
+ protocol,
122
+ port,
123
+ proxy,
124
+ userAgent,
125
+ concurrency,
126
+ debug,
127
+ username,
128
+ password,
129
+ domain,
130
+ dryRun
131
+ } = config;
132
+ return new Bot({
133
+ server,
134
+ protocol: protocol || "https",
135
+ port,
136
+ path,
137
+ proxy,
138
+ userAgent: userAgent || USER_AGENT,
139
+ concurrency,
140
+ debug,
141
+ username: username || void 0,
142
+ password: password || void 0,
143
+ domain,
144
+ // @ts-expect-error: dryRun is supported by nodemw at runtime but missing from BotOptions types
145
+ dryRun
146
+ });
147
+ }
148
+ async function testApiConnection(bot) {
149
+ try {
150
+ await promisifyBotMethod(bot, "getSiteInfo", ["general"]);
151
+ return true;
152
+ } catch {
153
+ return false;
154
+ }
155
+ }
156
+ async function autoDetectPath(baseConfig) {
157
+ const pathsToTry = ["/w", ""];
158
+ for (const path of pathsToTry) {
159
+ const testConfig = { ...baseConfig, path };
160
+ const bot = createBotFromConfig(testConfig);
161
+ if (await testApiConnection(bot)) {
162
+ return path;
163
+ }
164
+ }
165
+ throw new Error(
166
+ 'Could not auto-detect MediaWiki API path. Tried /w/api.php and /api.php. Please specify --path explicitly (e.g., --path /w or --path "" for root).'
167
+ );
168
+ }
169
+ async function initBot(config) {
170
+ botInstance = createBotFromConfig(config);
171
+ const { username, password } = config;
172
+ if (username && password) {
173
+ await new Promise((resolve, reject) => {
174
+ botInstance.logIn((err) => {
175
+ if (err) {
176
+ reject(new Error(`Login failed for user '${username}': ${err.message}`));
177
+ } else {
178
+ authenticated = true;
179
+ resolve();
180
+ }
181
+ });
182
+ });
183
+ }
184
+ return botInstance;
185
+ }
186
+ function getBot() {
187
+ if (!botInstance) {
188
+ throw new Error("Bot not initialized. Server must be started first.");
189
+ }
190
+ return botInstance;
191
+ }
192
+ function isAuthenticated() {
193
+ return authenticated;
194
+ }
195
+ function promisifyBotMethod(bot, method, ...args) {
196
+ return new Promise((resolve, reject) => {
197
+ const callback = (err, result) => {
198
+ if (err) {
199
+ reject(err);
200
+ } else {
201
+ resolve(result);
202
+ }
203
+ };
204
+ bot[method](...args, callback);
205
+ });
206
+ }
207
+
208
+ // src/common/pageState.ts
209
+ var readState = /* @__PURE__ */ new Map();
210
+ function markAsRead(pageid, revid) {
211
+ readState.set(pageid, revid);
212
+ }
213
+ function isRead(pageid) {
214
+ return readState.has(pageid);
215
+ }
216
+ async function requireRead(title) {
217
+ const bot = getBot();
218
+ const pages = await promisifyBotMethod(
219
+ bot,
220
+ "getArticleInfo",
221
+ title,
222
+ { prop: "info" }
223
+ );
224
+ if (!Array.isArray(pages) || pages.length === 0) {
225
+ return 0;
226
+ }
227
+ const page = pages[0];
228
+ if (!page || page.missing) {
229
+ return 0;
230
+ }
231
+ if (page.pageid != null && page.lastrevid != null) {
232
+ if (!isRead(page.pageid)) {
233
+ throw new Error(
234
+ `Page "${title}" (pageid ${page.pageid}) has NOT been read. You MUST call get-article first to fetch the current page content before editing. This is a safety requirement to prevent accidental data loss.`
235
+ );
236
+ }
237
+ }
238
+ return page.pageid ?? 0;
239
+ }
240
+
241
+ // src/tools/ro/get-article.ts
242
+ async function recordReadState(title) {
243
+ try {
244
+ const bot = await getBot();
245
+ const pages = await promisifyBotMethod(
246
+ bot,
247
+ "getArticleInfo",
248
+ title,
249
+ { prop: "info" }
250
+ );
251
+ const page = Array.isArray(pages) ? pages[0] : null;
252
+ if (page?.pageid != null && page?.lastrevid != null) {
253
+ markAsRead(page.pageid, page.lastrevid);
254
+ }
255
+ } catch {
256
+ }
257
+ }
258
+ function getArticleTool(server) {
259
+ return server.tool(
260
+ "get-article",
261
+ "Retrieve the content of a wiki article",
262
+ {
263
+ title: z.string().describe("Article title"),
264
+ followRedirect: z.boolean().optional().default(true).describe("Follow redirects"),
265
+ redirectInfo: z.boolean().optional().default(false).describe("Include information about redirects")
266
+ },
267
+ {
268
+ title: "Get article",
269
+ readOnlyHint: true,
270
+ destructiveHint: false
271
+ },
272
+ async ({ title, followRedirect, redirectInfo }) => handleGetArticleTool(title, followRedirect, redirectInfo)
273
+ );
274
+ }
275
+ async function handleGetArticleTool(title, followRedirect, redirectInfo) {
276
+ try {
277
+ const bot = await getBot();
278
+ if (redirectInfo) {
279
+ const result = await new Promise((resolve, reject) => {
280
+ const callback = (err, content2, redirectInfo2) => {
281
+ if (err) {
282
+ reject(err);
283
+ } else {
284
+ resolve([content2, redirectInfo2]);
285
+ }
286
+ };
287
+ bot.getArticle(title, followRedirect, callback);
288
+ });
289
+ const [content, redirect] = result;
290
+ if (content == null) {
291
+ return {
292
+ content: [{ type: "text", text: `Page "${title}" not found or has no content.` }],
293
+ isError: true
294
+ };
295
+ }
296
+ const responseText = redirect ? `Content:
297
+
298
+ ${content}
32
299
 
33
300
  Redirect Information:
34
301
 
35
- ${JSON.stringify(m,null,2)}`:c}]}}else{let a=await s(r,"getArticle",e,t);return a==null?{content:[{type:"text",text:`Page "${e}" not found or has no content.`}],isError:!0}:{content:[{type:"text",text:a}]}}}catch(r){return{content:[{type:"text",text:`Error: ${r.message}`}],isError:!0}}}import{z as T}from"zod";function l(e){return{content:[{type:"text",text:JSON.stringify(e,null,2)}],structuredContent:e}}function i(e,t){return{content:[{type:"text",text:JSON.stringify({error:e,details:t?.message},null,2)}],isError:!0}}function lt(e){let t=e.tool("search","Search for wiki pages by keyword",{keyword:T.string().describe("Search keyword"),limit:T.number().optional().default(10).describe("Maximum number of results")},{title:"Search",readOnlyHint:!0,destructiveHint:!1},async({keyword:o,limit:r})=>ce(o,r));return t.update({outputSchema:{total:T.number(),limit:T.number(),keyword:T.string(),results:T.array(T.record(T.unknown()))}}),t}async function ce(e,t){try{let o=await n(),r=await s(o,"search",e),a=r.slice(0,t);return l({total:r.length,limit:t,keyword:e,results:a})}catch(o){return i("Failed to search",o)}}import{z as C}from"zod";function at(e){let t=e.tool("get-pages-in-category","Get all pages in a category",{category:C.string().describe("Category name (with or without Category: prefix)")},{title:"Get pages in category",readOnlyHint:!0,destructiveHint:!1},async({category:o})=>me(o));return t.update({outputSchema:{category:C.string(),pages:C.array(C.record(C.unknown())),count:C.number()}}),t}async function me(e){try{let t=await n(),o=e.replace(/^Category:/i,""),r=await s(t,"getPagesInCategory",o);return l({category:o,pages:r,count:r.length})}catch(t){return i("Failed to get pages in category",t)}}import{z as P}from"zod";function ct(e){let t=e.tool("get-categories","Get all categories matching a prefix",{prefix:P.string().optional().default("").describe("Prefix to filter categories")},{title:"Get 
categories",readOnlyHint:!0,destructiveHint:!1},async({prefix:o})=>pe(o));return t.update({outputSchema:{prefix:P.string(),categories:P.array(P.record(P.unknown())),count:P.number()}}),t}async function pe(e){try{let t=await n(),o=await s(t,"getCategories",e);return l({prefix:e,categories:o,count:o.length})}catch(t){return i("Failed to get categories",t)}}import{z as h}from"zod";function mt(e){let t=e.tool("get-users","Get all users matching a prefix",{prefix:h.string().optional().default("").describe("Prefix to filter usernames"),onlyWithEdits:h.boolean().optional().default(!1).describe("Only include users with at least one edit")},{title:"Get users",readOnlyHint:!0,destructiveHint:!1},async({prefix:o,onlyWithEdits:r})=>ue(o,r));return t.update({outputSchema:{prefix:h.string(),onlyWithEdits:h.boolean(),users:h.array(h.record(h.unknown())),count:h.number()}}),t}async function ue(e,t){try{let o=await n(),r=await s(o,"getUsers",{prefix:e,witheditsonly:t});return l({prefix:e,onlyWithEdits:t,users:r,count:r.length})}catch(o){return i("Failed to get users",o)}}import{z as A}from"zod";function pt(e){let t=e.tool("get-all-pages","Get all non-redirect pages from the wiki",{limit:A.number().optional().default(500).describe("Maximum number of pages to return")},{title:"Get all pages",readOnlyHint:!0,destructiveHint:!1},async({limit:o})=>de(o));return t.update({outputSchema:{total:A.number(),displayed:A.number(),pages:A.array(A.record(A.unknown())),limit:A.number()}}),t}async function de(e){try{let t=await n(),o=await s(t,"getAllPages"),r=o.slice(0,e);return l({total:o.length,displayed:r.length,pages:r,limit:e})}catch(t){return i("Failed to get all pages",t)}}import{z as j}from"zod";function ut(e){let t=e.tool("get-pages-in-namespace","Get all non-redirect pages in a specific namespace",{namespace:j.number().describe("Namespace number to filter pages")},{title:"Get pages in namespace",readOnlyHint:!0,destructiveHint:!1},async({namespace:o})=>ge(o));return 
t.update({outputSchema:{namespace:j.number(),pages:j.array(j.record(j.unknown())),count:j.number()}}),t}async function ge(e){try{let t=await n(),o=await s(t,"getPagesInNamespace",e);return l({namespace:e,pages:o,count:o.length})}catch(t){return i("Failed to get pages in namespace",t)}}import{z as B}from"zod";function dt(e){let t=e.tool("get-pages-by-prefix","Get pages starting with a specific prefix",{prefix:B.string().describe("Prefix to match page titles")},{title:"Get pages by prefix",readOnlyHint:!0,destructiveHint:!1},async({prefix:o})=>fe(o));return t.update({outputSchema:{prefix:B.string(),pages:B.array(B.record(B.unknown())),count:B.number()}}),t}async function fe(e){try{let t=await n(),o=await s(t,"getPagesByPrefix",e);return l({prefix:e,pages:o,count:o.length})}catch(t){return i("Failed to get pages by prefix",t)}}import{z as E}from"zod";function gt(e){let t=e.tool("get-pages-transcluding","Get all pages that transclude (include) a specific template",{template:E.string().describe("Template title to find transclusions")},{title:"Get pages transcluding template",readOnlyHint:!0,destructiveHint:!1},async({template:o})=>ye(o));return t.update({outputSchema:{template:E.string(),pages:E.array(E.record(E.unknown())),count:E.number()}}),t}async function ye(e){try{let t=await n(),r=(await s(t,"getPagesTranscluding",e))[1],a=Array.isArray(r)?r.filter(c=>c!=null&&typeof c=="object"&&"title"in c):[];return l({template:e,pages:a,count:a.length})}catch(t){return i("Failed to get pages transcluding template",t)}}import{z as R}from"zod";function ft(e){let t=e.tool("get-article-revisions","Get all revisions of a wiki article",{title:R.union([R.string(),R.number()]).describe("Article title or page ID")},{title:"Get article revisions",readOnlyHint:!0,destructiveHint:!1},async({title:o})=>Te(o));return t.update({outputSchema:{title:R.string(),revisions:R.array(R.record(R.unknown())),count:R.number()}}),t}async function Te(e){try{let t=await n(),r=(await 
s(t,"getArticleRevisions",e)).flat();return l({title:e,revisions:r,count:r.length})}catch(t){return i("Failed to get article revisions",t)}}import{z as b}from"zod";function yt(e){let t=e.tool("get-article-categories","Get all categories that an article belongs to",{title:b.union([b.string(),b.number()]).describe("Article title or page ID")},{title:"Get article categories",readOnlyHint:!0,destructiveHint:!1},async({title:o})=>he(o));return t.update({outputSchema:{title:b.string(),categories:b.array(b.record(b.unknown())),count:b.number()}}),t}async function he(e){try{let t=await n(),o=await s(t,"getArticleCategories",e);return l({title:e,categories:o,count:o.length})}catch(t){return i("Failed to get article categories",t)}}import{z as W}from"zod";function Tt(e){let t=e.tool("get-article-properties","Get page properties for a wiki article",{title:W.string().describe("Article title")},{title:"Get article properties",readOnlyHint:!0,destructiveHint:!1},async({title:o})=>Re(o));return t.update({outputSchema:{title:W.string(),properties:W.record(W.unknown())}}),t}async function Re(e){try{let t=await n(),o=await s(t,"getArticleProperties",e);return l({title:e,properties:o})}catch(t){return i("Failed to get article properties",t)}}import{z as d}from"zod";function ht(e){let t=e.tool("get-article-info","Get detailed information about one or more articles",{title:d.union([d.string(),d.number(),d.array(d.union([d.string(),d.number()]))]).describe("Article title, page ID, or array of titles/IDs"),properties:d.array(d.string()).optional().describe("Specific properties to retrieve")},{title:"Get article info",readOnlyHint:!0,destructiveHint:!1},async({title:o,properties:r})=>be(o,r));return t.update({outputSchema:{title:d.string(),results:d.array(d.record(d.unknown())),count:d.number()}}),t}async function be(e,t){try{let o=await n(),a=await s(o,"getArticleInfo",e,t?{inprop:t}:{}),c=Array.isArray(a)?a:[a];return l({title:e,results:c,count:c.length})}catch(o){return i("Failed to 
get article info",o)}}import{z as f}from"zod";function Rt(e){let t=e.tool("get-user-contribs","Get contributions made by a specific user",{username:f.string().describe("Username to get contributions for"),namespace:f.number().optional().describe("Filter contributions by namespace"),limit:f.number().optional().default(50).describe("Maximum number of contributions to return")},{title:"Get user contributions",readOnlyHint:!0,destructiveHint:!1},async({username:o,namespace:r,limit:a})=>xe(o,r,a));return t.update({outputSchema:{username:f.string(),namespace:f.number(),limit:f.number(),total:f.number(),displayed:f.number(),contributions:f.array(f.record(f.unknown()))}}),t}async function xe(e,t,o=50){try{let r=await n(),a={user:e,...t!==void 0&&{namespace:t}},c=await s(r,"getUserContribs",a),m=Array.isArray(c[1])?c[1]:[],p=m.slice(0,o);return l({username:e,namespace:t,limit:o,total:m.length,displayed:p.length,contributions:p})}catch(r){return i("Failed to get user contributions",r)}}function bt(e){let t=e.tool("whoami","Get information about the currently logged in user",{},{title:"Who am I",readOnlyHint:!0,destructiveHint:!1},async()=>ve());return t.update({outputSchema:{}}),t}async function ve(){try{let e=await n(),t=await s(e,"whoami");return l(t)}catch(e){return i("Failed to get current user info",e)}}import{z as we}from"zod";function xt(e){let t=e.tool("whois","Get information about a specific user",{username:we.string().describe("Username to look up")},{title:"Whois",readOnlyHint:!0,destructiveHint:!1},async({username:o})=>ke(o));return t.update({outputSchema:{}}),t}async function ke(e){try{let t=await n(),o=await s(t,"whois",e);return o.missing?i(`User "${e}" not found.`):l(o)}catch(t){return i("Failed to get user info",t)}}import{z as vt}from"zod";function wt(e){let t=e.tool("whoare","Get information about multiple wiki users",{usernames:vt.array(vt.string()).describe("Array of usernames to query")},{title:"Who are",readOnlyHint:!0,destructiveHint:!1},async 
o=>Se(o));return t.update({outputSchema:{}}),t}async function Se(e){try{let t=await n(),o=await s(t,"whoare",e.usernames);return l(o)}catch(t){return i("Failed to get user information",t)}}import{z as x}from"zod";function kt(e){let t=e.tool("get-images","Get list of images starting from a specific name",{startFrom:x.string().optional().default("").describe("Start from this image name"),limit:x.number().optional().default(50).describe("Maximum number of images to return")},{title:"Get images",readOnlyHint:!0,destructiveHint:!1},async({startFrom:o,limit:r})=>Ae(o,r));return t.update({outputSchema:{total:x.number(),limit:x.number(),startFrom:x.string(),images:x.array(x.record(x.unknown()))}}),t}async function Ae(e,t){try{let o=await n(),r=await new Promise((c,m)=>{o.getImages(e,(p,...u)=>{if(p)m(p);else{let g=u[0];c(Array.isArray(g)?g:[])}})}),a=r.slice(0,t);return l({total:r.length,limit:t,startFrom:e,images:a})}catch(o){return i("Failed to get images",o)}}import{z as v}from"zod";function St(e){let t=e.tool("get-images-from-article","Get all images embedded in a specific article",{title:v.union([v.string(),v.number()]).describe("Article title or page ID")},{title:"Get images from article",readOnlyHint:!0,destructiveHint:!1},async({title:o})=>Me(o));return t.update({outputSchema:{title:v.string(),images:v.array(v.record(v.unknown())),count:v.number()}}),t}async function Me(e){try{let t=await n(),o=await s(t,"getImagesFromArticle",e);return l({title:e,images:o,count:o.length})}catch(t){return i("Failed to get images from article",t)}}import{z as H}from"zod";function At(e){let t=e.tool("get-image-usage","Get all pages that use a specific image",{filename:H.string().describe("Image filename with File: prefix")},{title:"Get image usage",readOnlyHint:!0,destructiveHint:!1},async({filename:o})=>Ce(o));return t.update({outputSchema:{filename:H.string(),pages:H.array(H.record(H.unknown())),count:H.number()}}),t}async function Ce(e){try{let t=await n(),o=await 
s(t,"getImageUsage",e);return l({filename:e,pages:o,count:o.length})}catch(t){return i("Failed to get image usage",t)}}import{z as $}from"zod";function Mt(e){let t=e.tool("get-image-info","Get detailed information about an image file",{filename:$.string().describe("Image filename with File: prefix")},{title:"Get image info",readOnlyHint:!0,destructiveHint:!1},async({filename:o})=>Pe(o));return t.update({outputSchema:{filename:$.string(),info:$.record($.unknown())}}),t}async function Pe(e){try{let t=await n(),o=await s(t,"getImageInfo",e);return o?l({filename:e,info:o}):i(`Image "${e}" not found.`)}catch(t){return i("Failed to get image info",t)}}import{z as y}from"zod";function Ct(e){let t=e.tool("get-log","Get log entries of a specific type",{type:y.string().describe("Log type (e.g. delete, block, move)"),start:y.string().optional().default("").describe("Start timestamp (YYYYMMDDHHMMSS format)"),limit:y.number().optional().default(50).describe("Maximum number of entries to return")},{title:"Get log entries",readOnlyHint:!0,destructiveHint:!1},async({type:o,start:r,limit:a})=>je(o,r,a));return t.update({outputSchema:{type:y.string(),start:y.string(),limit:y.number(),total:y.number(),displayed:y.number(),entries:y.array(y.record(y.unknown()))}}),t}async function je(e,t,o){try{let r=await n(),a=await new Promise((m,p)=>{r.getLog(e,t,(u,...g)=>{if(u)p(u);else{let M=g[0];m(Array.isArray(M)?M:[])}})}),c=a.slice(0,o);return l({type:e,start:t,limit:o,total:a.length,displayed:c.length,entries:c})}catch(r){return i("Failed to get log entries",r)}}import{z as Pt}from"zod";function jt(e){return e.tool("expand-templates","Expand templates in wikitext",{text:Pt.string().describe("Wikitext with templates to expand"),title:Pt.string().optional().describe("Context page title")},{title:"Expand templates",readOnlyHint:!0,destructiveHint:!1},async({text:t,title:o})=>Be(t,o))}async function Be(e,t){try{let o=await n(),r=await s(o,"expandTemplates",e,t||"");return 
r==null?{content:[{type:"text",text:`Failed to expand templates for "${e}".`}],isError:!0}:{content:[{type:"text",text:r}]}}catch(o){return{content:[{type:"text",text:`Error: ${o.message}`}],isError:!0}}}import{z as Bt}from"zod";function Et(e){return e.tool("parse","Parse wikitext to HTML",{text:Bt.string().describe("Wikitext to parse"),title:Bt.string().optional().describe("Context page title")},{title:"Parse wikitext",readOnlyHint:!0,destructiveHint:!1},async({text:t,title:o})=>Ee(t,o))}async function Ee(e,t){try{let o=await n(),r=await s(o,"parse",e,t||""),a=r[1]||"",c=Array.isArray(r[2])?r[2]:[];return{content:[{type:"text",text:["Parsed XML structure:","",a,"",`Images found: ${c.length>0?c.join(", "):"none"}`].join(`
36
- `)}]}}catch(o){return{content:[{type:"text",text:`Error: ${o.message}`}],isError:!0}}}import{z as w}from"zod";function Ht(e){let t=e.tool("get-recent-changes","Get recent changes on the wiki",{start:w.string().optional().describe("Start timestamp"),limit:w.number().optional().default(50).describe("Maximum number of changes to return")},{title:"Get recent changes",readOnlyHint:!0,destructiveHint:!1},async({start:o,limit:r})=>He(o,r));return t.update({outputSchema:{total:w.number(),limit:w.number(),start:w.string(),changes:w.array(w.record(w.unknown()))}}),t}async function He(e,t=50){try{let o=await n(),r=await new Promise((c,m)=>{o.getRecentChanges(e,(p,...u)=>{if(p)m(p);else{let g=u[0];c(Array.isArray(g)?g:[])}})}),a=r.slice(0,t);return l({total:r.length,limit:t,start:e,changes:a})}catch(o){return i("Failed to get recent changes",o)}}import{z as It}from"zod";function Gt(e){let t=e.tool("get-site-info","Get site information from MediaWiki",{properties:It.array(It.string()).describe("List of site information properties to retrieve")},{title:"Get site info",readOnlyHint:!0,destructiveHint:!1},async({properties:o})=>Ie(o));return t.update({outputSchema:{}}),t}async function Ie(e){try{let t=await n(),o=await s(t,"getSiteInfo",e);return l(o||{})}catch(t){return i("Failed to get site info",t)}}function Ft(e){let t=e.tool("get-site-stats","Get site statistics",{},{title:"Get site stats",readOnlyHint:!0,destructiveHint:!1},async()=>Ge());return t.update({outputSchema:{}}),t}async function Ge(){try{let e=await n(),t=await s(e,"getSiteStats");return l(t)}catch(e){return i("Failed to get site stats",e)}}function Ot(e){let t=e.tool("get-mediawiki-version","Get MediaWiki version running on the server",{},{title:"Get MediaWiki version",readOnlyHint:!0,destructiveHint:!1},async()=>Fe());return t.update({outputSchema:{version:z.string()}}),t}async function Fe(){try{let e=await n(),t=await s(e,"getMediaWikiVersion");return l({version:t})}catch(e){return i("Failed to get MediaWiki 
version",e)}}import{z as I}from"zod";function Ut(e){let t=e.tool("get-query-page","Get results from a query page (special page)",{name:I.string().describe("Name of the query page")},{title:"Get query page results",readOnlyHint:!0,destructiveHint:!1},async({name:o})=>Oe(o));return t.update({outputSchema:{name:I.string(),results:I.array(I.record(I.unknown())),count:I.number()}}),t}async function Oe(e){try{let t=await n(),o=await s(t,"getQueryPage",e);return l({name:e,results:o,count:o.length})}catch(t){return i("Failed to get query page results",t)}}import{z as k}from"zod";function zt(e){let t=e.tool("get-external-links","Get all external links from an article",{title:k.union([k.string(),k.number()]).describe("Article title or page ID")},{title:"Get external links",readOnlyHint:!0,destructiveHint:!1},async({title:o})=>Ue(o));return t.update({outputSchema:{title:k.string(),links:k.array(k.record(k.unknown())),count:k.number()}}),t}async function Ue(e){try{let t=await n(),o=await s(t,"getExternalLinks",e);return l({title:e,links:o.map(r=>r["*"]),count:o.length})}catch(t){return i("Failed to get external links",t)}}import{z as G}from"zod";function Wt(e){let t=e.tool("get-backlinks","Get all backlinks to a specific page",{title:G.string().describe("Target page title to find backlinks for")},{title:"Get backlinks",readOnlyHint:!0,destructiveHint:!1},async({title:o})=>ze(o));return t.update({outputSchema:{target:G.string(),backlinks:G.array(G.record(G.unknown())),count:G.number()}}),t}async function ze(e){try{let t=await n(),o=await s(t,"getBacklinks",e);return l({target:e,backlinks:o,count:o.length})}catch(t){return i("Failed to get backlinks",t)}}import{z as N}from"zod";function $t(e){let t=e.tool("edit","Edit a wiki page (requires authentication)",{title:N.string().describe("Page title to edit"),content:N.string().describe("New content for the page"),summary:N.string().describe("Edit summary"),minor:N.boolean().optional().default(!1).describe("Mark as minor 
edit")},{title:"Edit page",readOnlyHint:!1,destructiveHint:!0},async o=>We(o));return t.update({outputSchema:{}}),t}async function We(e){try{let t=await n(),o=`[nodemw-mcp] ${e.summary}`,r=await s(t,"edit",e.title,e.content,o,e.minor||!1);return l(r)}catch(t){return i("Failed to edit page",t)}}import{z as O}from"zod";function Nt(e){let t=e.tool("append","Append content to a wiki page (requires authentication)",{title:O.string().describe("Page title"),content:O.string().describe("Content to append"),summary:O.string().describe("Edit summary")},{title:"Append to page",readOnlyHint:!1,destructiveHint:!0},async o=>$e(o));return t.update({outputSchema:{success:O.boolean(),title:O.string()}}),t}async function $e(e){try{let t=await n(),o=`[nodemw-mcp] ${e.summary}`;return await s(t,"append",e.title,e.content,o),l({success:!0,title:e.title})}catch(t){return i("Failed to append to page",t)}}import{z as _}from"zod";function Lt(e){let t=e.tool("prepend","Prepend content to a wiki page (requires authentication)",{title:_.string().describe("Page title to prepend to"),content:_.string().describe("Content to prepend"),summary:_.string().describe("Edit summary")},{title:"Prepend to page",readOnlyHint:!1,destructiveHint:!0},async o=>Ne(o));return t.update({outputSchema:{}}),t}async function Ne(e){try{let t=await n(),o=`[nodemw-mcp] ${e.summary}`,r=await s(t,"prepend",e.title,e.content,o);return l(r)}catch(t){return i("Failed to prepend to page",t)}}import{z as Q}from"zod";function Dt(e){let t=e.tool("move","Move (rename) a wiki page (requires authentication)",{from:Q.string().describe("Current page title"),to:Q.string().describe("New page title"),summary:Q.string().describe("Move summary")},{title:"Move page",readOnlyHint:!1,destructiveHint:!0},async o=>Le(o));return t.update({outputSchema:{}}),t}async function Le(e){try{let t=await n(),o=`[nodemw-mcp] ${e.summary}`,r=await s(t,"move",e.from,e.to,o);return l(r)}catch(t){return i("Failed to move page",t)}}import{z as 
qt}from"zod";function _t(e){let t=e.tool("delete","Delete a wiki page (requires authentication)",{title:qt.string().describe("Page title to delete"),reason:qt.string().describe("Reason for deletion")},{title:"Delete page",readOnlyHint:!1,destructiveHint:!0},async o=>De(o));return t.update({outputSchema:{}}),t}async function De(e){try{let t=await n(),o=`[nodemw-mcp] ${e.reason}`,r=await s(t,"delete",e.title,o);return l(r)}catch(t){return i("Failed to delete page",t)}}import{z as S}from"zod";function Qt(e){let t=e.tool("protect","Protect a wiki page (requires authentication)",{title:S.string().describe("Page title to protect"),protections:S.array(S.object({type:S.string().describe("Action type (e.g., edit, move)"),level:S.string().optional().default("all").describe("Protection level (e.g., sysop, autoconfirmed)"),expiry:S.string().optional().describe("Expiry time (e.g., 1 week, never)")})).describe("Protection settings"),reason:S.string().optional().describe("Reason for protection"),cascade:S.boolean().optional().default(!1).describe("Apply cascade protection")},{title:"Protect page",readOnlyHint:!1,destructiveHint:!0},async o=>qe(o));return t.update({outputSchema:{}}),t}async function qe(e){try{let t=await n(),o={};e.reason&&(o.reason=`[nodemw-mcp] ${e.reason}`),e.cascade&&(o.cascade=e.cascade);let r=await s(t,"protect",e.title,e.protections,o);return l(r)}catch(t){return i("Failed to protect page",t)}}import{z as L}from"zod";function Vt(e){let t=e.tool("purge","Purge cache for wiki pages",{titles:L.union([L.string(),L.array(L.string())]).describe("Page title(s) or category to purge")},{title:"Purge pages",readOnlyHint:!1,destructiveHint:!1},async o=>_e(o));return t.update({outputSchema:{}}),t}async function _e(e){try{let t=await n(),o=await s(t,"purge",e.titles);return l(o)}catch(t){return i("Failed to purge pages",t)}}import{z as V}from"zod";function Yt(e){let t=e.tool("send-email","Send email to a wiki user (requires 
authentication)",{username:V.string().describe("Username to email"),subject:V.string().describe("Email subject"),text:V.string().describe("Email content")},{title:"Send email",readOnlyHint:!1,destructiveHint:!1},async o=>Qe(o));return t.update({outputSchema:{}}),t}async function Qe(e){try{let t=await n(),o=await s(t,"sendEmail",e.username,e.subject,e.text);return l(o)}catch(t){return i("Failed to send email",t)}}import{z as Y}from"zod";function Jt(e){let t=e.tool("upload","Upload a file to wiki (requires authentication)",{filename:Y.string().describe("Destination filename on wiki"),content:Y.string().describe("File content as base64 string"),comment:Y.string().optional().describe("Upload comment")},{title:"Upload file",readOnlyHint:!1,destructiveHint:!1},async o=>Ve(o));return t.update({outputSchema:{}}),t}async function Ve(e){try{let t=await n(),o=Buffer.from(e.content,"base64"),r=e.comment?`[nodemw-mcp] ${e.comment}`:"[nodemw-mcp] File upload",a=await s(t,"upload",e.filename,o,r);return l(a)}catch(t){return i("Failed to upload file",t)}}import{z as J}from"zod";function Xt(e){let t=e.tool("upload-by-url","Upload a file to wiki from URL (requires authentication)",{filename:J.string().describe("Destination filename on wiki"),url:J.string().url().describe("Source URL to download file from"),summary:J.string().optional().describe("Upload summary")},{title:"Upload file by URL",readOnlyHint:!1,destructiveHint:!1},async o=>Ye(o));return t.update({outputSchema:{}}),t}async function Ye(e){try{let t=await n(),o=e.summary?`[nodemw-mcp] ${e.summary}`:"[nodemw-mcp] File upload from URL",r=await s(t,"uploadByUrl",e.filename,e.url,o);return l(r)}catch(t){return i("Failed to upload file by URL",t)}}import{z as X}from"zod";function Kt(e){let t=e.tool("add-flow-topic","Add a new Flow topic to a wiki page (requires authentication)",{title:X.string().describe("Page title to add topic to"),subject:X.string().describe("Topic subject"),content:X.string().describe("Topic content in 
wikitext")},{title:"Add Flow topic",readOnlyHint:!1,destructiveHint:!1},async o=>Je(o));return t.update({outputSchema:{}}),t}async function Je(e){try{let t=await n(),o=await s(t,"addFlowTopic",e.title,e.subject,e.content);return l(o)}catch(t){return i("Failed to add Flow topic",t)}}import{z as Zt}from"zod";function te(e){let t=e.tool("create-account","Create a new MediaWiki user account (requires authentication)",{username:Zt.string().describe("New account username"),password:Zt.string().describe("New account password")},{title:"Create user account",readOnlyHint:!1,destructiveHint:!1},async o=>Xe(o));return t.update({outputSchema:{}}),t}async function Xe(e){try{let t=await n(),o=await s(t,"createAccount",e.username,e.password);return l(o)}catch(t){return i("Failed to create account",t)}}var ee=[it,lt,at,ct,mt,pt,ut,dt,gt,ft,yt,Tt,ht,Rt,bt,xt,wt,kt,St,At,Mt,Ct,jt,Et,Ht,Gt,Ft,Ot,Ut,zt,Wt],Ke=[$t,Nt,Lt,Dt,_t,Qt,Vt,Yt,Jt,Xt,Kt,te];function oe(e,t=!0){let o=t?[...ee,...Ke]:ee,r=[];for(let a of o)try{r.push(a(e))}catch(c){console.error(`Error registering tool: ${c.message}`)}return r}function eo(){let{values:e,positionals:t}=to({options:{server:{type:"string",short:"s"},path:{type:"string"},endpoint:{type:"string"},user:{type:"string",short:"u"},pass:{type:"string",short:"p"},token:{type:"string"},"dry-run":{type:"boolean"}},strict:!1,allowPositionals:!0}),o=e.server??t[0]??process.env.NODEMW_MCP_SERVER;o||(console.error("Error: target server is required (-s, positional arg, or NODEMW_MCP_SERVER env)"),process.exit(1));let r,a,c;try{if(o.startsWith("http://")||o.startsWith("https://")){let u=new URL(o);r=u.hostname,a=u.protocol.replace(":",""),u.port&&(c=parseInt(u.port,10))}else r=o}catch{r=o}let 
m=process.env.NODEMW_MCP_ENDPOINT_PATH,p=!!(e.path??e.endpoint??m);return{config:{server:r,protocol:a,port:c,path:e.path??e.endpoint??m??"/w",username:e.user??process.env.NODEMW_MCP_MW_USER,password:e.pass??process.env.NODEMW_MCP_MW_PASS,token:e.token,dryRun:e["dry-run"]},pathExplicit:p}}async function oo(){let{config:e,pathExplicit:t}=eo();if(!t)try{e.path=await rt(e),console.error(`Auto-detected API path: ${e.path}`)}catch(p){console.error("Error:",p.message),process.exit(1)}et(e);try{await nt(e)}catch(p){console.error("Error:",p.message),process.exit(1)}let o=n(),r;try{let u=(await s(o,"getSiteInfo",["general"]))?.general;u&&(r={sitename:u.sitename||"Unknown",base:u.base||"",generator:u.generator||"MediaWiki"})}catch{console.error("Warning: Could not fetch site info for server description.")}let a=st(),c=Z(r,a);oe(c,a);let m=new Ze;await c.connect(m)}oo().catch(console.error);
37
- //# sourceMappingURL=index.js.map
302
+ ${JSON.stringify(redirect, null, 2)}` : content;
303
+ await recordReadState(title);
304
+ return {
305
+ content: [{ type: "text", text: responseText }]
306
+ };
307
+ } else {
308
+ const result = await promisifyBotMethod(
309
+ bot,
310
+ "getArticle",
311
+ title,
312
+ followRedirect
313
+ );
314
+ if (result == null) {
315
+ return {
316
+ content: [{ type: "text", text: `Page "${title}" not found or has no content.` }],
317
+ isError: true
318
+ };
319
+ }
320
+ await recordReadState(title);
321
+ return {
322
+ content: [{ type: "text", text: result }]
323
+ };
324
+ }
325
+ } catch (error) {
326
+ return {
327
+ content: [{ type: "text", text: `Error: ${error.message}` }],
328
+ isError: true
329
+ };
330
+ }
331
+ }
332
+
333
+ // src/tools/ro/search.ts
334
+ import { z as z2 } from "zod";
335
+
336
+ // src/common/utils.ts
337
// Wrap arbitrary data as an MCP tool result: pretty-printed JSON text plus
// the raw value in `structuredContent` for schema-aware clients.
function jsonResult(data) {
  const text = JSON.stringify(data, null, 2);
  return {
    content: [{ type: "text", text }],
    structuredContent: data
  };
}
346
// Build an MCP error result. `error` is optional; when present its message is
// surfaced under `details` (undefined details are dropped by JSON.stringify).
function errorResult(message, error) {
  const payload = {
    error: message,
    details: error?.message
  };
  return {
    content: [{ type: "text", text: JSON.stringify(payload, null, 2) }],
    isError: true
  };
}
358
+
359
+ // src/tools/ro/search.ts
360
// Register the read-only "search" tool (keyword search over wiki pages).
function searchTool(server) {
  const params = {
    keyword: z2.string().describe("Search keyword"),
    limit: z2.number().optional().default(10).describe("Maximum number of results")
  };
  const hints = { title: "Search", readOnlyHint: true, destructiveHint: false };
  const tool = server.tool(
    "search",
    "Search for wiki pages by keyword",
    params,
    hints,
    async ({ keyword, limit }) => handleSearchTool(keyword, limit)
  );
  tool.update({ outputSchema: { total: z2.number(), limit: z2.number(), keyword: z2.string(), results: z2.array(z2.record(z2.unknown())) } });
  return tool;
}
378
// Run a wiki search and return up to `limit` hits (total reflects all hits).
async function handleSearchTool(keyword, limit) {
  try {
    const bot = await getBot();
    const hits = await promisifyBotMethod(bot, "search", keyword);
    return jsonResult({
      total: hits.length,
      limit,
      keyword,
      results: hits.slice(0, limit)
    });
  } catch (error) {
    return errorResult("Failed to search", error);
  }
}
397
+
398
+ // src/tools/ro/get-pages-in-category.ts
399
+ import { z as z3 } from "zod";
400
// Register the read-only "get-pages-in-category" tool.
function getPagesInCategoryTool(server) {
  const params = {
    category: z3.string().describe("Category name (with or without Category: prefix)")
  };
  const hints = { title: "Get pages in category", readOnlyHint: true, destructiveHint: false };
  const tool = server.tool(
    "get-pages-in-category",
    "Get all pages in a category",
    params,
    hints,
    async ({ category }) => handleGetPagesInCategoryTool(category)
  );
  tool.update({ outputSchema: { category: z3.string(), pages: z3.array(z3.record(z3.unknown())), count: z3.number() } });
  return tool;
}
417
// List category members; a leading "Category:" prefix is stripped first.
async function handleGetPagesInCategoryTool(category) {
  try {
    const bot = await getBot();
    const name = category.replace(/^Category:/i, "");
    const pages = await promisifyBotMethod(bot, "getPagesInCategory", name);
    return jsonResult({
      category: name,
      pages,
      count: pages.length
    });
  } catch (error) {
    return errorResult("Failed to get pages in category", error);
  }
}
435
+
436
+ // src/tools/ro/get-categories.ts
437
+ import { z as z4 } from "zod";
438
// Register the read-only "get-categories" tool (prefix-filtered category list).
function getCategoriesTool(server) {
  const params = {
    prefix: z4.string().optional().default("").describe("Prefix to filter categories")
  };
  const hints = { title: "Get categories", readOnlyHint: true, destructiveHint: false };
  const tool = server.tool(
    "get-categories",
    "Get all categories matching a prefix",
    params,
    hints,
    async ({ prefix }) => handleGetCategoriesTool(prefix)
  );
  tool.update({ outputSchema: { prefix: z4.string(), categories: z4.array(z4.record(z4.unknown())), count: z4.number() } });
  return tool;
}
455
// Fetch categories whose names start with `prefix`.
async function handleGetCategoriesTool(prefix) {
  try {
    const bot = await getBot();
    const categories = await promisifyBotMethod(bot, "getCategories", prefix);
    return jsonResult({
      prefix,
      categories,
      count: categories.length
    });
  } catch (error) {
    return errorResult("Failed to get categories", error);
  }
}
472
+
473
+ // src/tools/ro/get-users.ts
474
+ import { z as z5 } from "zod";
475
// Register the read-only "get-users" tool (prefix-filtered user list).
function getUsersTool(server) {
  const params = {
    prefix: z5.string().optional().default("").describe("Prefix to filter usernames"),
    onlyWithEdits: z5.boolean().optional().default(false).describe("Only include users with at least one edit")
  };
  const hints = { title: "Get users", readOnlyHint: true, destructiveHint: false };
  const tool = server.tool(
    "get-users",
    "Get all users matching a prefix",
    params,
    hints,
    async ({ prefix, onlyWithEdits }) => handleGetUsersTool(prefix, onlyWithEdits)
  );
  tool.update({ outputSchema: { prefix: z5.string(), onlyWithEdits: z5.boolean(), users: z5.array(z5.record(z5.unknown())), count: z5.number() } });
  return tool;
}
493
// Query users; nodemw expects the flag under the API name `witheditsonly`.
async function handleGetUsersTool(prefix, onlyWithEdits) {
  try {
    const bot = await getBot();
    const query = { prefix, witheditsonly: onlyWithEdits };
    const users = await promisifyBotMethod(bot, "getUsers", query);
    return jsonResult({
      prefix,
      onlyWithEdits,
      users,
      count: users.length
    });
  } catch (error) {
    return errorResult("Failed to get users", error);
  }
}
511
+
512
+ // src/tools/ro/get-all-pages.ts
513
+ import { z as z6 } from "zod";
514
// Register the read-only "get-all-pages" tool (capped page dump).
function getAllPagesTool(server) {
  const params = {
    limit: z6.number().optional().default(500).describe("Maximum number of pages to return")
  };
  const hints = { title: "Get all pages", readOnlyHint: true, destructiveHint: false };
  const tool = server.tool(
    "get-all-pages",
    "Get all non-redirect pages from the wiki",
    params,
    hints,
    async ({ limit }) => handleGetAllPagesTool(limit)
  );
  tool.update({ outputSchema: { total: z6.number(), displayed: z6.number(), pages: z6.array(z6.record(z6.unknown())), limit: z6.number() } });
  return tool;
}
531
// Fetch every page, then truncate locally to `limit` for the response.
async function handleGetAllPagesTool(limit) {
  try {
    const bot = await getBot();
    const everything = await promisifyBotMethod(bot, "getAllPages");
    const shown = everything.slice(0, limit);
    return jsonResult({
      total: everything.length,
      displayed: shown.length,
      pages: shown,
      limit
    });
  } catch (error) {
    return errorResult("Failed to get all pages", error);
  }
}
549
+
550
+ // src/tools/ro/get-pages-in-namespace.ts
551
+ import { z as z7 } from "zod";
552
// Register the read-only "get-pages-in-namespace" tool.
function getPagesInNamespaceTool(server) {
  const params = {
    namespace: z7.number().describe("Namespace number to filter pages")
  };
  const hints = { title: "Get pages in namespace", readOnlyHint: true, destructiveHint: false };
  const tool = server.tool(
    "get-pages-in-namespace",
    "Get all non-redirect pages in a specific namespace",
    params,
    hints,
    async ({ namespace }) => handleGetPagesInNamespaceTool(namespace)
  );
  tool.update({ outputSchema: { namespace: z7.number(), pages: z7.array(z7.record(z7.unknown())), count: z7.number() } });
  return tool;
}
569
// List non-redirect pages belonging to a numeric namespace.
async function handleGetPagesInNamespaceTool(namespace) {
  try {
    const bot = await getBot();
    const pages = await promisifyBotMethod(bot, "getPagesInNamespace", namespace);
    return jsonResult({
      namespace,
      pages,
      count: pages.length
    });
  } catch (error) {
    return errorResult("Failed to get pages in namespace", error);
  }
}
586
+
587
+ // src/tools/ro/get-pages-by-prefix.ts
588
+ import { z as z8 } from "zod";
589
// Register the read-only "get-pages-by-prefix" tool.
function getPagesByPrefixTool(server) {
  const params = {
    prefix: z8.string().describe("Prefix to match page titles")
  };
  const hints = { title: "Get pages by prefix", readOnlyHint: true, destructiveHint: false };
  const tool = server.tool(
    "get-pages-by-prefix",
    "Get pages starting with a specific prefix",
    params,
    hints,
    async ({ prefix }) => handleGetPagesByPrefixTool(prefix)
  );
  tool.update({ outputSchema: { prefix: z8.string(), pages: z8.array(z8.record(z8.unknown())), count: z8.number() } });
  return tool;
}
606
// List pages whose titles start with `prefix`.
async function handleGetPagesByPrefixTool(prefix) {
  try {
    const bot = await getBot();
    const pages = await promisifyBotMethod(bot, "getPagesByPrefix", prefix);
    return jsonResult({
      prefix,
      pages,
      count: pages.length
    });
  } catch (error) {
    return errorResult("Failed to get pages by prefix", error);
  }
}
623
+
624
+ // src/tools/ro/get-pages-transcluding.ts
625
+ import { z as z9 } from "zod";
626
// Register the read-only "get-pages-transcluding" tool.
function getPagesTranscludingTool(server) {
  const params = {
    template: z9.string().describe("Template title to find transclusions")
  };
  const hints = { title: "Get pages transcluding template", readOnlyHint: true, destructiveHint: false };
  const tool = server.tool(
    "get-pages-transcluding",
    "Get all pages that transclude (include) a specific template",
    params,
    hints,
    async ({ template }) => handleGetPagesTranscludingTool(template)
  );
  tool.update({ outputSchema: { template: z9.string(), pages: z9.array(z9.record(z9.unknown())), count: z9.number() } });
  return tool;
}
643
// List transclusions. This bot method resolves to the raw callback-argument
// array; the page list sits at index 1 and is defensively filtered to objects
// that carry a `title` key.
async function handleGetPagesTranscludingTool(template) {
  try {
    const bot = await getBot();
    const callbackArgs = await promisifyBotMethod(bot, "getPagesTranscluding", template);
    const candidate = callbackArgs[1];
    const looksLikePage = (page) => page != null && typeof page === "object" && "title" in page;
    const pages = Array.isArray(candidate) ? candidate.filter(looksLikePage) : [];
    return jsonResult({
      template,
      pages,
      count: pages.length
    });
  } catch (error) {
    return errorResult("Failed to get pages transcluding template", error);
  }
}
662
+
663
+ // src/tools/ro/get-article-revisions.ts
664
+ import { z as z10 } from "zod";
665
// Register the read-only "get-article-revisions" tool.
function getArticleRevisionsTool(server) {
  const params = {
    title: z10.union([z10.string(), z10.number()]).describe("Article title or page ID")
  };
  const hints = { title: "Get article revisions", readOnlyHint: true, destructiveHint: false };
  const tool = server.tool(
    "get-article-revisions",
    "Get all revisions of a wiki article",
    params,
    hints,
    async ({ title }) => handleGetArticleRevisionsTool(title)
  );
  tool.update({ outputSchema: { title: z10.string(), revisions: z10.array(z10.record(z10.unknown())), count: z10.number() } });
  return tool;
}
682
// Fetch an article's revision history; batched results are flattened one level.
async function handleGetArticleRevisionsTool(title) {
  try {
    const bot = await getBot();
    const batches = await promisifyBotMethod(bot, "getArticleRevisions", title);
    const revisions = batches.flat();
    return jsonResult({
      title,
      revisions,
      count: revisions.length
    });
  } catch (error) {
    return errorResult("Failed to get article revisions", error);
  }
}
700
+
701
+ // src/tools/ro/get-article-categories.ts
702
+ import { z as z11 } from "zod";
703
// Register the read-only "get-article-categories" tool.
function getArticleCategoriesTool(server) {
  const params = {
    title: z11.union([z11.string(), z11.number()]).describe("Article title or page ID")
  };
  const hints = { title: "Get article categories", readOnlyHint: true, destructiveHint: false };
  const tool = server.tool(
    "get-article-categories",
    "Get all categories that an article belongs to",
    params,
    hints,
    async ({ title }) => handleGetArticleCategoriesTool(title)
  );
  tool.update({ outputSchema: { title: z11.string(), categories: z11.array(z11.string()), count: z11.number() } });
  return tool;
}
720
// Fetch the category names an article belongs to.
async function handleGetArticleCategoriesTool(title) {
  try {
    const bot = await getBot();
    const categories = await promisifyBotMethod(bot, "getArticleCategories", title);
    return jsonResult({
      title,
      categories,
      count: categories.length
    });
  } catch (error) {
    return errorResult("Failed to get article categories", error);
  }
}
737
+
738
+ // src/tools/ro/get-article-properties.ts
739
+ import { z as z12 } from "zod";
740
// Register the read-only "get-article-properties" tool.
function getArticlePropertiesTool(server) {
  const params = {
    title: z12.string().describe("Article title")
  };
  const hints = { title: "Get article properties", readOnlyHint: true, destructiveHint: false };
  const tool = server.tool(
    "get-article-properties",
    "Get page properties for a wiki article",
    params,
    hints,
    async ({ title }) => handleGetArticlePropertiesTool(title)
  );
  tool.update({ outputSchema: { title: z12.string(), properties: z12.record(z12.unknown()) } });
  return tool;
}
757
// Fetch page properties for a single article title.
async function handleGetArticlePropertiesTool(title) {
  try {
    const bot = await getBot();
    const properties = await promisifyBotMethod(bot, "getArticleProperties", title);
    return jsonResult({
      title,
      properties
    });
  } catch (error) {
    return errorResult("Failed to get article properties", error);
  }
}
773
+
774
+ // src/tools/ro/get-article-info.ts
775
+ import { z as z13 } from "zod";
776
// Register the read-only "get-article-info" tool; accepts a single title/ID
// or an array of them.
function getArticleInfoTool(server) {
  const params = {
    title: z13.union([
      z13.string(),
      z13.number(),
      z13.array(z13.union([z13.string(), z13.number()]))
    ]).describe("Article title, page ID, or array of titles/IDs"),
    properties: z13.array(z13.string()).optional().describe("Specific properties to retrieve")
  };
  const hints = { title: "Get article info", readOnlyHint: true, destructiveHint: false };
  const tool = server.tool(
    "get-article-info",
    "Get detailed information about one or more articles",
    params,
    hints,
    async ({ title, properties }) => handleGetArticleInfoTool(title, properties)
  );
  tool.update({ outputSchema: { title: z13.string(), results: z13.array(z13.record(z13.unknown())), count: z13.number() } });
  return tool;
}
798
// Fetch article metadata; `properties` maps to the API's `inprop` option.
// A scalar response is normalized into a one-element array.
async function handleGetArticleInfoTool(title, properties) {
  try {
    const bot = await getBot();
    const options = properties ? { inprop: properties } : {};
    const info = await promisifyBotMethod(bot, "getArticleInfo", title, options);
    const results = Array.isArray(info) ? info : [info];
    return jsonResult({
      title,
      results,
      count: results.length
    });
  } catch (error) {
    return errorResult("Failed to get article info", error);
  }
}
818
+
819
+ // src/tools/ro/get-user-contribs.ts
820
+ import { z as z14 } from "zod";
821
// Register the read-only "get-user-contribs" tool.
function getUserContribsTool(server) {
  const params = {
    username: z14.string().describe("Username to get contributions for"),
    namespace: z14.number().optional().describe("Filter contributions by namespace"),
    limit: z14.number().optional().default(50).describe("Maximum number of contributions to return")
  };
  const hints = { title: "Get user contributions", readOnlyHint: true, destructiveHint: false };
  const tool = server.tool(
    "get-user-contribs",
    "Get contributions made by a specific user",
    params,
    hints,
    async ({ username, namespace, limit }) => handleGetUserContribsTool(username, namespace, limit)
  );
  tool.update({ outputSchema: { username: z14.string(), namespace: z14.number(), limit: z14.number(), total: z14.number(), displayed: z14.number(), contributions: z14.array(z14.record(z14.unknown())) } });
  return tool;
}
840
// Fetch a user's contributions. The bot method resolves to the raw callback
// arguments; the contribution list sits at index 1. Results are truncated
// locally to `limit` (defaults to 50 when called directly).
async function handleGetUserContribsTool(username, namespace, limit = 50) {
  try {
    const bot = await getBot();
    const options = { user: username };
    if (namespace !== void 0) {
      options.namespace = namespace;
    }
    const callbackArgs = await promisifyBotMethod(bot, "getUserContribs", options);
    const contribs = Array.isArray(callbackArgs[1]) ? callbackArgs[1] : [];
    const shown = contribs.slice(0, limit);
    return jsonResult({
      username,
      namespace,
      limit,
      total: contribs.length,
      displayed: shown.length,
      contributions: shown
    });
  } catch (error) {
    return errorResult("Failed to get user contributions", error);
  }
}
866
+
867
+ // src/tools/ro/whoami.ts
868
// Register the read-only "whoami" tool (current authenticated user).
function whoamiTool(server) {
  const hints = { title: "Who am I", readOnlyHint: true, destructiveHint: false };
  const tool = server.tool(
    "whoami",
    "Get information about the currently logged in user",
    {},
    hints,
    async () => handleWhoamiTool()
  );
  tool.update({ outputSchema: {} });
  return tool;
}
883
// Query the wiki for the identity of the current session's user.
async function handleWhoamiTool() {
  try {
    const bot = await getBot();
    const me = await promisifyBotMethod(bot, "whoami");
    return jsonResult(me);
  } catch (error) {
    return errorResult("Failed to get current user info", error);
  }
}
895
+
896
+ // src/tools/ro/whois.ts
897
+ import { z as z15 } from "zod";
898
// Register the read-only "whois" tool (single-user lookup).
function whoisTool(server) {
  const params = {
    username: z15.string().describe("Username to look up")
  };
  const hints = { title: "Whois", readOnlyHint: true, destructiveHint: false };
  const tool = server.tool(
    "whois",
    "Get information about a specific user",
    params,
    hints,
    async ({ username }) => handleWhoisTool(username)
  );
  tool.update({ outputSchema: {} });
  return tool;
}
915
// Look up one user. The API flags unknown users via a `missing` marker on the
// returned record, which we translate into an error result.
async function handleWhoisTool(username) {
  try {
    const bot = await getBot();
    const record = await promisifyBotMethod(bot, "whois", username);
    if (record.missing) {
      return errorResult(`User "${username}" not found.`);
    }
    return jsonResult(record);
  } catch (error) {
    return errorResult("Failed to get user info", error);
  }
}
931
+
932
+ // src/tools/ro/whoare.ts
933
+ import { z as z16 } from "zod";
934
// Register the read-only "whoare" tool (batch user lookup).
function whoareTool(server) {
  const params = {
    usernames: z16.array(z16.string()).describe("Array of usernames to query")
  };
  const hints = { title: "Who are", readOnlyHint: true, destructiveHint: false };
  const tool = server.tool(
    "whoare",
    "Get information about multiple wiki users",
    params,
    hints,
    async (params2) => handleWhoareTool(params2)
  );
  tool.update({ outputSchema: {} });
  return tool;
}
951
// Look up several users at once.
async function handleWhoareTool(params) {
  try {
    const bot = await getBot();
    const records = await promisifyBotMethod(bot, "whoare", params.usernames);
    return jsonResult(records);
  } catch (error) {
    return errorResult("Failed to get user information", error);
  }
}
964
+
965
+ // src/tools/ro/get-images.ts
966
+ import { z as z17 } from "zod";
967
// Register the read-only "get-images" tool (paginated image listing).
function getImagesTool(server) {
  const params = {
    startFrom: z17.string().optional().default("").describe("Start from this image name"),
    limit: z17.number().optional().default(50).describe("Maximum number of images to return")
  };
  const hints = { title: "Get images", readOnlyHint: true, destructiveHint: false };
  const tool = server.tool(
    "get-images",
    "Get list of images starting from a specific name",
    params,
    hints,
    async ({ startFrom, limit }) => handleGetImagesTool(startFrom, limit)
  );
  tool.update({ outputSchema: { total: z17.number(), limit: z17.number(), startFrom: z17.string(), images: z17.array(z17.record(z17.unknown())) } });
  return tool;
}
985
// List images starting at `startFrom`. bot.getImages uses a multi-argument
// callback, so it is adapted to a Promise by hand; the first non-error
// argument is the image list (coerced to [] when absent).
async function handleGetImagesTool(startFrom, limit) {
  try {
    const bot = await getBot();
    const images = await new Promise((resolve, reject) => {
      bot.getImages(startFrom, (err, ...rest) => {
        if (err) {
          reject(err);
          return;
        }
        const list = rest[0];
        resolve(Array.isArray(list) ? list : []);
      });
    });
    return jsonResult({
      total: images.length,
      limit,
      startFrom,
      images: images.slice(0, limit)
    });
  } catch (error) {
    return errorResult("Failed to get images", error);
  }
}
1009
+
1010
+ // src/tools/ro/get-images-from-article.ts
1011
+ import { z as z18 } from "zod";
1012
// Register the read-only "get-images-from-article" tool.
function getImagesFromArticleTool(server) {
  const params = {
    title: z18.union([z18.string(), z18.number()]).describe("Article title or page ID")
  };
  const hints = { title: "Get images from article", readOnlyHint: true, destructiveHint: false };
  const tool = server.tool(
    "get-images-from-article",
    "Get all images embedded in a specific article",
    params,
    hints,
    async ({ title }) => handleGetImagesFromArticleTool(title)
  );
  tool.update({ outputSchema: { title: z18.string(), images: z18.array(z18.record(z18.unknown())), count: z18.number() } });
  return tool;
}
1029
// List images embedded in one article.
async function handleGetImagesFromArticleTool(title) {
  try {
    const bot = await getBot();
    const images = await promisifyBotMethod(bot, "getImagesFromArticle", title);
    return jsonResult({
      title,
      images,
      count: images.length
    });
  } catch (error) {
    return errorResult("Failed to get images from article", error);
  }
}
1046
+
1047
+ // src/tools/ro/get-image-usage.ts
1048
+ import { z as z19 } from "zod";
1049
// Register the read-only "get-image-usage" tool.
function getImageUsageTool(server) {
  const params = {
    filename: z19.string().describe("Image filename with File: prefix")
  };
  const hints = { title: "Get image usage", readOnlyHint: true, destructiveHint: false };
  const tool = server.tool(
    "get-image-usage",
    "Get all pages that use a specific image",
    params,
    hints,
    async ({ filename }) => handleGetImageUsageTool(filename)
  );
  tool.update({ outputSchema: { filename: z19.string(), pages: z19.array(z19.record(z19.unknown())), count: z19.number() } });
  return tool;
}
1066
// List the pages embedding a given image file.
async function handleGetImageUsageTool(filename) {
  try {
    const bot = await getBot();
    const pages = await promisifyBotMethod(bot, "getImageUsage", filename);
    return jsonResult({
      filename,
      pages,
      count: pages.length
    });
  } catch (error) {
    return errorResult("Failed to get image usage", error);
  }
}
1083
+
1084
+ // src/tools/ro/get-image-info.ts
1085
+ import { z as z20 } from "zod";
1086
// Register the read-only "get-image-info" tool.
function getImageInfoTool(server) {
  const params = {
    filename: z20.string().describe("Image filename with File: prefix")
  };
  const hints = { title: "Get image info", readOnlyHint: true, destructiveHint: false };
  const tool = server.tool(
    "get-image-info",
    "Get detailed information about an image file",
    params,
    hints,
    async ({ filename }) => handleGetImageInfoTool(filename)
  );
  tool.update({ outputSchema: { filename: z20.string(), info: z20.record(z20.unknown()) } });
  return tool;
}
1103
// Fetch metadata for one image file and wrap it as a JSON tool result.
// Returns an error result when the wiki reports no info for the file.
async function handleGetImageInfoTool(filename) {
  try {
    const bot = await getBot();
    const info = await promisifyBotMethod(bot, "getImageInfo", filename);
    if (!info) {
      // Fix: the not-found message previously contained the literal text
      // "$(unknown)" (broken interpolation) instead of the requested filename.
      return errorResult(`Image "${filename}" not found.`);
    }
    return jsonResult({
      filename,
      info
    });
  } catch (error) {
    return errorResult("Failed to get image info", error);
  }
}
1122
+
1123
+ // src/tools/ro/get-log.ts
1124
+ import { z as z21 } from "zod";
1125
// Register the read-only "get-log" tool (typed log entries).
function getLogTool(server) {
  const params = {
    type: z21.string().describe("Log type (e.g. delete, block, move)"),
    start: z21.string().optional().default("").describe("Start timestamp (YYYYMMDDHHMMSS format)"),
    limit: z21.number().optional().default(50).describe("Maximum number of entries to return")
  };
  const hints = { title: "Get log entries", readOnlyHint: true, destructiveHint: false };
  const tool = server.tool(
    "get-log",
    "Get log entries of a specific type",
    params,
    hints,
    async ({ type, start, limit }) => handleGetLogTool(type, start, limit)
  );
  tool.update({ outputSchema: { type: z21.string(), start: z21.string(), limit: z21.number(), total: z21.number(), displayed: z21.number(), entries: z21.array(z21.record(z21.unknown())) } });
  return tool;
}
1144
// Fetch log entries of one type. bot.getLog uses a multi-argument callback,
// adapted to a Promise by hand; the first non-error argument is the entry
// list (coerced to [] when absent).
async function handleGetLogTool(type, start, limit) {
  try {
    const bot = await getBot();
    const entries = await new Promise((resolve, reject) => {
      bot.getLog(type, start, (err, ...rest) => {
        if (err) {
          reject(err);
          return;
        }
        const list = rest[0];
        resolve(Array.isArray(list) ? list : []);
      });
    });
    const shown = entries.slice(0, limit);
    return jsonResult({
      type,
      start,
      limit,
      total: entries.length,
      displayed: shown.length,
      entries: shown
    });
  } catch (error) {
    return errorResult("Failed to get log entries", error);
  }
}
1170
+
1171
+ // src/tools/ro/expand-templates.ts
1172
+ import { z as z22 } from "zod";
1173
// Register the read-only "expand-templates" tool (no output schema is set).
function expandTemplatesTool(server) {
  const params = {
    text: z22.string().describe("Wikitext with templates to expand"),
    title: z22.string().optional().describe("Context page title")
  };
  const hints = { title: "Expand templates", readOnlyHint: true, destructiveHint: false };
  return server.tool(
    "expand-templates",
    "Expand templates in wikitext",
    params,
    hints,
    async ({ text, title }) => handleExpandTemplatesTool(text, title)
  );
}
1189
// Expand templates in wikitext via the API; the expanded result is returned
// as plain text. `== null` deliberately matches both null and undefined.
async function handleExpandTemplatesTool(text, title) {
  try {
    const bot = await getBot();
    const expanded = await promisifyBotMethod(bot, "expandTemplates", text, title || "");
    if (expanded == null) {
      return {
        content: [{ type: "text", text: `Failed to expand templates for "${text}".` }],
        isError: true
      };
    }
    return { content: [{ type: "text", text: expanded }] };
  } catch (error) {
    return {
      content: [{ type: "text", text: `Error: ${error.message}` }],
      isError: true
    };
  }
}
1214
+
1215
+ // src/tools/ro/parse.ts
1216
+ import { z as z23 } from "zod";
1217
// Registers the read-only "parse" tool: parses wikitext via the MediaWiki
// parse machinery and reports the result plus any images found.
function parseTool(server) {
  return server.tool(
    "parse",
    "Parse wikitext to HTML",
    {
      text: z23.string().describe("Wikitext to parse"),
      title: z23.string().optional().describe("Context page title")
    },
    {
      title: "Parse wikitext",
      readOnlyHint: true,
      destructiveHint: false
    },
    async ({ text, title }) => handleParseTool(text, title)
  );
}
// Parses `text` and formats the parsed structure and image list as plain text.
// NOTE(review): unlike other handlers, this indexes the promisified result —
// callbackArgs[1] is assumed to be the XML/markup and callbackArgs[2] the image
// list; this depends on promisifyBotMethod returning the full callback argument
// array for bot.parse — confirm against the helper's implementation.
async function handleParseTool(text, title) {
  try {
    const bot = await getBot();
    const callbackArgs = await promisifyBotMethod(
      bot,
      "parse",
      text,
      title || ""
    );
    const xml = callbackArgs[1] || "";
    // Defensive: only treat the third slot as an image list when it is an array.
    const images = Array.isArray(callbackArgs[2]) ? callbackArgs[2] : [];
    const output = [
      "Parsed XML structure:",
      "",
      xml,
      "",
      `Images found: ${images.length > 0 ? images.join(", ") : "none"}`
    ].join("\n");
    return {
      content: [{ type: "text", text: output }]
    };
  } catch (error) {
    return {
      content: [{ type: "text", text: `Error: ${error.message}` }],
      isError: true
    };
  }
}
1261
+
1262
+ // src/tools/ro/get-recent-changes.ts
1263
+ import { z as z24 } from "zod";
1264
// Registers the read-only "get-recent-changes" tool. The zod default for
// `limit` (50) mirrors the handler's own parameter default.
function getRecentChangesTool(server) {
  const tool = server.tool(
    "get-recent-changes",
    "Get recent changes on the wiki",
    {
      start: z24.string().optional().describe("Start timestamp"),
      limit: z24.number().optional().default(50).describe("Maximum number of changes to return")
    },
    {
      title: "Get recent changes",
      readOnlyHint: true,
      destructiveHint: false
    },
    async ({ start, limit }) => handleGetRecentChangesTool(start, limit)
  );
  tool.update({ outputSchema: { total: z24.number(), limit: z24.number(), start: z24.string(), changes: z24.array(z24.record(z24.unknown())) } });
  return tool;
}
// Fetches recent changes starting at `start` via nodemw's callback-style
// getRecentChanges, then truncates client-side to `limit`.
async function handleGetRecentChangesTool(start, limit = 50) {
  try {
    const bot = await getBot();
    const changes = await new Promise((resolve, reject) => {
      bot.getRecentChanges(start, (err, ...args) => {
        if (err) {
          reject(err);
        } else {
          // Normalize: nodemw may hand back a non-array payload; treat it as empty.
          const chgs = args[0];
          resolve(Array.isArray(chgs) ? chgs : []);
        }
      });
    });
    const limitedChanges = changes.slice(0, limit);
    return jsonResult({
      total: changes.length,   // count before client-side truncation
      limit,
      start,
      changes: limitedChanges
    });
  } catch (error) {
    return errorResult("Failed to get recent changes", error);
  }
}
1306
+
1307
+ // src/tools/ro/get-site-info.ts
1308
+ import { z as z25 } from "zod";
1309
// Registers the read-only "get-site-info" tool (siteinfo meta query).
function getSiteInfoTool(server) {
  const tool = server.tool(
    "get-site-info",
    "Get site information from MediaWiki",
    {
      properties: z25.array(z25.string()).describe("List of site information properties to retrieve")
    },
    {
      title: "Get site info",
      readOnlyHint: true,
      destructiveHint: false
    },
    async ({ properties }) => handleGetSiteInfoTool(properties)
  );
  // Output shape depends on the requested properties, so no schema is pinned.
  tool.update({ outputSchema: {} });
  return tool;
}
// Fetches the requested siteinfo properties and returns them as JSON.
// A null/undefined payload is coerced to an empty object.
async function handleGetSiteInfoTool(properties) {
  try {
    const bot = await getBot();
    const info = await promisifyBotMethod(
      bot,
      "getSiteInfo",
      properties
    );
    return jsonResult(info || {});
  } catch (error) {
    return errorResult("Failed to get site info", error);
  }
}
1339
+
1340
+ // src/tools/ro/get-site-stats.ts
1341
// Registers the read-only "get-site-stats" tool on the MCP server.
function getSiteStatsTool(server) {
  const annotations = {
    title: "Get site stats",
    readOnlyHint: true,
    destructiveHint: false
  };
  const registered = server.tool(
    "get-site-stats",
    "Get site statistics",
    {},
    annotations,
    async () => handleGetSiteStatsTool()
  );
  // Stats shape is site-dependent, so no output schema is pinned.
  registered.update({ outputSchema: {} });
  return registered;
}
// Fetches site statistics via nodemw's getSiteStats and wraps them as JSON.
async function handleGetSiteStatsTool() {
  try {
    const client = await getBot();
    return jsonResult(await promisifyBotMethod(client, "getSiteStats"));
  } catch (err) {
    return errorResult("Failed to get site stats", err);
  }
}
1368
+
1369
+ // src/tools/ro/get-mediawiki-version.ts
1370
+ import { z as z26 } from "zod";
1371
// Registers the read-only "get-mediawiki-version" tool on the MCP server.
function getMediaWikiVersionTool(server) {
  const annotations = {
    title: "Get MediaWiki version",
    readOnlyHint: true,
    destructiveHint: false
  };
  const registered = server.tool(
    "get-mediawiki-version",
    "Get MediaWiki version running on the server",
    {},
    annotations,
    async () => handleGetMediaWikiVersionTool()
  );
  registered.update({ outputSchema: { version: z26.string() } });
  return registered;
}
// Resolves the target wiki's MediaWiki version and returns it as { version }.
async function handleGetMediaWikiVersionTool() {
  try {
    const client = await getBot();
    const version = await promisifyBotMethod(client, "getMediaWikiVersion");
    return jsonResult({ version });
  } catch (err) {
    return errorResult("Failed to get MediaWiki version", err);
  }
}
1398
+
1399
+ // src/tools/ro/get-query-page.ts
1400
+ import { z as z27 } from "zod";
1401
// Registers the read-only "get-query-page" tool (results of a QueryPage-based
// special page, e.g. Special:DoubleRedirects).
function getQueryPageTool(server) {
  const tool = server.tool(
    "get-query-page",
    "Get results from a query page (special page)",
    {
      name: z27.string().describe("Name of the query page")
    },
    {
      title: "Get query page results",
      readOnlyHint: true,
      destructiveHint: false
    },
    async ({ name }) => handleGetQueryPageTool(name)
  );
  tool.update({ outputSchema: { name: z27.string(), results: z27.array(z27.record(z27.unknown())), count: z27.number() } });
  return tool;
}
// Fetches query-page rows for `name`.
// Fix: the promisified callback payload may be undefined/null (or otherwise not
// an array); previously `results.length` would throw a TypeError. Normalize to
// [] first — consistent with the Array.isArray guards already used by the
// get-log and get-recent-changes handlers.
async function handleGetQueryPageTool(name) {
  try {
    const bot = await getBot();
    const results = await promisifyBotMethod(
      bot,
      "getQueryPage",
      name
    );
    const rows = Array.isArray(results) ? results : [];
    return jsonResult({
      name,
      results: rows,
      count: rows.length
    });
  } catch (error) {
    return errorResult("Failed to get query page results", error);
  }
}
1435
+
1436
+ // src/tools/ro/get-external-links.ts
1437
+ import { z as z28 } from "zod";
1438
// Registers the read-only "get-external-links" tool (external URLs used on a page).
function getExternalLinksTool(server) {
  const tool = server.tool(
    "get-external-links",
    "Get all external links from an article",
    {
      title: z28.union([z28.string(), z28.number()]).describe("Article title or page ID")
    },
    {
      title: "Get external links",
      readOnlyHint: true,
      destructiveHint: false
    },
    async ({ title }) => handleGetExternalLinksTool(title)
  );
  tool.update({ outputSchema: { title: z28.string(), links: z28.array(z28.record(z28.unknown())), count: z28.number() } });
  return tool;
}
// Fetches external links for the article and flattens each entry to its URL
// (the "*" key of the API's link object — TODO confirm entry shape for all
// MediaWiki versions).
// Fix: the promisified payload may be undefined/null; previously `links.map`
// and `links.length` would throw a TypeError. Normalize to [] first, matching
// the Array.isArray guards used by the get-log / get-recent-changes handlers.
async function handleGetExternalLinksTool(title) {
  try {
    const bot = await getBot();
    const links = await promisifyBotMethod(
      bot,
      "getExternalLinks",
      title
    );
    const rows = Array.isArray(links) ? links : [];
    return jsonResult({
      title,
      links: rows.map((link) => link["*"]),
      count: rows.length
    });
  } catch (error) {
    return errorResult("Failed to get external links", error);
  }
}
1472
+
1473
+ // src/tools/ro/get-backlinks.ts
1474
+ import { z as z29 } from "zod";
1475
// Registers the read-only "get-backlinks" tool (pages linking to a target page).
function getBacklinksTool(server) {
  const tool = server.tool(
    "get-backlinks",
    "Get all backlinks to a specific page",
    {
      title: z29.string().describe("Target page title to find backlinks for")
    },
    {
      title: "Get backlinks",
      readOnlyHint: true,
      destructiveHint: false
    },
    async ({ title }) => handleGetBacklinksTool(title)
  );
  tool.update({ outputSchema: { target: z29.string(), backlinks: z29.array(z29.record(z29.unknown())), count: z29.number() } });
  return tool;
}
// Fetches backlinks for `title`.
// Fix: the promisified payload may be undefined/null; previously
// `backlinks.length` would throw a TypeError. Normalize to [] first, matching
// the Array.isArray guards used by the get-log / get-recent-changes handlers.
async function handleGetBacklinksTool(title) {
  try {
    const bot = await getBot();
    const backlinks = await promisifyBotMethod(
      bot,
      "getBacklinks",
      title
    );
    const rows = Array.isArray(backlinks) ? backlinks : [];
    return jsonResult({
      target: title,
      backlinks: rows,
      count: rows.length
    });
  } catch (error) {
    return errorResult("Failed to get backlinks", error);
  }
}
1509
+
1510
+ // src/tools/editing/edit.ts
1511
+ import { z as z30 } from "zod";
1512
// Registers the "edit" write tool: full-content replacement of a wiki page,
// with an intent-based sanity check against accidental page wipes.
function editTool(server) {
  const tool = server.tool(
    "edit",
    "Replace the ENTIRE content of a wiki page (requires authentication). CRITICAL: This is a FULL replacement \u2014 content you provide becomes the complete page, not an addition. There is NO undelete/undo tool \u2014 any damage you cause must be manually reverted by a human. To add a category or make a small change, you MUST first call get-article to retrieve the current content, modify it as needed, then pass the FULL modified content here. For appending or prepending without fetching the full page first, use the append/prepend tools instead.",
    {
      title: z30.string().describe("Page title to edit"),
      content: z30.string().describe("COMPLETE new wikitext for the ENTIRE page \u2014 not a snippet, not a prefix, not an appendage. This replaces everything. Always fetch the current content with get-article first, then modify and resubmit the full text."),
      intent: z30.enum(["add", "revise", "delete"]).describe(
        'Your editing intent: "add" = adding content (page should grow), "revise" = modifying content (small net change, must keep \u22653/4 of existing bytes), "delete" = removing significant content (page should shrink significantly)'
      ),
      summary: z30.string().describe("Edit summary describing what was changed and why"),
      minor: z30.boolean().optional().default(false).describe("Mark as minor edit")
    },
    {
      title: "Edit page",
      readOnlyHint: false,
      destructiveHint: true
    },
    async (params) => handleEditTool(params)
  );
  tool.update({ outputSchema: {} });
  return tool;
}
// Replaces the full content of `params.title`.
// Safety net: compares current vs proposed byte sizes and rejects edits whose
// shrinkage contradicts the declared intent, to catch accidental page wipes.
// The intent check only fires when the page shrinks by more than 200 bytes.
async function handleEditTool(params) {
  try {
    const bot = await getBot();
    await requireRead(params.title);
    // Fetch current wikitext; null/undefined means the page does not exist yet,
    // in which case the size check is skipped.
    const currentContent = await promisifyBotMethod(bot, "getArticle", params.title, false);
    if (currentContent != null) {
      const currentBytes = Buffer.byteLength(currentContent, "utf8");
      const proposedBytes = Buffer.byteLength(params.content, "utf8");
      const delta = currentBytes - proposedBytes;   // positive = page shrinks
      if (delta > 200) {
        switch (params.intent) {
          case "add":
            // "add" must never shrink the page.
            if (proposedBytes < currentBytes) {
              return errorResult(
                `Size mismatch: intent is "add" but proposed (${proposedBytes} B) < current (${currentBytes} B). Add operations should not shrink the page. If you meant to remove content, use intent "delete".`
              );
            }
            break;
          case "revise":
            // "revise" must keep at least 3/4 of the existing bytes.
            if (proposedBytes < currentBytes * 3 / 4) {
              return errorResult(
                `Size mismatch: intent is "revise" but proposed (${proposedBytes} B) < 3/4 of current (${currentBytes} B, threshold ${Math.floor(currentBytes * 3 / 4)} B). Revise should keep most content intact. For larger removals, use intent "delete".`
              );
            }
            break;
          case "delete":
            // Even "delete" must keep at least 1/10 of the page — below that
            // it is treated as an accidental wipe.
            if (proposedBytes < currentBytes * 1 / 10) {
              return errorResult(
                `Size mismatch: intent is "delete" but proposed (${proposedBytes} B) < 1/10 of current (${currentBytes} B, threshold ${Math.floor(currentBytes / 10)} B). This looks like an accidental page wipe. If intentional, verify the content is correct and retry.`
              );
            }
            break;
        }
      }
    }
    // Tag the edit summary so wiki admins can trace MCP-originated edits.
    const prefixedSummary = `[nodemw-mcp.edit] ${params.summary}`;
    const result = await promisifyBotMethod(
      bot,
      "edit",
      params.title,
      params.content,
      prefixedSummary,
      params.minor || false
    );
    return jsonResult(result);
  } catch (error) {
    return errorResult("Failed to edit page", error);
  }
}
1584
+
1585
+ // src/tools/editing/append.ts
1586
+ import { z as z31 } from "zod";
1587
// Registers the "append" write tool: adds content at the bottom of a page
// without touching the existing text.
function appendTool(server) {
  const tool = server.tool(
    "append",
    "Append content to the END of a wiki page without changing existing content (requires authentication). Safe for adding categories, interwiki links, or any content that belongs at the bottom of a page.",
    {
      title: z31.string().describe("Page title"),
      content: z31.string().describe('Content to append to the end of the page (e.g., "\\n[[Category:MyCategory]]")'),
      summary: z31.string().describe("Edit summary")
    },
    {
      title: "Append to page",
      readOnlyHint: false,
      destructiveHint: true
    },
    async (params) => handleAppendTool(params)
  );
  tool.update({ outputSchema: { success: z31.boolean(), title: z31.string() } });
  return tool;
}
// Appends `params.content` to the page; the nodemw result is discarded and a
// simple { success, title } object is returned instead.
async function handleAppendTool(params) {
  try {
    const bot = await getBot();
    await requireRead(params.title);
    // Tag the edit summary so wiki admins can trace MCP-originated edits.
    const prefixedSummary = `[nodemw-mcp.append] ${params.summary}`;
    await promisifyBotMethod(
      bot,
      "append",
      params.title,
      params.content,
      prefixedSummary
    );
    return jsonResult({ success: true, title: params.title });
  } catch (error) {
    return errorResult("Failed to append to page", error);
  }
}
1623
+
1624
+ // src/tools/editing/prepend.ts
1625
+ import { z as z32 } from "zod";
1626
// Registers the "prepend" write tool: adds content at the top of a page
// without touching the existing text.
function prependTool(server) {
  const tool = server.tool(
    "prepend",
    "Prepend content to the TOP of a wiki page without changing existing content (requires authentication). Useful for adding notices, templates, or cleanup tags that belong at the top of a page.",
    {
      title: z32.string().describe("Page title to prepend to"),
      content: z32.string().describe('Content to prepend to the top of the page (e.g., "{{Cleanup}}\\n")'),
      summary: z32.string().describe("Edit summary")
    },
    {
      title: "Prepend to page",
      readOnlyHint: false,
      destructiveHint: true
    },
    async (params) => handlePrependTool(params)
  );
  tool.update({ outputSchema: {} });
  return tool;
}
// Prepends `params.content` to the page and returns nodemw's raw result
// (unlike append, which synthesizes its own { success, title } payload).
async function handlePrependTool(params) {
  try {
    const bot = await getBot();
    await requireRead(params.title);
    // Tag the edit summary so wiki admins can trace MCP-originated edits.
    const prefixedSummary = `[nodemw-mcp.prepend] ${params.summary}`;
    const result = await promisifyBotMethod(
      bot,
      "prepend",
      params.title,
      params.content,
      prefixedSummary
    );
    return jsonResult(result);
  } catch (error) {
    return errorResult("Failed to prepend to page", error);
  }
}
1662
+
1663
+ // src/tools/editing/move.ts
1664
+ import { z as z33 } from "zod";
1665
// Registers the "move" write tool: renames a page (MediaWiki leaves a redirect
// at the old title and moves the history with the page).
function moveTool(server) {
  const tool = server.tool(
    "move",
    "Move (rename) a wiki page \u2014 changes the page title and creates a redirect from the old name (requires authentication). The old page title becomes a redirect to the new title. All page history moves with the page.",
    {
      from: z33.string().describe("Current/existing page title to rename"),
      to: z33.string().describe("New target page title \u2014 must not already exist (unless moving to overwrite)"),
      summary: z33.string().describe("Reason for the move (visible in move log)")
    },
    {
      title: "Move page",
      readOnlyHint: false,
      destructiveHint: true
    },
    async (params) => handleMoveTool(params)
  );
  tool.update({ outputSchema: {} });
  return tool;
}
// Moves `params.from` to `params.to` and returns nodemw's raw result.
async function handleMoveTool(params) {
  try {
    const bot = await getBot();
    await requireRead(params.from);
    // Tag the move reason so wiki admins can trace MCP-originated actions.
    const prefixedSummary = `[nodemw-mcp.move] ${params.summary}`;
    const result = await promisifyBotMethod(
      bot,
      "move",
      params.from,
      params.to,
      prefixedSummary
    );
    return jsonResult(result);
  } catch (error) {
    return errorResult("Failed to move page", error);
  }
}
1701
+
1702
+ // src/tools/editing/delete.ts
1703
+ import { z as z34 } from "zod";
1704
// Registers the "delete" write tool: permanent page deletion (no undelete
// counterpart is exposed by this server).
function deleteTool(server) {
  const tool = server.tool(
    "delete",
    "PERMANENTLY delete a wiki page (requires authentication). CRITICAL: This action is IRREVERSIBLE \u2014 there is NO undelete/undo tool available. Any deletion must be manually restored by a human administrator. Only delete a page when the user explicitly asks for it. Always verify the title is correct before proceeding.",
    {
      title: z34.string().describe("Exact page title to permanently delete \u2014 double-check this is correct"),
      reason: z34.string().describe("Detailed reason for deletion (visible in deletion log)")
    },
    {
      title: "Delete page",
      readOnlyHint: false,
      destructiveHint: true
    },
    async (params) => handleDeleteTool(params)
  );
  tool.update({ outputSchema: {} });
  return tool;
}
// Deletes `params.title` and returns nodemw's raw result.
async function handleDeleteTool(params) {
  try {
    const bot = await getBot();
    await requireRead(params.title);
    // Tag the deletion reason so wiki admins can trace MCP-originated actions.
    const prefixedReason = `[nodemw-mcp.delete] ${params.reason}`;
    const result = await promisifyBotMethod(
      bot,
      "delete",
      params.title,
      prefixedReason
    );
    return jsonResult(result);
  } catch (error) {
    return errorResult("Failed to delete page", error);
  }
}
1738
+
1739
+ // src/tools/editing/protect.ts
1740
+ import { z as z35 } from "zod";
1741
// Registers the "protect" write tool: changes page protection (edit/move
// restrictions). Setting level "all" removes a restriction.
function protectTool(server) {
  const tool = server.tool(
    "protect",
    'Protect or unprotect a wiki page to restrict editing/moving (requires authentication). CRITICAL: Protection can lock out legitimate editors \u2014 only protect pages when there is a clear need (ongoing vandalism, edit war, high-risk template, policy page). To remove protection, set level to "all". Available levels: "all" (anyone), "autoconfirmed" (trusted users), "sysop" (admins only).',
    {
      title: z35.string().describe("Page title to protect or unprotect"),
      protections: z35.array(
        z35.object({
          type: z35.enum(["edit", "move"]).describe('Action to restrict: "edit" or "move"'),
          level: z35.enum(["all", "autoconfirmed", "sysop"]).optional().default("all").describe(
            'Who can perform this action: "all" = no restriction, "autoconfirmed" = trusted users only, "sysop" = admins only'
          ),
          expiry: z35.string().optional().describe('How long protection lasts (e.g. "1 day", "1 week", "infinite"). Default is indefinite.')
        })
      ).describe('Protection rules \u2014 typically one entry for "edit" and optionally one for "move". Example: [{type:"edit",level:"sysop",expiry:"1 week"}]'),
      reason: z35.string().optional().describe("Reason for changing protection, visible in the page log"),
      cascade: z35.boolean().optional().default(false).describe("If true, transcluded templates/pages inherit this protection. Only works with full sysop protection on edit. Use with caution.")
    },
    {
      title: "Protect page",
      readOnlyHint: false,
      destructiveHint: true
    },
    async (params) => handleProtectTool(params)
  );
  tool.update({ outputSchema: {} });
  return tool;
}
// Applies the given protection rules to `params.title`.
// Only `reason` and `cascade` are forwarded as options, and only when set.
async function handleProtectTool(params) {
  try {
    const bot = await getBot();
    await requireRead(params.title);
    const options = {};
    if (params.reason) {
      // Tag the reason so wiki admins can trace MCP-originated actions.
      options.reason = `[nodemw-mcp.protect] ${params.reason}`;
    }
    if (params.cascade) {
      options.cascade = params.cascade;
    }
    const result = await promisifyBotMethod(
      bot,
      "protect",
      params.title,
      params.protections,
      options
    );
    return jsonResult(result);
  } catch (error) {
    return errorResult("Failed to protect page", error);
  }
}
1792
+
1793
+ // src/tools/editing/purge.ts
1794
+ import { z as z36 } from "zod";
1795
// Registers the "purge" write tool: invalidates MediaWiki's server-side render
// cache for the given pages. Non-destructive, but still a write action.
function purgeTool(server) {
  const annotations = {
    title: "Purge pages",
    readOnlyHint: false,
    destructiveHint: false
  };
  const registered = server.tool(
    "purge",
    "Purge the server-side cache for one or more wiki pages (requires authentication). Forces MediaWiki to regenerate the page from current wikitext. This is a safe, non-destructive action.",
    {
      titles: z36.union([z36.string(), z36.array(z36.string())]).describe("Page title(s) or category name to purge")
    },
    annotations,
    async (params) => handlePurgeTool(params)
  );
  registered.update({ outputSchema: {} });
  return registered;
}
// Purges the cache for `params.titles` and returns nodemw's raw result.
async function handlePurgeTool(params) {
  try {
    const client = await getBot();
    const outcome = await promisifyBotMethod(client, "purge", params.titles);
    return jsonResult(outcome);
  } catch (err) {
    return errorResult("Failed to purge pages", err);
  }
}
1825
+
1826
+ // src/tools/editing/send-email.ts
1827
+ import { z as z37 } from "zod";
1828
// Registers the "send-email" write tool: sends a real email through
// Special:EmailUser-style wiki email to a named user.
function sendEmailTool(server) {
  const tool = server.tool(
    "send-email",
    "Send an ACTUAL email to a wiki user via the wiki's built-in email system (requires authentication). CRITICAL: This sends a real email to the user's registered address \u2014 it is NOT a simulation. The recipient will see it came from the authenticated bot operator's wiki account. Abuse (spam, harassment, unsolicited messages) WILL result in the bot account being blocked. ONLY use this when the human user has explicitly asked you to send an email.",
    {
      username: z37.string().describe("Target wiki username \u2014 email goes to their registered email address"),
      subject: z37.string().describe("Email subject line \u2014 be clear and professional, no misleading subjects"),
      text: z37.string().describe("Plain text email body \u2014 will be delivered as-is to the recipient's inbox")
    },
    {
      title: "Send email",
      readOnlyHint: false,
      destructiveHint: true
    },
    async (params) => handleSendEmailTool(params)
  );
  tool.update({ outputSchema: {} });
  return tool;
}
// Sends the email via nodemw's sendEmail and returns the raw result.
// NOTE(review): unlike page-writing handlers, no requireRead / summary tagging
// applies here — there is no page involved.
async function handleSendEmailTool(params) {
  try {
    const bot = await getBot();
    const result = await promisifyBotMethod(
      bot,
      "sendEmail",
      params.username,
      params.subject,
      params.text
    );
    return jsonResult(result);
  } catch (error) {
    return errorResult("Failed to send email", error);
  }
}
1862
+
1863
+ // src/tools/editing/upload.ts
1864
+ import { z as z38 } from "zod";
1865
// Registers the "upload" write tool: uploads base64-encoded file content,
// silently overwriting any existing file with the same name.
function uploadTool(server) {
  const tool = server.tool(
    "upload",
    "Upload a file to the wiki (requires authentication). CRITICAL: If a file with the same name already exists, it WILL BE OVERWRITTEN. Ensure you have the right to upload the content. Use only when explicitly requested.",
    {
      filename: z38.string().describe('Destination filename on wiki (e.g., "MyImage.png") \u2014 existing file will be overwritten!'),
      content: z38.string().describe("File content encoded as base64 string"),
      comment: z38.string().optional().describe("Upload comment describing the file")
    },
    {
      title: "Upload file",
      readOnlyHint: false,
      destructiveHint: true
    },
    async (params) => handleUploadTool(params)
  );
  tool.update({ outputSchema: {} });
  return tool;
}
// Decodes the base64 payload into a Buffer and uploads it.
// NOTE(review): Buffer.from(..., "base64") silently ignores invalid characters
// rather than rejecting malformed input — garbage in yields garbage bytes.
async function handleUploadTool(params) {
  try {
    const bot = await getBot();
    const fileContent = Buffer.from(params.content, "base64");
    // Tag the upload comment so wiki admins can trace MCP-originated uploads.
    const comment = params.comment ? `[nodemw-mcp.upload] ${params.comment}` : "[nodemw-mcp.upload] File upload";
    const result = await promisifyBotMethod(
      bot,
      "upload",
      params.filename,
      fileContent,
      comment
    );
    return jsonResult(result);
  } catch (error) {
    return errorResult("Failed to upload file", error);
  }
}
1901
+
1902
+ // src/tools/editing/upload-by-url.ts
1903
+ import { z as z39 } from "zod";
1904
// Registers the "upload-by-url" write tool: the wiki server fetches the file
// from a public URL, overwriting any existing file with the same name.
function uploadByUrlTool(server) {
  const tool = server.tool(
    "upload-by-url",
    "Upload a file to the wiki by downloading it from a URL (requires authentication). CRITICAL: If a file with the same name already exists, it WILL BE OVERWRITTEN. Ensure you have the right to upload the content from the source URL.",
    {
      filename: z39.string().describe('Destination filename on wiki (e.g., "Diagram.png") \u2014 existing file will be overwritten!'),
      url: z39.string().url().describe("Source URL to download the file from \u2014 must be publicly accessible"),
      summary: z39.string().optional().describe("Upload summary")
    },
    {
      title: "Upload file by URL",
      readOnlyHint: false,
      destructiveHint: true
    },
    async (params) => handleUploadByUrlTool(params)
  );
  tool.update({ outputSchema: {} });
  return tool;
}
// Triggers a server-side fetch-and-upload via nodemw's uploadByUrl.
async function handleUploadByUrlTool(params) {
  try {
    const bot = await getBot();
    // Tag the upload summary so wiki admins can trace MCP-originated uploads.
    const prefixedSummary = params.summary ? `[nodemw-mcp.upload-by-url] ${params.summary}` : "[nodemw-mcp.upload-by-url] File upload from URL";
    const result = await promisifyBotMethod(
      bot,
      "uploadByUrl",
      params.filename,
      params.url,
      prefixedSummary
    );
    return jsonResult(result);
  } catch (error) {
    return errorResult("Failed to upload file by URL", error);
  }
}
1939
+
1940
+ // src/tools/editing/add-flow-topic.ts
1941
+ import { z as z40 } from "zod";
1942
// Registers the "add-flow-topic" write tool: starts a new Flow/Structured
// Discussions thread on a talk page.
function addFlowTopicTool(server) {
  const tool = server.tool(
    "add-flow-topic",
    "Add a new Flow/Structured Discussions topic to a wiki talk page (requires authentication). Creates a publicly visible discussion thread on the wiki. Ensure the content is appropriate and relevant.",
    {
      title: z40.string().describe('Talk page title to add the topic to (e.g., "Talk:Main Page")'),
      subject: z40.string().describe("Topic title/heading \u2014 should summarize the discussion topic"),
      content: z40.string().describe("Topic body content in wikitext format")
    },
    {
      title: "Add Flow topic",
      readOnlyHint: false,
      destructiveHint: true
    },
    async (params) => handleAddFlowTopicTool(params)
  );
  tool.update({ outputSchema: {} });
  return tool;
}
// Creates the Flow topic via nodemw's addFlowTopic and returns the raw result.
// NOTE(review): no requireRead guard or summary tagging here, unlike the other
// page-writing handlers — confirm that is intentional.
async function handleAddFlowTopicTool(params) {
  try {
    const bot = await getBot();
    const result = await promisifyBotMethod(
      bot,
      "addFlowTopic",
      params.title,
      params.subject,
      params.content
    );
    return jsonResult(result);
  } catch (error) {
    return errorResult("Failed to add Flow topic", error);
  }
}
1976
+
1977
+ // src/tools/editing/create-account.ts
1978
+ import { z as z41 } from "zod";
1979
// Registers the "create-account" write tool: registers a brand-new user
// account on the target wiki.
function createAccountTool(server) {
  const tool = server.tool(
    "create-account",
    "Create a NEW user account on the wiki (requires authentication). CRITICAL: This creates a real user account. Do NOT create accounts for yourself or without explicit user request. The account will be permanently registered on the wiki.",
    {
      username: z41.string().describe("Desired username for the new account \u2014 must follow wiki username rules"),
      password: z41.string().describe("Password for the new account \u2014 use a strong, unique password")
    },
    {
      title: "Create user account",
      readOnlyHint: false,
      destructiveHint: true
    },
    async (params) => handleCreateAccountTool(params)
  );
  tool.update({ outputSchema: {} });
  return tool;
}
// Creates the account via nodemw's createAccount and returns the raw result.
async function handleCreateAccountTool(params) {
  try {
    const bot = await getBot();
    const result = await promisifyBotMethod(
      bot,
      "createAccount",
      params.username,
      params.password
    );
    return jsonResult(result);
  } catch (error) {
    return errorResult("Failed to create account", error);
  }
}
2011
+
2012
+ // src/tools/index.ts
2013
// Read-only tool registrars — registered in both authenticated and guest mode.
var readToolRegistrars = [
  getArticleTool,
  searchTool,
  getPagesInCategoryTool,
  getCategoriesTool,
  getUsersTool,
  getAllPagesTool,
  getPagesInNamespaceTool,
  getPagesByPrefixTool,
  getPagesTranscludingTool,
  getArticleRevisionsTool,
  getArticleCategoriesTool,
  getArticlePropertiesTool,
  getArticleInfoTool,
  getUserContribsTool,
  whoamiTool,
  whoisTool,
  whoareTool,
  getImagesTool,
  getImagesFromArticleTool,
  getImageUsageTool,
  getImageInfoTool,
  getLogTool,
  expandTemplatesTool,
  parseTool,
  getRecentChangesTool,
  getSiteInfoTool,
  getSiteStatsTool,
  getMediaWikiVersionTool,
  getQueryPageTool,
  getExternalLinksTool,
  getBacklinksTool
];
2046
// Write tool registrars — only registered when the bot is authenticated
// (see registerAllTools' includeWriteTools flag).
var writeToolRegistrars = [
  editTool,
  appendTool,
  prependTool,
  moveTool,
  deleteTool,
  protectTool,
  purgeTool,
  sendEmailTool,
  uploadTool,
  uploadByUrlTool,
  addFlowTopicTool,
  createAccountTool
];
2060
// Registers every tool on the MCP server. When includeWriteTools is false
// (guest mode), only the read-only registrars run. A registrar that throws is
// logged and skipped so one bad tool cannot prevent the rest from registering.
function registerAllTools(server, includeWriteTools = true) {
  const registrars = includeWriteTools
    ? [...readToolRegistrars, ...writeToolRegistrars]
    : readToolRegistrars;
  const registered = [];
  for (const registrar of registrars) {
    try {
      registered.push(registrar(server));
    } catch (err) {
      console.error(`Error registering tool: ${err.message}`);
    }
  }
  return registered;
}
2072
+
2073
+ // src/index.ts
2074
// Parses CLI flags / positionals / environment variables into the bot config.
// Precedence for each value: CLI flag, then positional (server only), then env
// var, then default. Exits the process when no target server is given.
// Returns { config, pathExplicit } where pathExplicit tells the caller whether
// the API path was user-supplied (true) or should be auto-detected (false).
function parseCliArgs() {
  const { values, positionals } = parseArgs({
    options: {
      server: { type: "string", short: "s" },
      path: { type: "string" },
      endpoint: { type: "string" },
      user: { type: "string", short: "u" },
      pass: { type: "string", short: "p" },
      token: { type: "string" },
      "dry-run": { type: "boolean" }
    },
    strict: false,          // unknown flags are tolerated, not fatal
    allowPositionals: true  // allow `nodemw-mcp-server <server>` form
  });
  const serverUrl = values.server ?? positionals[0] ?? process.env.NODEMW_MCP_SERVER;
  if (!serverUrl) {
    console.error("Error: target server is required (-s, positional arg, or NODEMW_MCP_SERVER env)");
    process.exit(1);
  }
  let server;
  let protocol;
  let port;
  try {
    // Accept either a bare hostname or a full http(s):// URL; for a URL,
    // split out hostname, protocol (without the trailing ":"), and port.
    if (serverUrl.startsWith("http://") || serverUrl.startsWith("https://")) {
      const url = new URL(serverUrl);
      server = url.hostname;
      protocol = url.protocol.replace(":", "");
      if (url.port) {
        port = parseInt(url.port, 10);
      }
    } else {
      server = serverUrl;
    }
  } catch {
    // Malformed URL: fall back to treating the whole string as a hostname.
    server = serverUrl;
  }
  const pathFromEnv = process.env.NODEMW_MCP_ENDPOINT_PATH;
  // True when the API path came from --path/--endpoint or the env var;
  // false means the caller should attempt auto-detection.
  const pathExplicit = !!(values.path ?? values.endpoint ?? pathFromEnv);
  return {
    config: {
      server,
      protocol,
      port,
      path: values.path ?? values.endpoint ?? pathFromEnv ?? "/w",
      username: values.user ?? process.env.NODEMW_MCP_MW_USER,
      password: values.pass ?? process.env.NODEMW_MCP_MW_PASS,
      token: values.token,
      dryRun: values["dry-run"]
    },
    pathExplicit
  };
}
2126
/**
 * Entry point: resolve configuration, initialize the nodemw bot, build the
 * MCP server, and start serving over stdio.
 *
 * Fatal setup failures (path auto-detection, bot init/login) are logged to
 * stderr and terminate the process with status 1. Fetching site info is
 * best-effort and non-fatal.
 */
async function main() {
  const parsed = parseCliArgs();
  const config = parsed.config;
  // Probe for the API path only when the caller did not specify one.
  if (!parsed.pathExplicit) {
    try {
      config.path = await autoDetectPath(config);
      console.error(`Auto-detected API path: ${config.path}`);
    } catch (error) {
      console.error("Error:", error.message);
      process.exit(1);
    }
  }
  initServerConfig(config);
  try {
    await initBot(config);
  } catch (error) {
    console.error("Error:", error.message);
    process.exit(1);
  }
  const bot = getBot();
  // Best-effort lookup of the wiki's general info so the MCP server
  // description can name the site; failure here is only a warning.
  let siteInfo;
  try {
    const general = (await promisifyBotMethod(bot, "getSiteInfo", ["general"]))?.general;
    if (general) {
      siteInfo = {
        sitename: general.sitename || "Unknown",
        base: general.base || "",
        generator: general.generator || "MediaWiki"
      };
    }
  } catch {
    console.error("Warning: Could not fetch site info for server description.");
  }
  const auth = isAuthenticated();
  const mcpServer = createServer(siteInfo, auth);
  // Write tools are registered only for authenticated sessions.
  registerAllTools(mcpServer, auth);
  await mcpServer.connect(new StdioServerTransport());
  const endpoint = `${config.protocol ?? "https"}://${config.server}${config.path}/api.php`;
  console.error(`Ready to operate on "${siteInfo?.sitename ?? config.server}" <${endpoint}>`);
}
2169
// Top-level launch. BUG FIX: the previous `main().catch(console.error)`
// logged a fatal startup error but left the process's exit status at 0,
// so supervisors could not tell the server failed to start. Set a failure
// exit code (without process.exit, to let stderr flush) in addition to
// logging.
main().catch((err) => {
  console.error(err);
  process.exitCode = 1;
});