openalmanac 0.3.5 → 0.4.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,18 +1,6 @@
1
1
  import { z } from "zod";
2
2
  import { request } from "../auth.js";
3
- function coerceJson(schema) {
4
- return z.preprocess((val) => {
5
- if (typeof val === "string") {
6
- try {
7
- return JSON.parse(val);
8
- }
9
- catch {
10
- return val;
11
- }
12
- }
13
- return val;
14
- }, schema);
15
- }
3
+ import { coerceJson } from "../utils.js";
16
4
  export function registerTopicTools(server) {
17
5
  server.addTool({
18
6
  name: "list_topics",
@@ -28,24 +16,6 @@ export function registerTopicTools(server) {
28
16
  return JSON.stringify(await resp.json(), null, 2);
29
17
  },
30
18
  });
31
- server.addTool({
32
- name: "create_topic",
33
- description: "Create a topic in a wiki. Topics are lightweight categories — pages can belong to multiple topics. Requires wiki membership.",
34
- parameters: z.object({
35
- wiki_slug: z.string().describe("Wiki slug"),
36
- title: z.string().describe("Topic title"),
37
- description: z.string().default("").describe("Topic description"),
38
- image_url: z.string().url().max(2048).optional().describe("Topic image URL (https:// or http://)"),
39
- parent_slugs: coerceJson(z.array(z.string())).default([]).describe("Parent topic slugs"),
40
- }),
41
- async execute({ wiki_slug, title, description, image_url, parent_slugs }) {
42
- const resp = await request("POST", `/api/w/${wiki_slug}/topics`, {
43
- auth: true,
44
- json: { title, description, image_url, parent_slugs },
45
- });
46
- return JSON.stringify(await resp.json(), null, 2);
47
- },
48
- });
49
19
  server.addTool({
50
20
  name: "update_topic",
51
21
  description: "Update a topic's title, description, or image. Requires wiki moderator role.",
@@ -75,8 +45,8 @@ export function registerTopicTools(server) {
75
45
  },
76
46
  });
77
47
  server.addTool({
78
- name: "create_topics_batch",
79
- description: "Batch create topics in a wiki. Useful for bootstrapping a topic hierarchy. Requires wiki membership.",
48
+ name: "create_topics",
49
+ description: "Create one or more topics in a wiki. Topics are lightweight categories — pages can belong to multiple topics. Pass a single-element array for one topic: `[{ title: \"Security Pins\" }]`. Useful for bootstrapping a topic hierarchy. Requires wiki membership.",
80
50
  parameters: z.object({
81
51
  wiki_slug: z.string().describe("Wiki slug"),
82
52
  topics: coerceJson(z.array(z.object({
@@ -84,7 +54,7 @@ export function registerTopicTools(server) {
84
54
  description: z.string().default(""),
85
55
  image_url: z.string().url().max(2048).optional(),
86
56
  parent_slugs: z.array(z.string()).default([]),
87
- })).min(1).max(100)).describe("Topics to create"),
57
+ })).min(1).max(100)).describe("Topics to create (N=1 is fully supported)"),
88
58
  }),
89
59
  async execute({ wiki_slug, topics }) {
90
60
  const resp = await request("POST", `/api/w/${wiki_slug}/topics/batch`, {
@@ -1,18 +1,6 @@
1
1
  import { z } from "zod";
2
2
  import { request } from "../auth.js";
3
- function coerceJson(schema) {
4
- return z.preprocess((val) => {
5
- if (typeof val === "string") {
6
- try {
7
- return JSON.parse(val);
8
- }
9
- catch {
10
- return val;
11
- }
12
- }
13
- return val;
14
- }, schema);
15
- }
3
+ import { coerceJson } from "../utils.js";
16
4
  // Mirrors backend `NavItem` in src/schemas/wiki_settings_schemas.py. The
17
5
  // refinement matches the `exactly_one_target` @model_validator there —
18
6
  // agents get a clear error pre-flight instead of a 422 round-trip.
@@ -94,7 +82,7 @@ export function registerWikiTools(server) {
94
82
  });
95
83
  server.addTool({
96
84
  name: "update_wiki_settings",
97
- description: "Update a wiki's settings (nav, cover_image_url, theme). Each NavItem must have exactly one of `page`, `topic`, or `link`. Use `auto` (only on topic items) to auto-populate children from the topic DAG. Requires moderator access.",
85
+ description: "Update a wiki's settings. Pass any combination of `nav`, `cover_image_url`, and `theme` — omitted fields are preserved (the backend uses exclude_unset merge). For example, `{nav: [...]}` updates navigation only without touching theme or cover_image_url. Each NavItem must have exactly one of `page`, `topic`, or `link`. Use `auto` (only on topic items) to auto-populate children from the topic DAG. Requires moderator access.",
98
86
  parameters: z.object({
99
87
  wiki_slug: z.string().describe("Wiki slug"),
100
88
  settings: coerceJson(z.object({
@@ -112,22 +100,32 @@ export function registerWikiTools(server) {
112
100
  },
113
101
  });
114
102
  server.addTool({
115
- name: "update_nav",
116
- description: "Update just the navigation tree for a wiki. Shorthand for updating settings.nav. Requires moderator access.",
103
+ name: "join_wiki",
104
+ description: "Join a wiki as a member. After joining you can create and edit pages. Requires login.",
117
105
  parameters: z.object({
118
106
  wiki_slug: z.string().describe("Wiki slug"),
119
- nav: coerceJson(z.array(navItemSchema)).describe("Nav items"),
120
107
  }),
121
- async execute({ wiki_slug, nav }) {
122
- // Single atomic PATCH the backend's update_settings service uses
123
- // model_dump(exclude_unset=True) to merge partial bodies, so sending
124
- // only {nav} doesn't touch theme or cover_image_url. Previously this
125
- // did a GET-then-PATCH, which raced with concurrent settings updates.
126
- const resp = await request("PATCH", `/api/w/${wiki_slug}/settings`, {
127
- auth: true,
128
- json: { nav },
129
- });
130
- return JSON.stringify(await resp.json(), null, 2);
108
+ async execute({ wiki_slug }) {
109
+ const resp = await request("POST", `/api/w/${wiki_slug}/join`, { auth: true });
110
+ const data = (await resp.json());
111
+ const role = data.role ?? "member";
112
+ return `Joined wiki "${wiki_slug}" as ${role}.`;
113
+ },
114
+ });
115
+ server.addTool({
116
+ name: "get_wiki_membership",
117
+ description: "Check your membership status in a wiki. Returns your role if you are a member. Requires login.",
118
+ parameters: z.object({
119
+ wiki_slug: z.string().describe("Wiki slug"),
120
+ }),
121
+ async execute({ wiki_slug }) {
122
+ const resp = await request("GET", `/api/w/${wiki_slug}/membership/me`, { auth: true });
123
+ const data = (await resp.json());
124
+ if (!data.is_member) {
125
+ return `You are not a member of "${wiki_slug}".`;
126
+ }
127
+ const role = data.role ?? "member";
128
+ return `You are a member of "${wiki_slug}" as ${role}.`;
131
129
  },
132
130
  });
133
131
  }
@@ -0,0 +1,7 @@
1
+ import { z } from "zod";
2
+ /**
3
+ * Workaround for Claude Agent SDK MCP transport bug (#18260):
4
+ * Array/object parameters are sometimes serialized as JSON strings
5
+ * instead of native values. This preprocessor coerces them back.
6
+ */
7
+ export declare function coerceJson<T extends z.ZodTypeAny>(schema: T): z.ZodEffects<T, T["_output"], unknown>;
package/dist/utils.js ADDED
@@ -0,0 +1,19 @@
1
+ import { z } from "zod";
2
+ /**
3
+ * Workaround for Claude Agent SDK MCP transport bug (#18260):
4
+ * Array/object parameters are sometimes serialized as JSON strings
5
+ * instead of native values. This preprocessor coerces them back.
6
+ */
7
+ export function coerceJson(schema) {
8
+ return z.preprocess((val) => {
9
+ if (typeof val === "string") {
10
+ try {
11
+ return JSON.parse(val);
12
+ }
13
+ catch {
14
+ return val;
15
+ }
16
+ }
17
+ return val;
18
+ }, schema);
19
+ }
package/package.json CHANGED
@@ -1,22 +1,30 @@
1
1
  {
2
2
  "name": "openalmanac",
3
- "version": "0.3.5",
4
- "description": "OpenAlmanac — pull, edit, and push articles to the open knowledge base",
3
+ "version": "0.4.0",
4
+ "description": "OpenAlmanac — pull, edit, and push pages to the open knowledge base",
5
5
  "type": "module",
6
6
  "bin": {
7
7
  "openalmanac": "dist/cli.js"
8
8
  },
9
+ "exports": {
10
+ "./tool-registry": {
11
+ "types": "./dist/tool-registry.d.ts",
12
+ "default": "./dist/tool-registry.js"
13
+ }
14
+ },
9
15
  "scripts": {
10
16
  "build": "tsc",
11
17
  "dev": "tsc --watch",
12
- "start": "node dist/cli.js"
18
+ "start": "node dist/cli.js",
19
+ "test": "vitest run",
20
+ "test:watch": "vitest"
13
21
  },
14
22
  "keywords": [
15
23
  "openalmanac",
16
24
  "mcp",
17
25
  "knowledge-base",
18
26
  "ai",
19
- "articles"
27
+ "pages"
20
28
  ],
21
29
  "license": "MIT",
22
30
  "dependencies": {
@@ -25,8 +33,9 @@
25
33
  "zod": "^3.24.0"
26
34
  },
27
35
  "devDependencies": {
28
- "@types/node": "^22.0.0",
29
- "typescript": "^5.7.0"
36
+ "@types/node": "^25.6.0",
37
+ "typescript": "^5.7.0",
38
+ "vitest": "^3.2.4"
30
39
  },
31
40
  "engines": {
32
41
  "node": ">=18.0.0"
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  name: reddit-wiki
3
3
  description: Turn any subreddit into a published wiki on Almanac
4
- allowed-tools: Bash(node ${CLAUDE_SKILL_DIR}/scripts/ingest.js *), mcp__almanac__search_articles, mcp__almanac__search_communities, mcp__almanac__list_articles, mcp__almanac__read, mcp__almanac__download, mcp__almanac__new, mcp__almanac__publish, mcp__almanac__search_web, mcp__almanac__read_webpage, mcp__almanac__search_images, mcp__almanac__view_images, mcp__almanac__register_sources, mcp__almanac__login, mcp__almanac__create_community, Read(~/.openalmanac/**), Write(~/.openalmanac/**), Edit(~/.openalmanac/**)
4
+ allowed-tools: Bash(node ${CLAUDE_SKILL_DIR}/scripts/ingest.js *), mcp__almanac__search_pages, mcp__almanac__list_wikis, mcp__almanac__create_wiki, mcp__almanac__list_pages, mcp__almanac__read_page, mcp__almanac__download, mcp__almanac__new, mcp__almanac__publish, mcp__almanac__search_web, mcp__almanac__read_webpage, mcp__almanac__search_images, mcp__almanac__view_images, mcp__almanac__register_sources, mcp__almanac__login, Read(~/.openalmanac/**), Write(~/.openalmanac/**), Edit(~/.openalmanac/**)
5
5
  argument-hint: r/<subreddit>
6
6
  ---
7
7
 
@@ -18,8 +18,8 @@ Never estimate how long things will take. Do show data sizes so the user knows w
18
18
  ## Flow overview
19
19
 
20
20
  Two phases:
21
- 1. **Foundation** — Plan and write 15-20 core articles with images, citations, and wikilinks
22
- 2. **Deep Absorb** — Process the corpus batch by batch, discovering niche topics and enriching existing articles
21
+ 1. **Foundation** — Plan and write 15-20 core pages with images, citations, and wikilinks
22
+ 2. **Deep Absorb** — Process the corpus batch by batch, discovering niche topics and enriching existing pages
23
23
 
24
24
  ## Naming convention
25
25
 
@@ -32,9 +32,9 @@ Two phases:
32
32
 
33
33
  If the user runs `/reddit-wiki` without arguments or asks how it works, explain briefly:
34
34
 
35
- - **What it does:** Takes any subreddit and builds a wiki on Almanac — real articles with citations, images, and links between them. Two phases: a foundation of 15-20 core articles, then a deep pass through the corpus finding niche topics.
35
+ - **What it does:** Takes any subreddit and builds a wiki on Almanac — real pages with citations, images, and links between them. Two phases: a foundation of 15-20 core pages, then a deep pass through the corpus finding niche topics.
36
36
  - **What Almanac is:** An open knowledge base anyone can read and write to. Think Wikipedia's depth meets Reddit's community energy.
37
- - **How it works:** Downloads the subreddit's history, scores posts by quality, then uses AI agents to research and write articles citing the community's own discussions.
37
+ - **How it works:** Downloads the subreddit's history, scores posts by quality, then uses AI agents to research and write pages citing the community's own discussions.
38
38
  - **Data storage:** Everything is stored locally at `~/.openalmanac/corpus/<subreddit>/`. The user can delete it anytime after the wiki is published.
39
39
  - **Any subreddit:** They can pick any subreddit they're interested in. Some smaller or newer subreddits may not have data available — if that happens, you'll suggest alternatives or nearby subreddits that do have data.
40
40
 
@@ -45,8 +45,8 @@ Then end with a single inviting line that asks what they're into and offers to h
45
45
  Extract the subreddit name from the argument (strip `r/` prefix if present). Use the bare name for all API calls and file paths. Use `r/<name>` when talking to the user.
46
46
 
47
47
  Run these three things in parallel (silently — don't narrate the tool calls):
48
- 1. `search_communities("<subreddit_name>")`
49
- 2. `search_articles` with 5-10 key topic terms you'd expect in this community
48
+ 1. `list_wikis()` and look for the subreddit wiki slug
49
+ 2. `search_pages` with 5-10 key topic terms you'd expect in this community
50
50
  3. Get subreddit stats from Arctic Shift:
51
51
 
52
52
  ```bash
@@ -56,7 +56,7 @@ node ${CLAUDE_SKILL_DIR}/scripts/ingest.js $1 count
56
56
  This returns JSON with `total_posts`, `total_comments`, and `estimated_size_mb`.
57
57
 
58
58
  Now greet the user. Tell them:
59
- - What already exists on Almanac for this community (articles, stubs, community)
59
+ - What already exists on Almanac for this community (pages, stubs, wiki)
60
60
  - Share something genuinely interesting about it if you know anything
61
61
  - Subreddit stats (posts, comments)
62
62
  - The two-phase plan (brief — one line each)
@@ -153,18 +153,18 @@ If the `count` command returns 0 posts, the subreddit may not be indexed. In thi
153
153
  Read 20-30 corpus entries (prioritize high-score posts) to understand the landscape. Also check what already exists:
154
154
 
155
155
  ```
156
- list_articles(community_slug: "<subreddit>", sort: "most_referenced")
156
+ list_pages(wiki_slug: "<subreddit>")
157
157
  ```
158
158
 
159
- Identify 15-20 core articles. **Favor nouns over themes** — specific things people would look up, not vague survey topics.
159
+ Identify 15-20 core pages. **Favor nouns over themes** — specific things people would look up, not vague survey topics.
160
160
 
161
161
  - **~70% nouns:** Specific locks, tools, people, techniques, concepts. "American Lock 1100", "Spool Pin", "Tension Wrench", "LockPickingLawyer". These are the building blocks — what people search for, link to, and learn from.
162
- - **~30% structural themes:** Only the big ones that serve as entry points and tie nouns together. "Belt System", "Lock Picking Basics". Not vague surveys — each should be a real article that teaches something.
162
+ - **~30% structural themes:** Only the big ones that serve as entry points and tie nouns together. "Belt System", "Lock Picking Basics". Not vague surveys — each should be a real page that teaches something.
163
163
 
164
164
  Bad: "Security Pin Mechanics" (vague theme, reads like a textbook chapter)
165
165
  Good: "Spool Pin", "Serrated Pin", "Mushroom Pin" (specific nouns — then link them from a "Security Pins" overview)
166
166
 
167
- Present them to the user grouped by category, but make clear most articles are about specific things:
167
+ Present them to the user grouped by category, but make clear most pages are about specific things:
168
168
 
169
169
  ```
170
170
  Here's what I'd build for the foundation:
@@ -188,21 +188,21 @@ Include your recommendation. Wait for the user to confirm or adjust.
188
188
 
189
189
  ### Topics
190
190
 
191
- The groupings you present (Locks, Components, Techniques, Community) become **community topics** on Almanac. Topics show up as categories on the wiki page and each article gets assigned to one. When you scaffold articles, include the topic in the `new()` call.
191
+ The groupings you present (Locks, Components, Techniques, Community) become **community topics** on Almanac. Topics show up as categories on the wiki page and each page gets assigned to one. When you scaffold pages, include the topic in the `new()` call.
192
192
 
193
- Keep topics broad and few (4-7). They're navigation, not a taxonomy. A topic like "Locks" is good. A topic like "European High-Security Disc Detainer Locks" is too specific — that's an article, not a topic.
193
+ Keep topics broad and few (4-7). They're navigation, not a taxonomy. A topic like "Locks" is good. A topic like "European High-Security Disc Detainer Locks" is too specific — that's a page, not a topic.
194
194
 
195
195
  ### Scaffold entities
196
196
 
197
- Before any writing, scaffold all planned articles as local files:
197
+ Before any writing, scaffold all planned pages as local files:
198
198
 
199
- 1. **Check what exists online:** `search_articles` with ALL planned entity names in one batch call
200
- 2. **Check local folder:** Read `~/.openalmanac/articles/<subreddit>/` to see what's already scaffolded
201
- 3. **Create missing:** `new(articles: [{title, community_slug}, ...])` for everything not found
199
+ 1. **Check what exists online:** `search_pages` with ALL planned entity names in one batch call
200
+ 2. **Check local folder:** Read `~/.openalmanac/pages/<subreddit>/` to see what's already scaffolded
201
+ 3. **Create missing:** `new(pages: [{title, slug?, topics?}, ...], wiki_slug: "<subreddit>")` for everything not found
202
202
 
203
203
  This creates the entity map. Writing agents will check the local folder to know what slugs exist.
204
204
 
205
- ### Write articles
205
+ ### Write pages
206
206
 
207
207
  Tell the user what's happening:
208
208
 
@@ -215,62 +215,62 @@ Kicking off the writing agents:
215
215
  • Agent 4: Community — LockPickingLawyer, Belt System
216
216
  ```
217
217
 
218
- Spin up 4-5 parallel writing agents, ~3-4 articles each. Group by theme so related articles are written by the same agent (better cross-referencing).
218
+ Spin up 4-5 parallel writing agents, ~3-4 pages each. Group by theme so related pages are written by the same agent (better cross-referencing).
219
219
 
220
220
  **Each writing agent's brief must include:**
221
221
 
222
- 1. **Which articles to write** (the scaffolded .md files to fill in)
222
+ 1. **Which pages to write** (the scaffolded .md files to fill in)
223
223
  2. **Corpus entries to read** — point to specific files in `~/.openalmanac/corpus/<subreddit>/` relevant to its topics
224
224
  3. **The entity map** — list all scaffolded slugs so the agent uses correct wikilinks
225
225
  4. **These citation rules:**
226
226
  - Every source MUST have a public URL
227
- - Corpus entries have `citation_key` and `source` (Reddit permalink) in their frontmatter — use them as `[@citation_key]` markers and list them in the article's YAML `sources:` array
227
+ - Corpus entries have `citation_key` and `source` (Reddit permalink) in their frontmatter — use them as `[@citation_key]` markers and list them in the page's YAML `sources:` array
228
228
  - Also use `search_web` and `read_webpage` for additional sources beyond Reddit
229
229
  - NEVER fabricate a URL. If a source has no public URL, do not use it.
230
230
  - Register sources with `register_sources` before writing
231
231
  5. **These wikilink rules:**
232
232
  - Use `[[slug|Display Text]]` syntax for entities that exist (scaffolded or published)
233
- - Before linking to a new entity NOT on the map: `search_articles` to check, then scaffold with `new()` if needed
233
+ - Before linking to a new entity NOT on the map: `search_pages` to check, then scaffold with `new()` if needed
234
234
  - Prefer existing slugs over inventing new ones
235
235
  6. **Writing quality:**
236
236
  - Fetch guidelines from `https://openalmanac.org/writing-guidelines` using `read_webpage`
237
237
  - Write with the community's voice — cite Reddit discussions, not just Wikipedia
238
238
  - Include `[@citation_key]` markers throughout, especially for claims from the corpus
239
- - Articles should feel like they were written by someone who lives in this community
239
+ Pages should feel like they were written by someone who lives in this community
240
240
 
241
241
  **While agents work**, narrate what's happening. Share interesting things you see them finding. Example:
242
242
 
243
243
  ```
244
244
  Agent 2 found a heated 2019 thread about whether LockPickingLawyer's
245
245
  speed picks are realistic for beginners — 400 upvotes, great discussion.
246
- Working that into the article...
246
+ Working that into the page...
247
247
  ```
248
248
 
249
249
  ### Image pass
250
250
 
251
- After all writing agents finish, run parallel haiku-model image agents (one per article):
251
+ After all writing agents finish, run parallel haiku-model image agents (one per page):
252
252
 
253
253
  Each image agent:
254
- 1. Reads the article
254
+ 1. Reads the page
255
255
  2. `search_images` for 1-2 hero image queries
256
256
  3. `view_images` to verify the best candidate
257
- 4. Adds the image URL to the article's frontmatter as `image_url`
257
+ 4. Adds the image URL to the page's frontmatter as `image_url`
258
258
 
259
259
  ### Publish
260
260
 
261
261
  ```
262
- publish(community_slug: "<subreddit>")
262
+ publish(wiki_slug: "<subreddit>")
263
263
  ```
264
264
 
265
- This batch-publishes all articles in the community folder. The backend auto-creates stubs from any dead wikilinks in the articles.
265
+ This batch-publishes all pages in the wiki folder. The backend auto-creates stubs from any dead wikilinks in the pages.
266
266
 
267
267
  Share the results with enthusiasm:
268
268
 
269
269
  ```
270
- 17 articles live! The wiki now has 35 articles total, plus
270
+ 17 pages live! The wiki now has 35 pages total, plus
271
271
  12 new stubs that emerged from wikilinks.
272
272
 
273
- Check it out: openalmanac.org/communities/<subreddit>/wiki
273
+ Check it out: openalmanac.org/w/<subreddit>
274
274
 
275
275
  You can also browse it in the Almanac desktop app — best way
276
276
  to explore and keep contributing.
@@ -302,10 +302,10 @@ For each batch:
302
302
 
303
303
  1. **Read 50 unabsorbed entries** from the corpus directory (skip any listed in absorb_log)
304
304
  2. **Cluster by theme** — what topics do these entries cover?
305
- 3. **Decide:** Create new articles? Enrich existing ones? Both?
306
- 4. **For existing articles:** `download` them first, then expand with new details/sections
307
- 5. **For new articles:** Scaffold → write → add to wiki
308
- 6. **Image pass** on any new articles (haiku agents)
305
+ 3. **Decide:** Create new pages? Enrich existing ones? Both?
306
+ 4. **For existing pages:** `download` them first, then expand with new details/sections
307
+ 5. **For new pages:** Scaffold → write → add to wiki
308
+ 6. **Image pass** on any new pages (haiku agents)
309
309
  7. **Publish** the batch
310
310
  8. **Update absorb_log.json:**
311
311
  ```json
@@ -313,7 +313,7 @@ For each batch:
313
313
  "entries": {
314
314
  "<filename>": {
315
315
  "absorbed_at": "<ISO timestamp>",
316
- "absorbed_into": ["article-slug-1", "article-slug-2"]
316
+ "absorbed_into": ["page-slug-1", "page-slug-2"]
317
317
  }
318
318
  },
319
319
  "stats": {
@@ -330,12 +330,12 @@ For each batch:
330
330
  Batches 1-5 done. Found some gems:
331
331
  • "Lock Lubricants in Cold Weather" — apparently Houdini
332
332
  lube freezes below -20°F, community recommends graphite
333
- • Expanded the American 1100 article with a detailed
333
+ • Expanded the American 1100 page with a detailed
334
334
  teardown thread from 2017
335
- • New article: "Lockpicking Competitions" — there's a
335
+ • New page: "Lockpicking Competitions" — there's a
336
336
  whole competitive scene
337
337
 
338
- 3 new articles, 4 enriched. Continuing...
338
+ 3 new pages, 4 enriched. Continuing...
339
339
  ```
340
340
 
341
341
  ### When to stop
@@ -348,11 +348,11 @@ Batches 1-5 done. Found some gems:
348
348
  Phase 2 complete. Processed X,XXX entries across N batches.
349
349
 
350
350
  Final wiki:
351
- XX articles (was YY)
351
+ XX pages (was YY)
352
352
  XX remaining stubs
353
353
  XXX+ citations from the community
354
354
 
355
- openalmanac.org/communities/<subreddit>/wiki
355
+ openalmanac.org/w/<subreddit>
356
356
  ```
357
357
 
358
358
  ## Important rules
@@ -364,14 +364,14 @@ openalmanac.org/communities/<subreddit>/wiki
364
364
  - Corpus entries have `citation_key` and `source` in their frontmatter — these are ready to use.
365
365
 
366
366
  ### Entity linking
367
- - Always `search_articles` before creating new entities — check what already exists
368
- - Check the local `~/.openalmanac/articles/<subreddit>/` folder for scaffolded files
367
+ - Always `search_pages` before creating new entities — check what already exists
368
+ - Check the local `~/.openalmanac/pages/<subreddit>/` folder for scaffolded files
369
369
  - Only scaffold with `new()` if the entity doesn't exist anywhere
370
370
  - Use `[[slug|Display Text]]` wikilink syntax
371
371
  - Prefer existing slugs over inventing new ones to avoid duplicates
372
372
 
373
373
  ### Community creation
374
- - If the community doesn't exist on Almanac yet, create it with `create_community`
374
+ - If the wiki doesn't exist on Almanac yet, create it with `create_wiki`
375
375
  - The description should have personality — capture the community's vibe, not a generic taxonomy
376
376
  - Find a good cover image with `search_images`
377
377
 
@@ -385,5 +385,5 @@ openalmanac.org/communities/<subreddit>/wiki
385
385
  - Don't make small talk or ask personal questions
386
386
  - Don't force enthusiasm — if something isn't interesting, don't pretend
387
387
  - Don't go silent for long stretches — narrate what's happening
388
- - Don't ask permission for every article — the user approved the plan, that's consent
388
+ - Don't ask permission for every page — the user approved the plan, that's consent
389
389
  - Don't skip Reddit as a source — the corpus IS the community's voice, cite it
@@ -1,2 +0,0 @@
1
- import { FastMCP } from "fastmcp";
2
- export declare function registerArticleTools(server: FastMCP): void;