@kibhq/core 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (53)
  1. package/package.json +40 -0
  2. package/src/compile/backlinks.test.ts +112 -0
  3. package/src/compile/backlinks.ts +80 -0
  4. package/src/compile/cache.test.ts +126 -0
  5. package/src/compile/cache.ts +125 -0
  6. package/src/compile/compiler.test.ts +278 -0
  7. package/src/compile/compiler.ts +305 -0
  8. package/src/compile/diff.test.ts +164 -0
  9. package/src/compile/diff.ts +121 -0
  10. package/src/compile/index-manager.test.ts +227 -0
  11. package/src/compile/index-manager.ts +148 -0
  12. package/src/compile/prompts.ts +124 -0
  13. package/src/constants.ts +40 -0
  14. package/src/errors.ts +66 -0
  15. package/src/hash.test.ts +21 -0
  16. package/src/hash.ts +24 -0
  17. package/src/index.ts +22 -0
  18. package/src/ingest/extractors/file.test.ts +129 -0
  19. package/src/ingest/extractors/file.ts +136 -0
  20. package/src/ingest/extractors/github.test.ts +47 -0
  21. package/src/ingest/extractors/github.ts +135 -0
  22. package/src/ingest/extractors/interface.ts +26 -0
  23. package/src/ingest/extractors/pdf.ts +130 -0
  24. package/src/ingest/extractors/web.test.ts +242 -0
  25. package/src/ingest/extractors/web.ts +163 -0
  26. package/src/ingest/extractors/youtube.test.ts +44 -0
  27. package/src/ingest/extractors/youtube.ts +166 -0
  28. package/src/ingest/ingest.test.ts +187 -0
  29. package/src/ingest/ingest.ts +179 -0
  30. package/src/ingest/normalize.test.ts +120 -0
  31. package/src/ingest/normalize.ts +83 -0
  32. package/src/ingest/router.test.ts +154 -0
  33. package/src/ingest/router.ts +119 -0
  34. package/src/lint/lint.test.ts +253 -0
  35. package/src/lint/lint.ts +43 -0
  36. package/src/lint/rules.ts +178 -0
  37. package/src/providers/anthropic.ts +107 -0
  38. package/src/providers/index.ts +4 -0
  39. package/src/providers/ollama.ts +101 -0
  40. package/src/providers/openai.ts +67 -0
  41. package/src/providers/router.ts +62 -0
  42. package/src/query/query.test.ts +165 -0
  43. package/src/query/query.ts +136 -0
  44. package/src/schemas.ts +193 -0
  45. package/src/search/engine.test.ts +230 -0
  46. package/src/search/engine.ts +390 -0
  47. package/src/skills/loader.ts +163 -0
  48. package/src/skills/runner.ts +139 -0
  49. package/src/skills/schema.ts +28 -0
  50. package/src/skills/skills.test.ts +134 -0
  51. package/src/types.ts +136 -0
  52. package/src/vault.test.ts +141 -0
  53. package/src/vault.ts +251 -0
@@ -0,0 +1,390 @@
1
+ import { existsSync } from "node:fs";
2
+ import { readFile } from "node:fs/promises";
3
+ import { join } from "node:path";
4
+ import { parseFrontmatter } from "../compile/diff.js";
5
+ import { CACHE_DIR, VAULT_DIR, WIKI_DIR } from "../constants.js";
6
+ import type { SearchResult } from "../types.js";
7
+ import { listRaw, listWiki } from "../vault.js";
8
+
9
+ // ─── Stop Words ──────────────────────────────────────────────────
10
+
11
+ const STOP_WORDS = new Set([
12
+ "a",
13
+ "an",
14
+ "and",
15
+ "are",
16
+ "as",
17
+ "at",
18
+ "be",
19
+ "by",
20
+ "for",
21
+ "from",
22
+ "has",
23
+ "he",
24
+ "in",
25
+ "is",
26
+ "it",
27
+ "its",
28
+ "of",
29
+ "on",
30
+ "or",
31
+ "she",
32
+ "that",
33
+ "the",
34
+ "to",
35
+ "was",
36
+ "were",
37
+ "will",
38
+ "with",
39
+ "this",
40
+ "but",
41
+ "they",
42
+ "have",
43
+ "had",
44
+ "not",
45
+ "been",
46
+ "can",
47
+ "do",
48
+ "does",
49
+ "did",
50
+ "would",
51
+ "could",
52
+ "should",
53
+ "may",
54
+ "might",
55
+ "shall",
56
+ "which",
57
+ "who",
58
+ "whom",
59
+ "what",
60
+ "when",
61
+ "where",
62
+ "how",
63
+ "why",
64
+ "all",
65
+ "each",
66
+ "every",
67
+ "both",
68
+ "few",
69
+ "more",
70
+ "most",
71
+ "other",
72
+ "some",
73
+ "such",
74
+ "no",
75
+ "nor",
76
+ "only",
77
+ "own",
78
+ "same",
79
+ "so",
80
+ "than",
81
+ "too",
82
+ "very",
83
+ "just",
84
+ "about",
85
+ "above",
86
+ "after",
87
+ "again",
88
+ "also",
89
+ "am",
90
+ "any",
91
+ "because",
92
+ "before",
93
+ "being",
94
+ "between",
95
+ "during",
96
+ "here",
97
+ "if",
98
+ "into",
99
+ "itself",
100
+ "me",
101
+ "my",
102
+ "myself",
103
+ "once",
104
+ "our",
105
+ "out",
106
+ "over",
107
+ "then",
108
+ "there",
109
+ "these",
110
+ "those",
111
+ "through",
112
+ "under",
113
+ "until",
114
+ "up",
115
+ "we",
116
+ "while",
117
+ "you",
118
+ "your",
119
+ ]);
120
+
121
+ // ─── Tokenizer ───────────────────────────────────────────────────
122
+
123
+ function tokenize(text: string): string[] {
124
+ return text
125
+ .toLowerCase()
126
+ .replace(/[^a-z0-9\s-]/g, " ")
127
+ .split(/\s+/)
128
+ .map(stem)
129
+ .filter((t) => t.length > 1 && !STOP_WORDS.has(t));
130
+ }
131
+
132
+ /** Simple suffix stripping for English. Not a full Porter stemmer, but handles common cases. */
133
+ function stem(word: string): string {
134
+ if (word.length < 4) return word;
135
+ // Order matters: try longest suffixes first
136
+ if (word.endsWith("ization")) return word.slice(0, -7) + "ize";
137
+ if (word.endsWith("ational")) return word.slice(0, -7) + "ate";
138
+ if (word.endsWith("iveness")) return word.slice(0, -7) + "ive";
139
+ if (word.endsWith("fulness")) return word.slice(0, -7) + "ful";
140
+ if (word.endsWith("ousli")) return word.slice(0, -5) + "ous";
141
+ if (word.endsWith("ation")) return word.slice(0, -5) + "ate";
142
+ if (word.endsWith("ness")) return word.slice(0, -4);
143
+ if (word.endsWith("ment")) return word.slice(0, -4);
144
+ if (word.endsWith("ting")) return word.slice(0, -3) + "e";
145
+ if (word.endsWith("ing") && word.length > 5) return word.slice(0, -3);
146
+ if (word.endsWith("ies") && word.length > 4) return word.slice(0, -3) + "y";
147
+ if (word.endsWith("ied")) return word.slice(0, -3) + "y";
148
+ if (word.endsWith("ous")) return word.slice(0, -3);
149
+ if (word.endsWith("ful")) return word.slice(0, -3);
150
+ if (word.endsWith("ers")) return word.slice(0, -3);
151
+ if (word.endsWith("ed") && word.length > 4) return word.slice(0, -2);
152
+ if (word.endsWith("ly") && word.length > 4) return word.slice(0, -2);
153
+ if (word.endsWith("es") && word.length > 4) return word.slice(0, -2);
154
+ if (word.endsWith("er") && word.length > 4) return word.slice(0, -2);
155
+ if (word.endsWith("s") && !word.endsWith("ss") && word.length > 3) return word.slice(0, -1);
156
+ return word;
157
+ }
158
+
159
+ // ─── BM25 Index ──────────────────────────────────────────────────
160
+
161
+ interface Document {
162
+ path: string;
163
+ title: string;
164
+ content: string;
165
+ tokens: string[];
166
+ tokenCount: number;
167
+ termFreqs: Map<string, number>;
168
+ }
169
+
170
+ interface SerializedIndex {
171
+ version: 1;
172
+ documents: {
173
+ path: string;
174
+ title: string;
175
+ snippet: string;
176
+ tokenCount: number;
177
+ termFreqs: [string, number][];
178
+ }[];
179
+ idf: [string, number][];
180
+ avgDl: number;
181
+ }
182
+
183
+ export class SearchIndex {
184
+ private documents: Document[] = [];
185
+ private idf = new Map<string, number>();
186
+ private avgDl = 0;
187
+
188
+ // BM25 parameters
189
+ private k1 = 1.5;
190
+ private b = 0.75;
191
+
192
+ /**
193
+ * Build the index from vault files.
194
+ */
195
+ async build(root: string, scope: "wiki" | "raw" | "all" = "all"): Promise<void> {
196
+ this.documents = [];
197
+
198
+ const files: string[] = [];
199
+ if (scope === "wiki" || scope === "all") {
200
+ const wikiFiles = await listWiki(root);
201
+ files.push(...wikiFiles.filter((f) => !f.endsWith("INDEX.md") && !f.endsWith("GRAPH.md")));
202
+ }
203
+ if (scope === "raw" || scope === "all") {
204
+ files.push(...(await listRaw(root)));
205
+ }
206
+
207
+ for (const filePath of files) {
208
+ const content = await readFile(filePath, "utf-8");
209
+ const { frontmatter, body } = parseFrontmatter(content);
210
+ const title =
211
+ (frontmatter.title as string) ?? filePath.split("/").pop()?.replace(/\.md$/, "") ?? "";
212
+
213
+ const tokens = tokenize(`${title} ${title} ${body}`); // title gets extra weight
214
+ const termFreqs = new Map<string, number>();
215
+ for (const token of tokens) {
216
+ termFreqs.set(token, (termFreqs.get(token) ?? 0) + 1);
217
+ }
218
+
219
+ this.documents.push({
220
+ path: filePath,
221
+ title,
222
+ content: body,
223
+ tokens,
224
+ tokenCount: tokens.length,
225
+ termFreqs,
226
+ });
227
+ }
228
+
229
+ // Compute IDF
230
+ this.idf.clear();
231
+ const N = this.documents.length;
232
+ const docFreq = new Map<string, number>();
233
+
234
+ for (const doc of this.documents) {
235
+ const seen = new Set<string>();
236
+ for (const token of doc.tokens) {
237
+ if (!seen.has(token)) {
238
+ docFreq.set(token, (docFreq.get(token) ?? 0) + 1);
239
+ seen.add(token);
240
+ }
241
+ }
242
+ }
243
+
244
+ for (const [term, df] of docFreq) {
245
+ // Standard IDF formula with smoothing
246
+ this.idf.set(term, Math.log((N - df + 0.5) / (df + 0.5) + 1));
247
+ }
248
+
249
+ // Average document length
250
+ this.avgDl =
251
+ this.documents.length > 0
252
+ ? this.documents.reduce((sum, d) => sum + d.tokenCount, 0) / this.documents.length
253
+ : 0;
254
+ }
255
+
256
+ /**
257
+ * Search the index using BM25 scoring.
258
+ */
259
+ search(query: string, opts: { limit?: number; threshold?: number } = {}): SearchResult[] {
260
+ const limit = opts.limit ?? 20;
261
+ const threshold = opts.threshold ?? 0;
262
+ const queryTokens = tokenize(query);
263
+
264
+ if (queryTokens.length === 0 || this.documents.length === 0) {
265
+ return [];
266
+ }
267
+
268
+ const scores: { doc: Document; score: number }[] = [];
269
+
270
+ for (const doc of this.documents) {
271
+ let score = 0;
272
+ const dl = doc.tokenCount;
273
+
274
+ for (const qt of queryTokens) {
275
+ const tf = doc.termFreqs.get(qt) ?? 0;
276
+ if (tf === 0) continue;
277
+
278
+ const idfVal = this.idf.get(qt) ?? 0;
279
+ const tfNorm =
280
+ (tf * (this.k1 + 1)) / (tf + this.k1 * (1 - this.b + this.b * (dl / this.avgDl)));
281
+
282
+ score += idfVal * tfNorm;
283
+ }
284
+
285
+ if (score > threshold) {
286
+ scores.push({ doc, score });
287
+ }
288
+ }
289
+
290
+ // Sort by score descending
291
+ scores.sort((a, b) => b.score - a.score);
292
+
293
+ return scores.slice(0, limit).map(({ doc, score }) => ({
294
+ path: doc.path,
295
+ score: Math.round(score * 100) / 100,
296
+ snippet: extractSnippet(doc.content, queryTokens),
297
+ title: doc.title || undefined,
298
+ }));
299
+ }
300
+
301
+ /**
302
+ * Serialize the index for caching.
303
+ */
304
+ serialize(): string {
305
+ const data: SerializedIndex = {
306
+ version: 1,
307
+ documents: this.documents.map((d) => ({
308
+ path: d.path,
309
+ title: d.title,
310
+ snippet: d.content.slice(0, 200),
311
+ tokenCount: d.tokens.length,
312
+ termFreqs: [...d.termFreqs.entries()],
313
+ })),
314
+ idf: [...this.idf.entries()],
315
+ avgDl: this.avgDl,
316
+ };
317
+ return JSON.stringify(data);
318
+ }
319
+
320
+ /**
321
+ * Save index to disk.
322
+ */
323
+ async save(root: string): Promise<void> {
324
+ const { writeFile, mkdir } = await import("node:fs/promises");
325
+ const dir = join(root, VAULT_DIR, CACHE_DIR);
326
+ await mkdir(dir, { recursive: true });
327
+ await writeFile(join(dir, "search.idx"), this.serialize(), "utf-8");
328
+ }
329
+
330
+ /**
331
+ * Load index from disk.
332
+ */
333
+ async load(root: string): Promise<boolean> {
334
+ const path = join(root, VAULT_DIR, CACHE_DIR, "search.idx");
335
+ if (!existsSync(path)) return false;
336
+
337
+ try {
338
+ const raw = await readFile(path, "utf-8");
339
+ const data = JSON.parse(raw) as SerializedIndex;
340
+
341
+ if (data.version !== 1) return false;
342
+
343
+ this.documents = data.documents.map((d) => ({
344
+ path: d.path,
345
+ title: d.title,
346
+ content: d.snippet,
347
+ tokens: [], // Not needed for search — termFreqs is enough
348
+ tokenCount: d.tokenCount,
349
+ termFreqs: new Map(d.termFreqs),
350
+ }));
351
+ this.idf = new Map(data.idf);
352
+ this.avgDl = data.avgDl;
353
+
354
+ return true;
355
+ } catch {
356
+ return false;
357
+ }
358
+ }
359
+
360
+ get documentCount(): number {
361
+ return this.documents.length;
362
+ }
363
+ }
364
+
365
+ /**
366
+ * Extract a relevant snippet from content matching query terms.
367
+ */
368
+ function extractSnippet(content: string, queryTokens: string[], maxLength = 150): string {
369
+ const lower = content.toLowerCase();
370
+
371
+ // Find the first occurrence of any query token
372
+ let bestPos = 0;
373
+ for (const token of queryTokens) {
374
+ const pos = lower.indexOf(token);
375
+ if (pos !== -1) {
376
+ bestPos = pos;
377
+ break;
378
+ }
379
+ }
380
+
381
+ // Extract a window around the match
382
+ const start = Math.max(0, bestPos - 30);
383
+ const end = Math.min(content.length, start + maxLength);
384
+ let snippet = content.slice(start, end).replace(/\n/g, " ").trim();
385
+
386
+ if (start > 0) snippet = `...${snippet}`;
387
+ if (end < content.length) snippet = `${snippet}...`;
388
+
389
+ return snippet;
390
+ }
@@ -0,0 +1,163 @@
1
+ import { existsSync } from "node:fs";
2
+ import { readdir } from "node:fs/promises";
3
+ import { join } from "node:path";
4
+ import { SKILLS_DIR, VAULT_DIR } from "../constants.js";
5
+ import type { SkillDefinition } from "../types.js";
6
+ import { SkillDefinitionSchema } from "./schema.js";
7
+
8
+ /**
9
+ * Load all available skills (built-in + installed).
10
+ */
11
+ export async function loadSkills(root: string): Promise<SkillDefinition[]> {
12
+ const builtIns = getBuiltinSkills();
13
+ const installed = await loadInstalledSkills(root);
14
+ return [...builtIns, ...installed];
15
+ }
16
+
17
+ /**
18
+ * Find a skill by name.
19
+ */
20
+ export async function findSkill(root: string, name: string): Promise<SkillDefinition | null> {
21
+ const skills = await loadSkills(root);
22
+ return skills.find((s) => s.name === name) ?? null;
23
+ }
24
+
25
+ /**
26
+ * Load user-installed skills from .kb/skills/.
27
+ */
28
+ async function loadInstalledSkills(root: string): Promise<SkillDefinition[]> {
29
+ const skillsDir = join(root, VAULT_DIR, SKILLS_DIR);
30
+ if (!existsSync(skillsDir)) return [];
31
+
32
+ const files = await readdir(skillsDir);
33
+ const tsFiles = files.filter((f) => f.endsWith(".ts") || f.endsWith(".js"));
34
+
35
+ const skills: SkillDefinition[] = [];
36
+
37
+ for (const file of tsFiles) {
38
+ try {
39
+ const mod = await import(join(skillsDir, file));
40
+ const definition = mod.default ?? mod;
41
+
42
+ // Validate the skill definition
43
+ const parsed = SkillDefinitionSchema.safeParse(definition);
44
+ if (parsed.success) {
45
+ skills.push({
46
+ ...definition,
47
+ run: definition.run,
48
+ });
49
+ }
50
+ } catch {
51
+ // Skip malformed skills
52
+ }
53
+ }
54
+
55
+ return skills;
56
+ }
57
+
58
+ /**
59
+ * Built-in skills that ship with kib.
60
+ */
61
+ function getBuiltinSkills(): SkillDefinition[] {
62
+ return [
63
+ {
64
+ name: "summarize",
65
+ version: "1.0.0",
66
+ description: "Summarize a wiki article or raw source",
67
+ input: "selection",
68
+ output: "stdout",
69
+ llm: {
70
+ required: true,
71
+ model: "fast",
72
+ systemPrompt:
73
+ "Summarize the following content concisely. Highlight key points, main arguments, and conclusions. Output markdown.",
74
+ maxTokens: 1024,
75
+ temperature: 0,
76
+ },
77
+ async run(ctx) {
78
+ const articles = await ctx.vault.readWiki();
79
+ if (articles.length === 0) {
80
+ ctx.logger.warn("No articles to summarize.");
81
+ return {};
82
+ }
83
+ const content = articles.map((a) => `# ${a.title}\n\n${a.content}`).join("\n\n---\n\n");
84
+ const result = await ctx.llm.complete({
85
+ system: this.llm!.systemPrompt,
86
+ messages: [{ role: "user", content }],
87
+ maxTokens: this.llm!.maxTokens,
88
+ temperature: this.llm!.temperature,
89
+ });
90
+ return { content: result.content };
91
+ },
92
+ },
93
+ {
94
+ name: "flashcards",
95
+ version: "1.0.0",
96
+ description: "Generate flashcards from wiki articles",
97
+ input: "wiki",
98
+ output: "report",
99
+ llm: {
100
+ required: true,
101
+ model: "default",
102
+ systemPrompt: `Generate flashcards from the following knowledge base articles.
103
+ Output format:
104
+ Q: [question]
105
+ A: [answer]
106
+
107
+ Create 5-10 flashcards per article. Focus on key concepts, definitions, and relationships.`,
108
+ maxTokens: 4096,
109
+ temperature: 0,
110
+ },
111
+ async run(ctx) {
112
+ const articles = await ctx.vault.readWiki();
113
+ if (articles.length === 0) {
114
+ ctx.logger.warn("No articles to generate flashcards from.");
115
+ return {};
116
+ }
117
+ const content = articles
118
+ .slice(0, 5)
119
+ .map((a) => `# ${a.title}\n\n${a.content}`)
120
+ .join("\n\n---\n\n");
121
+ const result = await ctx.llm.complete({
122
+ system: this.llm!.systemPrompt,
123
+ messages: [{ role: "user", content }],
124
+ maxTokens: this.llm!.maxTokens,
125
+ temperature: this.llm!.temperature,
126
+ });
127
+ return { content: result.content };
128
+ },
129
+ },
130
+ {
131
+ name: "connections",
132
+ version: "1.0.0",
133
+ description: "Suggest new connections between existing articles",
134
+ input: "index",
135
+ output: "report",
136
+ llm: {
137
+ required: true,
138
+ model: "default",
139
+ systemPrompt: `Analyze the following wiki index and suggest connections between articles that aren't currently linked.
140
+ For each suggestion, explain why the connection is relevant.
141
+ Output as a markdown list.`,
142
+ maxTokens: 2048,
143
+ temperature: 0.3,
144
+ },
145
+ async run(ctx) {
146
+ const index = await ctx.vault.readIndex();
147
+ const graph = await ctx.vault.readGraph();
148
+ const result = await ctx.llm.complete({
149
+ system: this.llm!.systemPrompt,
150
+ messages: [
151
+ {
152
+ role: "user",
153
+ content: `CURRENT INDEX:\n${index}\n\nCURRENT GRAPH:\n${graph}`,
154
+ },
155
+ ],
156
+ maxTokens: this.llm!.maxTokens,
157
+ temperature: this.llm!.temperature,
158
+ });
159
+ return { content: result.content };
160
+ },
161
+ },
162
+ ];
163
+ }
@@ -0,0 +1,139 @@
1
+ import { readFile } from "node:fs/promises";
2
+ import { join } from "node:path";
3
+ import { parseFrontmatter } from "../compile/diff.js";
4
+ import { WIKI_DIR } from "../constants.js";
5
+ import { SearchIndex } from "../search/engine.js";
6
+ import type {
7
+ LLMProvider,
8
+ Manifest,
9
+ SkillContext,
10
+ SkillDefinition,
11
+ VaultConfig,
12
+ } from "../types.js";
13
+ import {
14
+ listRaw,
15
+ listWiki,
16
+ loadConfig,
17
+ loadManifest,
18
+ readIndex,
19
+ readWiki,
20
+ writeWiki,
21
+ } from "../vault.js";
22
+
23
+ export interface RunSkillOptions {
24
+ /** Additional CLI args */
25
+ args?: Record<string, unknown>;
26
+ /** LLM provider (required if skill.llm.required is true) */
27
+ provider?: LLMProvider;
28
+ }
29
+
30
+ export interface RunSkillResult {
31
+ content?: string;
32
+ }
33
+
34
+ /**
35
+ * Execute a skill against a vault.
36
+ */
37
+ export async function runSkill(
38
+ root: string,
39
+ skill: SkillDefinition,
40
+ options: RunSkillOptions = {},
41
+ ): Promise<RunSkillResult> {
42
+ const manifest = await loadManifest(root);
43
+ const config = await loadConfig(root);
44
+
45
+ if (skill.llm?.required && !options.provider) {
46
+ throw new Error(`Skill "${skill.name}" requires an LLM provider`);
47
+ }
48
+
49
+ const ctx = buildContext(root, manifest, config, options);
50
+ return skill.run(ctx);
51
+ }
52
+
53
+ function buildContext(
54
+ root: string,
55
+ manifest: Manifest,
56
+ config: VaultConfig,
57
+ options: RunSkillOptions,
58
+ ): SkillContext {
59
+ return {
60
+ vault: {
61
+ async readIndex() {
62
+ return readIndex(root);
63
+ },
64
+ async readGraph() {
65
+ try {
66
+ return await readFile(join(root, WIKI_DIR, "GRAPH.md"), "utf-8");
67
+ } catch {
68
+ return "";
69
+ }
70
+ },
71
+ async readWiki() {
72
+ const files = await listWiki(root);
73
+ const articles: { title: string; slug: string; content: string }[] = [];
74
+ for (const f of files) {
75
+ if (f.endsWith("INDEX.md") || f.endsWith("GRAPH.md")) continue;
76
+ const raw = await readFile(f, "utf-8");
77
+ const { frontmatter, body } = parseFrontmatter(raw);
78
+ articles.push({
79
+ title: (frontmatter.title as string) ?? "",
80
+ slug: (frontmatter.slug as string) ?? "",
81
+ content: body,
82
+ });
83
+ }
84
+ return articles;
85
+ },
86
+ async readRaw() {
87
+ const files = await listRaw(root);
88
+ const sources: { path: string; content: string }[] = [];
89
+ for (const f of files) {
90
+ const content = await readFile(f, "utf-8");
91
+ sources.push({ path: f, content });
92
+ }
93
+ return sources;
94
+ },
95
+ async readFile(path: string) {
96
+ return readFile(join(root, path), "utf-8");
97
+ },
98
+ async writeFile(path: string, content: string) {
99
+ await writeWiki(root, path, content);
100
+ },
101
+ async listFiles(glob: string) {
102
+ // Simple implementation — just list wiki files matching pattern
103
+ const allFiles = await listWiki(root);
104
+ if (!glob || glob === "*") return allFiles;
105
+ return allFiles.filter((f) => f.includes(glob.replace("*", "")));
106
+ },
107
+ manifest,
108
+ config,
109
+ },
110
+
111
+ llm: {
112
+ async complete(params) {
113
+ if (!options.provider) throw new Error("No LLM provider available");
114
+ return options.provider.complete(params);
115
+ },
116
+ async *stream(params) {
117
+ if (!options.provider) throw new Error("No LLM provider available");
118
+ yield* options.provider.stream(params);
119
+ },
120
+ },
121
+
122
+ search: {
123
+ async query(term, opts) {
124
+ const index = new SearchIndex();
125
+ const loaded = await index.load(root);
126
+ if (!loaded) await index.build(root, "wiki");
127
+ return index.search(term, opts);
128
+ },
129
+ },
130
+
131
+ logger: {
132
+ info: (msg) => console.log(` [${skill.name}] ${msg}`),
133
+ warn: (msg) => console.warn(` [${skill.name}] ⚠ ${msg}`),
134
+ error: (msg) => console.error(` [${skill.name}] ✗ ${msg}`),
135
+ },
136
+
137
+ args: options.args ?? {},
138
+ };
139
+ }
@@ -0,0 +1,28 @@
1
+ import { z } from "zod";
2
+
3
+ export const SkillInputSchema = z.enum(["wiki", "raw", "vault", "selection", "index", "none"]);
4
+
5
+ export const SkillOutputSchema = z.enum(["articles", "report", "mutations", "stdout", "none"]);
6
+
7
+ export const SkillDefinitionSchema = z.object({
8
+ name: z.string().min(1),
9
+ version: z.string().default("1.0.0"),
10
+ description: z.string().min(1),
11
+ author: z.string().optional(),
12
+
13
+ input: SkillInputSchema,
14
+ output: SkillOutputSchema,
15
+
16
+ llm: z
17
+ .object({
18
+ required: z.boolean().default(true),
19
+ model: z.enum(["default", "fast"]).default("default"),
20
+ systemPrompt: z.string(),
21
+ maxTokens: z.number().int().positive().optional(),
22
+ temperature: z.number().min(0).max(2).optional(),
23
+ })
24
+ .optional(),
25
+ });
26
+
27
+ export type SkillInput = z.infer<typeof SkillInputSchema>;
28
+ export type SkillOutput = z.infer<typeof SkillOutputSchema>;