@justestif/pk 0.1.13 → 0.1.14

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -52,16 +52,17 @@ Available harnesses: `claude`, `claude-desktop`, `omp`, `cursor`, `opencode`, `c
52
52
 
53
53
  ## How it works
54
54
 
55
- `pk mcp` runs a stdio MCP server that exposes four tools to any connected agent:
55
+ `pk mcp` runs a stdio MCP server that exposes five tools to any connected agent:
56
56
 
57
57
  | Tool | What it does |
58
58
  |---|---|
59
59
  | `pk_search` | Full-text search over the knowledge base (BM25, Porter stemming) |
60
- | `pk_synthesize` | Summarise notes matching a query or the whole base |
61
- | `pk_new` | Create a new note (type, title, tags, body) |
60
+ | `pk_synthesize` | Ranked context dump by query, session start, or all notes |
61
+ | `pk_read` | Read the full content of a note by path |
62
+ | `pk_new` | Create a new typed note skeleton, returns path to fill in |
62
63
  | `pk_lint` | Validate all notes for schema and quality rules |
63
64
 
64
- The agent calls these tools directly — no hooks, no shell extensions, no prompt injection.
65
+ The agent calls these tools directly — no hooks, no shell extensions, no prompt injection. Agents should never read or write knowledge files directly.
65
66
 
66
67
  ## Commands
67
68
 
@@ -102,8 +103,8 @@ pk synthesize
102
103
 
103
104
  ## Knowledge structure
104
105
 
105
- Notes live in `~/.pk/<name>/` as plain markdown files — human-editable,
106
- git-diffable, readable without any tool.
106
+ Notes live in `~/.pk/<name>/` as plain markdown files — human-editable and git-diffable.
107
+ Agents access them exclusively through the MCP tools; humans can read and edit them directly.
107
108
 
108
109
  ```
109
110
  ~/.pk/
package/dist/index.js CHANGED
@@ -17713,7 +17713,7 @@ var {
17713
17713
  var package_default = {
17714
17714
  name: "@justestif/pk",
17715
17715
  type: "module",
17716
- version: "0.1.13",
17716
+ version: "0.1.14",
17717
17717
  description: "Project knowledge \u2014 structured intake, search, and recall",
17718
17718
  bin: {
17719
17719
  pk: "dist/index.js"
@@ -32791,60 +32791,47 @@ function registerLint(program2) {
32791
32791
  }
32792
32792
 
32793
32793
  // src/lib/config.ts
32794
- import {
32795
- existsSync as existsSync3,
32796
- mkdirSync as mkdirSync3,
32797
- readFileSync,
32798
- writeFileSync
32799
- } from "fs";
32794
+ import { mkdirSync as mkdirSync3 } from "fs";
32800
32795
  import path6 from "path";
32801
32796
  import os2 from "os";
32802
32797
  var DEFAULT = { auto_commit: true, embedding: "" };
32803
32798
  function configPath() {
32804
32799
  return path6.join(os2.homedir(), ".pk", "config.json");
32805
32800
  }
32806
- function loadConfig() {
32801
+ async function loadConfig() {
32807
32802
  const p = configPath();
32808
- if (!existsSync3(p)) {
32809
- return { ...DEFAULT };
32810
- }
32811
32803
  try {
32812
- const merged = { ...DEFAULT, ...JSON.parse(readFileSync(p, "utf8")) };
32804
+ const text = await Bun.file(p).text();
32805
+ const merged = { ...DEFAULT, ...JSON.parse(text) };
32813
32806
  return merged;
32814
32807
  } catch {
32815
32808
  return { ...DEFAULT };
32816
32809
  }
32817
32810
  }
32818
- function saveConfig(config2) {
32811
+ async function saveConfig(config2) {
32819
32812
  const p = configPath();
32820
32813
  mkdirSync3(path6.dirname(p), { recursive: true });
32821
- writeFileSync(p, JSON.stringify(config2, null, 2) + `
32814
+ await Bun.write(p, JSON.stringify(config2, null, 2) + `
32822
32815
  `);
32823
32816
  }
32824
32817
 
32825
32818
  // src/commands/config-cmd.ts
32826
32819
  function registerConfig(program2) {
32827
- program2.command("config").description("Show or update pk configuration (~/.pk/config.json)").option("--auto-commit <bool>", "Auto-commit knowledge operations (true/false)").option("--embedding <model>", "Embedding model (empty to disable)").action((opts) => {
32828
- const config2 = loadConfig();
32820
+ program2.command("config").description("Show or update pk configuration (~/.pk/config.json)").option("--auto-commit <bool>", "Auto-commit knowledge operations (true/false)").option("--embedding <model>", "Embedding model (empty to disable)").action(async (opts) => {
32821
+ const config2 = await loadConfig();
32829
32822
  if (opts.autoCommit !== undefined) {
32830
32823
  config2.auto_commit = opts.autoCommit === "true";
32831
32824
  }
32832
32825
  if (opts.embedding !== undefined) {
32833
32826
  config2.embedding = opts.embedding;
32834
32827
  }
32835
- saveConfig(config2);
32828
+ await saveConfig(config2);
32836
32829
  console.log(JSON.stringify(config2, null, 2));
32837
32830
  });
32838
32831
  }
32839
32832
 
32840
32833
  // src/commands/init.ts
32841
- import {
32842
- cpSync,
32843
- existsSync as existsSync4,
32844
- mkdirSync as mkdirSync4,
32845
- readFileSync as readFileSync2,
32846
- writeFileSync as writeFileSync2
32847
- } from "fs";
32834
+ import { cpSync, existsSync as existsSync3, mkdirSync as mkdirSync4 } from "fs";
32848
32835
  import os3 from "os";
32849
32836
  import path7 from "path";
32850
32837
 
@@ -34172,62 +34159,59 @@ function pkMcpEntry(knowledgeDir) {
34172
34159
  env: { PK_KNOWLEDGE_DIR: knowledgeDir }
34173
34160
  };
34174
34161
  }
34175
- function readJson(filePath) {
34176
- if (!existsSync4(filePath)) {
34177
- return {};
34178
- }
34162
+ async function readJson(filePath) {
34179
34163
  try {
34180
- return JSON.parse(readFileSync2(filePath, "utf8"));
34164
+ return JSON.parse(await Bun.file(filePath).text());
34181
34165
  } catch {
34182
34166
  return {};
34183
34167
  }
34184
34168
  }
34185
- function writeJson(filePath, data) {
34169
+ async function writeJson(filePath, data) {
34186
34170
  mkdirSync4(path7.dirname(filePath), { recursive: true });
34187
- writeFileSync2(filePath, JSON.stringify(data, null, 2) + `
34171
+ await Bun.write(filePath, JSON.stringify(data, null, 2) + `
34188
34172
  `);
34189
34173
  }
34190
- function writeClaudeConfig(projectRoot, _name, knowledgeDir) {
34174
+ async function writeClaudeConfig(projectRoot, _name, knowledgeDir) {
34191
34175
  const cfgPath = path7.join(projectRoot, ".mcp.json");
34192
- const cfg = readJson(cfgPath);
34176
+ const cfg = await readJson(cfgPath);
34193
34177
  const servers = cfg.mcpServers ?? {};
34194
34178
  servers.pk = pkMcpEntry(knowledgeDir);
34195
34179
  cfg.mcpServers = servers;
34196
- writeJson(cfgPath, cfg);
34180
+ await writeJson(cfgPath, cfg);
34197
34181
  }
34198
- function writeClaudeDesktopConfig(homeDir, name, knowledgeDir) {
34182
+ async function writeClaudeDesktopConfig(homeDir, name, knowledgeDir) {
34199
34183
  const cfgPath = path7.join(homeDir, "Library", "Application Support", "Claude", "claude_desktop_config.json");
34200
- const cfg = readJson(cfgPath);
34184
+ const cfg = await readJson(cfgPath);
34201
34185
  const servers = cfg.mcpServers ?? {};
34202
34186
  servers[`pk-${name}`] = pkMcpEntry(knowledgeDir);
34203
34187
  cfg.mcpServers = servers;
34204
- writeJson(cfgPath, cfg);
34188
+ await writeJson(cfgPath, cfg);
34205
34189
  }
34206
- function writeCursorConfig(projectRoot, _name, knowledgeDir) {
34190
+ async function writeCursorConfig(projectRoot, _name, knowledgeDir) {
34207
34191
  const cfgPath = path7.join(projectRoot, ".cursor", "mcp.json");
34208
- const cfg = readJson(cfgPath);
34192
+ const cfg = await readJson(cfgPath);
34209
34193
  const servers = cfg.mcpServers ?? {};
34210
34194
  servers.pk = pkMcpEntry(knowledgeDir);
34211
34195
  cfg.mcpServers = servers;
34212
- writeJson(cfgPath, cfg);
34196
+ await writeJson(cfgPath, cfg);
34213
34197
  }
34214
- function writeOmpConfig(projectRoot, _name, knowledgeDir) {
34198
+ async function writeOmpConfig(projectRoot, _name, knowledgeDir) {
34215
34199
  const cfgPath = path7.join(projectRoot, ".omp", "mcp.json");
34216
- const cfg = readJson(cfgPath);
34200
+ const cfg = await readJson(cfgPath);
34217
34201
  const servers = cfg.mcpServers ?? {};
34218
34202
  servers.pk = pkMcpEntry(knowledgeDir);
34219
34203
  cfg.mcpServers = servers;
34220
- writeJson(cfgPath, cfg);
34204
+ await writeJson(cfgPath, cfg);
34221
34205
  }
34222
- function writeOpenCodeConfig(projectRoot, _name, knowledgeDir) {
34206
+ async function writeOpenCodeConfig(projectRoot, _name, knowledgeDir) {
34223
34207
  const cfgPath = path7.join(projectRoot, "opencode.json");
34224
- const cfg = readJson(cfgPath);
34208
+ const cfg = await readJson(cfgPath);
34225
34209
  const mcp = cfg.mcp ?? {};
34226
34210
  mcp.pk = pkMcpEntry(knowledgeDir);
34227
34211
  cfg.mcp = mcp;
34228
- writeJson(cfgPath, cfg);
34212
+ await writeJson(cfgPath, cfg);
34229
34213
  }
34230
- function writeCodexConfig(projectRoot, _name, knowledgeDir) {
34214
+ async function writeCodexConfig(projectRoot, _name, knowledgeDir) {
34231
34215
  const cfgPath = path7.join(projectRoot, ".codex", "config.toml");
34232
34216
  const toml = [
34233
34217
  "[mcp_servers.pk]",
@@ -34239,18 +34223,17 @@ function writeCodexConfig(projectRoot, _name, knowledgeDir) {
34239
34223
  ""
34240
34224
  ].join(`
34241
34225
  `);
34242
- if (existsSync4(cfgPath)) {
34243
- const existing = readFileSync2(cfgPath, "utf8");
34226
+ mkdirSync4(path7.dirname(cfgPath), { recursive: true });
34227
+ if (existsSync3(cfgPath)) {
34228
+ const existing = await Bun.file(cfgPath).text();
34244
34229
  if (existing.includes("[mcp_servers.pk]")) {
34245
34230
  return;
34246
34231
  }
34247
- mkdirSync4(path7.dirname(cfgPath), { recursive: true });
34248
- writeFileSync2(cfgPath, existing.trimEnd() + `
34232
+ await Bun.write(cfgPath, existing.trimEnd() + `
34249
34233
 
34250
34234
  ` + toml);
34251
34235
  } else {
34252
- mkdirSync4(path7.dirname(cfgPath), { recursive: true });
34253
- writeFileSync2(cfgPath, toml);
34236
+ await Bun.write(cfgPath, toml);
34254
34237
  }
34255
34238
  }
34256
34239
  function skillTargetDir(harness, projectRoot) {
@@ -34280,61 +34263,59 @@ function installSkill(harness, projectRoot) {
34280
34263
  return "";
34281
34264
  }
34282
34265
  const src = skillSourceDir();
34283
- if (!existsSync4(src)) {
34266
+ if (!existsSync3(src)) {
34284
34267
  return "";
34285
34268
  }
34286
- if (existsSync4(target)) {
34269
+ if (existsSync3(target)) {
34287
34270
  return target;
34288
34271
  }
34289
34272
  cpSync(src, target, { recursive: true });
34290
34273
  return target;
34291
34274
  }
34292
- function ensureProject(name) {
34275
+ async function ensureProject(name) {
34293
34276
  const kDir = projectDir(name);
34294
- const alreadyExists = existsSync4(kDir);
34277
+ const alreadyExists = existsSync3(kDir);
34295
34278
  for (const dir of Object.values(TYPE_DIRS)) {
34296
34279
  mkdirSync4(path7.join(kDir, dir), { recursive: true });
34297
34280
  }
34298
34281
  const gi = path7.join(kDir, ".gitignore");
34299
- if (!existsSync4(gi)) {
34300
- writeFileSync2(gi, `.index.db
34282
+ if (!existsSync3(gi)) {
34283
+ await Bun.write(gi, `.index.db
34301
34284
  `);
34302
34285
  }
34303
34286
  return { created: !alreadyExists, knowledgeDir: kDir };
34304
34287
  }
34305
- function applyHarness(harness, ctx) {
34288
+ async function applyHarness(harness, ctx) {
34306
34289
  const { name, knowledgeDir, projectRoot, home } = ctx;
34307
34290
  switch (harness) {
34308
34291
  case "claude": {
34309
- writeClaudeConfig(projectRoot, name, knowledgeDir);
34292
+ await writeClaudeConfig(projectRoot, name, knowledgeDir);
34310
34293
  break;
34311
34294
  }
34312
34295
  case "claude-desktop": {
34313
- writeClaudeDesktopConfig(home, name, knowledgeDir);
34296
+ await writeClaudeDesktopConfig(home, name, knowledgeDir);
34314
34297
  break;
34315
34298
  }
34316
34299
  case "codex": {
34317
- writeCodexConfig(projectRoot, name, knowledgeDir);
34300
+ await writeCodexConfig(projectRoot, name, knowledgeDir);
34318
34301
  break;
34319
34302
  }
34320
34303
  case "cursor": {
34321
- writeCursorConfig(projectRoot, name, knowledgeDir);
34304
+ await writeCursorConfig(projectRoot, name, knowledgeDir);
34322
34305
  break;
34323
34306
  }
34324
34307
  case "omp": {
34325
- writeOmpConfig(projectRoot, name, knowledgeDir);
34308
+ await writeOmpConfig(projectRoot, name, knowledgeDir);
34326
34309
  break;
34327
34310
  }
34328
34311
  case "opencode": {
34329
- writeOpenCodeConfig(projectRoot, name, knowledgeDir);
34312
+ await writeOpenCodeConfig(projectRoot, name, knowledgeDir);
34330
34313
  break;
34331
34314
  }
34332
34315
  }
34333
34316
  }
34334
- function applyHarnesses(harnesses, ctx) {
34335
- for (const h2 of harnesses) {
34336
- applyHarness(h2, ctx);
34337
- }
34317
+ async function applyHarnesses(harnesses, ctx) {
34318
+ await Promise.all(harnesses.map(async (h2) => applyHarness(h2, ctx)));
34338
34319
  const seen = new Set;
34339
34320
  const installed = [];
34340
34321
  for (const h2 of harnesses) {
@@ -34361,14 +34342,14 @@ function registerInit(program2) {
34361
34342
  flagHarnesses = result;
34362
34343
  }
34363
34344
  if (nameArg && flagHarnesses) {
34364
- const { created: created2, knowledgeDir: knowledgeDir2 } = ensureProject(nameArg);
34345
+ const { created: created2, knowledgeDir: knowledgeDir2 } = await ensureProject(nameArg);
34365
34346
  const ctx2 = {
34366
34347
  home,
34367
34348
  knowledgeDir: knowledgeDir2,
34368
34349
  name: nameArg,
34369
34350
  projectRoot
34370
34351
  };
34371
- const skillPaths2 = applyHarnesses(flagHarnesses, ctx2);
34352
+ const skillPaths2 = await applyHarnesses(flagHarnesses, ctx2);
34372
34353
  console.log(buildOutro(created2, knowledgeDir2, flagHarnesses, skillPaths2).join(`
34373
34354
  `));
34374
34355
  return;
@@ -34424,14 +34405,14 @@ function registerInit(program2) {
34424
34405
  }
34425
34406
  harnesses = picked;
34426
34407
  }
34427
- const { created, knowledgeDir } = ensureProject(name);
34408
+ const { created, knowledgeDir } = await ensureProject(name);
34428
34409
  const ctx = {
34429
34410
  home,
34430
34411
  knowledgeDir,
34431
34412
  name,
34432
34413
  projectRoot
34433
34414
  };
34434
- const skillPaths = applyHarnesses(harnesses, ctx);
34415
+ const skillPaths = await applyHarnesses(harnesses, ctx);
34435
34416
  ye(buildOutro(created, knowledgeDir, harnesses, skillPaths).join(`
34436
34417
  `));
34437
34418
  });
@@ -42919,6 +42900,7 @@ function createPkMcpServer() {
42919
42900
  const dir = requireKnowledgeDir();
42920
42901
  try {
42921
42902
  const outPath = await createNote(dir, type, title, tags ?? "");
42903
+ await rebuild(dir);
42922
42904
  return { content: [{ type: "text", text: outPath }] };
42923
42905
  } catch (error51) {
42924
42906
  return {
@@ -42927,6 +42909,29 @@ function createPkMcpServer() {
42927
42909
  };
42928
42910
  }
42929
42911
  });
42912
+ server.registerTool("pk_read", {
42913
+ description: "Read the full content of a knowledge note by its path. Use paths returned by pk_search or pk_synthesize.",
42914
+ inputSchema: {
42915
+ path: exports_external.string().describe("Absolute path to the note file, as returned by pk_search or pk_synthesize")
42916
+ }
42917
+ }, async ({ path: notePath }) => {
42918
+ const dir = requireKnowledgeDir();
42919
+ if (!notePath.startsWith(dir)) {
42920
+ return {
42921
+ content: [{ type: "text", text: `Path must be inside the knowledge directory: ${dir}` }],
42922
+ isError: true
42923
+ };
42924
+ }
42925
+ try {
42926
+ const text = await Bun.file(notePath).text();
42927
+ return { content: [{ type: "text", text }] };
42928
+ } catch {
42929
+ return {
42930
+ content: [{ type: "text", text: `File not found: ${notePath}` }],
42931
+ isError: true
42932
+ };
42933
+ }
42934
+ });
42930
42935
  server.registerTool("pk_lint", {
42931
42936
  description: "Validate knowledge note structure and frontmatter. Returns lint issues grouped by severity.",
42932
42937
  inputSchema: {}
package/package.json CHANGED
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "name": "@justestif/pk",
3
3
  "type": "module",
4
- "version": "0.1.13",
4
+ "version": "0.1.14",
5
5
  "description": "Project knowledge — structured intake, search, and recall",
6
6
  "bin": {
7
7
  "pk": "dist/index.js"
package/skill/SKILL.md CHANGED
@@ -5,43 +5,89 @@ description: "Load when maintaining project knowledge, capturing decisions or qu
5
5
 
6
6
  # pk
7
7
 
8
- Structured project knowledge — intake, search, recall, and audit over `knowledge/`.
8
+ Structured project knowledge — intake, search, recall, and audit.
9
+
10
+ **Search, synthesis, creation, and validation go through MCP tools. Writing note body content uses your standard file Edit tool — but only on paths returned by `pk_new` or `pk_search`, never by navigating the filesystem yourself.**
9
11
 
10
12
  ## Tools
11
13
 
12
- | Task | Tool |
13
- |---|---|
14
- | Search notes | `pk_search` |
15
- | Context dump / session start | `pk_synthesize` |
16
- | Create a note | `pk_new` |
17
- | Validate structure | `pk_lint` |
14
+ ### `pk_synthesize` — orient before any investigation
15
+
16
+ ```
17
+ pk_synthesize({ sessionStart: true }) # open questions + accepted decisions + active notes
18
+ pk_synthesize({ query: "auth flow" }) # ranked context for a topic
19
+ pk_synthesize({ query: "auth", type: "decision", limit: 5 })
20
+ ```
21
+
22
+ Returns formatted markdown with title, type, status, tags, and an excerpt per note. Use `sessionStart: true` at the start of every session.
23
+
24
+ ### `pk_search` — locate notes by content
25
+
26
+ ```
27
+ pk_search({ query: "database schema" })
28
+ pk_search({ query: "api", type: "question", status: "open" })
29
+ pk_search({ query: "deploy", tag: "infra", limit: 5 })
30
+ ```
31
+
32
+ Returns `[{ path, type, status, title, tags, snippet }]`. Always call before `pk_new` — duplicates erode trust faster than gaps do.
33
+
34
+ ### `pk_read` — full note body
35
+
36
+ ```
37
+ pk_read({ path: "/abs/path/returned/by/pk_search" })
38
+ ```
39
+
40
+ Returns complete file contents including frontmatter. Use paths from `pk_search` or `pk_synthesize` output.
41
+
42
+ ### `pk_new` — create a typed note skeleton
43
+
44
+ ```
45
+ pk_new({ type: "note", title: "Auth token expiry behaviour", tags: "auth,security" })
46
+ pk_new({ type: "decision", title: "Use JWT over sessions" })
47
+ pk_new({ type: "question", title: "Should we rate-limit the search endpoint?" })
48
+ pk_new({ type: "source", title: "Meeting notes 2024-06-01" })
49
+ ```
50
+
51
+ Returns the absolute path. Frontmatter (id, dates, status, tags as YAML array) is generated automatically from your inputs — you don't edit frontmatter after creation. After receiving the path: call `pk_read` to see the skeleton, then use your standard file Edit tool to fill in the required sections.
52
+
53
+ **Required sections by type:**
54
+ - `note` → `## Summary`, `## Details`, `## Evidence`, `## Related`
55
+ - `decision` → `## Decision`, `## Context`, `## Rationale`, `## Consequences`, `## Related`
56
+ - `question` → `## Question`, `## Why It Matters`, `## Current Understanding`, `## Resolution`
57
+ - `source` → `## Source`, `## Raw Material`, `## Extracted Items`
58
+
59
+ **`source` vs `note`:** `source` = raw/provenance-heavy input (meeting notes, transcripts, external docs, unprocessed data). `note` = stable synthesised fact or constraint you've derived. When synthesising across multiple sources into one insight: create a `note` and put source paths in `## Evidence`.
60
+
61
+ ### `pk_lint` — validate before committing
62
+
63
+ ```
64
+ pk_lint({})
65
+ ```
18
66
 
19
- ## Intake
67
+ **Errors block commits** (missing frontmatter, duplicate id, wrong folder, missing required sections, broken links). **Warnings are advisory** (empty tags, note too long, source marked processed with no extracted items) — fix when practical, not required to commit.
20
68
 
21
- **Search before creating** — always call `pk_search` first.
69
+ ### Status transitions
22
70
 
23
- - Substantial messy input `source`. Extract `note`, `decision`, `question` only when durable beyond this session.
24
- - Update existing when the match is obvious; otherwise create and link in body.
25
- - Call `pk_lint` before committing. Auto-commit coherent operations only when lint passes and no unrelated files are staged.
71
+ No MCP tool for status changes. Use your file Edit tool directly on the frontmatter, fill in the resolution section, then lint.
26
72
 
27
- ## Asking
73
+ **MANDATORY READ `references/knowledge-model.md`** when: creating a note type you haven't used before, unsure which folder a type belongs in, validating frontmatter fields, or unsure which status values are valid for a given type. (Read with your standard file Read tool — these are local skill files, not MCP-accessible.)
28
74
 
29
- 1. `pk_search` with the relevant query
30
- 2. Read top results directly
31
- 3. Answer with citations to note paths/IDs
32
- 4. If silent or ambiguous, offer to create a `question` note
75
+ **MANDATORY READ `references/git-workflow.md`** when: committing knowledge changes or unsure whether to auto-commit. (Same — standard file Read tool.)
33
76
 
34
77
  ## NEVER
35
78
 
36
- - **Skip `pk_search` before creating** — duplicates erode trust in the knowledge base
37
- - **Dump raw input into durable notes** preserve in `source`, extract selectively
38
- - **Silently merge related-but-different claims** create and link instead
39
- - **Auto-commit when lint fails or unrelated files are staged**
79
+ - **NEVER skip `pk_search` before `pk_new`**
80
+ **Why:** Duplicates silently fragment knowledge — two notes on the same topic never get reconciled, and future searches return noise.
81
+ **Instead:** Search first; update the existing note if found, or create and link if genuinely different.
40
82
 
41
- ## References
83
+ - **NEVER dump raw input into a `note` or `decision`**
84
+ **Why:** Durable note types are for stable, verified claims. Raw input contains noise, ambiguity, and provenance that decays poorly.
85
+ **Instead:** Create a `source` note, then extract `note`/`decision`/`question` entries from it selectively.
42
86
 
43
- Load only when the task requires it:
87
+ - **NEVER silently overwrite a conflicting claim**
88
+ **Why:** Silent overwrites destroy the rationale trail — you lose why the old claim existed.
89
+ **Instead:** Create a new note explaining the conflict, link both, and use `status: superseded` on the old one.
44
90
 
45
- - `references/knowledge-model.md` types, folders, frontmatter schema, required sections
46
- - `references/git-workflow.md` commit policy, safety stops
47
- - `references/source-principles.md` documentation governance
91
+ - **NEVER commit when `pk_lint` returns errors or unrelated files are staged**
92
+ **Why:** Lint errors mean required structure is broken; mixed commits make knowledge changes unauditable.
93
+ **Instead:** Fix errors, unstage unrelated files, then commit.
@@ -1,29 +0,0 @@
1
- ---
2
- id: decision-{{date}}-{{slug}}
3
- type: decision
4
- title: {{title}}
5
- created: {{date}}
6
- updated: {{date}}
7
- status: accepted
8
- tags: [{{tags}}]
9
- ---
10
-
11
- ## Decision
12
-
13
- What was decided.
14
-
15
- ## Context
16
-
17
- Why this decision came up.
18
-
19
- ## Rationale
20
-
21
- Why this option won.
22
-
23
- ## Consequences
24
-
25
- What this changes or constrains.
26
-
27
- ## Related
28
-
29
- Links to source, questions, notes, or superseded decisions.
@@ -1,25 +0,0 @@
1
- ---
2
- id: index-{{slug}}
3
- type: index
4
- title: {{title}}
5
- created: {{date}}
6
- updated: {{date}}
7
- status: active
8
- tags: [{{tags}}]
9
- ---
10
-
11
- ## Purpose
12
-
13
- What this index helps navigate.
14
-
15
- ## Key Links
16
-
17
- Curated links.
18
-
19
- ## Open Questions
20
-
21
- Relevant unresolved questions.
22
-
23
- ## Recent Changes
24
-
25
- Notable updates.
@@ -1,25 +0,0 @@
1
- ---
2
- id: note-{{date}}-{{slug}}
3
- type: note
4
- title: {{title}}
5
- created: {{date}}
6
- updated: {{date}}
7
- status: active
8
- tags: [{{tags}}]
9
- ---
10
-
11
- ## Summary
12
-
13
- One short paragraph.
14
-
15
- ## Details
16
-
17
- Durable project knowledge.
18
-
19
- ## Evidence
20
-
21
- Source links, files, quotes, or observations.
22
-
23
- ## Related
24
-
25
- Links to related notes, decisions, or questions.
@@ -1,25 +0,0 @@
1
- ---
2
- id: question-{{date}}-{{slug}}
3
- type: question
4
- title: {{title}}
5
- created: {{date}}
6
- updated: {{date}}
7
- status: open
8
- tags: [{{tags}}]
9
- ---
10
-
11
- ## Question
12
-
13
- The unresolved question.
14
-
15
- ## Why It Matters
16
-
17
- What decision or work this blocks or informs.
18
-
19
- ## Current Understanding
20
-
21
- Known facts and candidate answers.
22
-
23
- ## Resolution
24
-
25
- Answer once resolved.
@@ -1,21 +0,0 @@
1
- ---
2
- id: source-{{date}}-{{slug}}
3
- type: source
4
- title: {{title}}
5
- created: {{date}}
6
- updated: {{date}}
7
- status: unprocessed
8
- tags: [{{tags}}]
9
- ---
10
-
11
- ## Source
12
-
13
- Where this came from.
14
-
15
- ## Raw Material
16
-
17
- Original or lightly cleaned content.
18
-
19
- ## Extracted Items
20
-
21
- Links to notes, decisions, or questions created from this source.