@sfrangulov/shared-memory-mcp 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/github-memory-server.js +1360 -0
- package/lib/atomic-commit.js +126 -0
- package/lib/github-client.js +220 -0
- package/lib/root-parser.js +323 -0
- package/lib/slugify.js +77 -0
- package/lib/state-manager.js +153 -0
- package/package.json +33 -0
|
@@ -0,0 +1,126 @@
|
|
|
1
|
+
/**
 * Atomic commit via Git Trees API with SHA conflict retry.
 *
 * @module atomic-commit
 */

/**
 * Error thrown when a ref update fails due to a SHA conflict (HTTP 422).
 */
export class ConflictError extends Error {
  /**
   * @param {string} message
   * @param {{ cause?: Error }} [options] - standard Error options; pass the
   *   underlying error as `cause` so it is preserved for debugging. On
   *   engines without ES2022 error causes the extra argument is ignored.
   */
  constructor(message, options) {
    super(message, options);
    this.name = "ConflictError";
  }
}

/** @param {number} ms */
const sleep = (ms) => new Promise((r) => setTimeout(r, ms));

/**
 * Performs a single atomic commit using the Git Trees API.
 *
 * Workflow:
 * 1. Get HEAD SHA (or use provided parentSHA)
 * 2. Get tree SHA of parent commit
 * 3. Create blobs for each file
 * 4. Create new tree with base_tree (preserves existing files)
 * 5. Create commit pointing to new tree
 * 6. Update ref to new commit (throws ConflictError on 422)
 *
 * @param {object} client - GitHub client (from createGitHubClient)
 * @param {object} params
 * @param {Array<{path: string, content: string}>} params.files - files to commit
 * @param {string} params.message - commit message
 * @param {string} [params.parentSHA] - optional parent commit SHA (fetches HEAD if omitted)
 * @returns {Promise<{commitSHA: string, success: true}>}
 * @throws {ConflictError} when ref update fails with 422
 */
export async function atomicCommit(client, { files, message, parentSHA }) {
  // Step 1: resolve the parent commit — HEAD unless the caller pinned one.
  const resolvedParentSHA = parentSHA ?? (await client.getHeadSHA());

  // Step 2: tree of the parent commit; used below as base_tree so files
  // not mentioned in `files` survive the commit.
  const treeSHA = await client.getTreeSHA(resolvedParentSHA);

  // Step 3: upload each file's content as a blob (in parallel).
  const filesWithBlobs = await Promise.all(
    files.map(async (file) => {
      const blobSHA = await client.createBlob(file.content);
      return { path: file.path, blobSHA };
    })
  );

  // Step 4: create the new tree layered on top of base_tree.
  const newTreeSHA = await client.createTree(treeSHA, filesWithBlobs);

  // Step 5: create the commit object pointing at the new tree.
  const commitSHA = await client.createCommit(
    newTreeSHA,
    resolvedParentSHA,
    message
  );

  // Step 6: fast-forward the ref. A 422 means HEAD moved since we read it
  // (someone else committed first) — surface that as ConflictError so
  // callers can retry; any other error is rethrown untouched.
  try {
    await client.updateRef(commitSHA);
  } catch (err) {
    if (err.status === 422) {
      throw new ConflictError(
        `Ref update conflict: ${err.message || "SHA mismatch"}`,
        { cause: err } // keep the original API error attached
      );
    }
    throw err;
  }

  return { commitSHA, success: true };
}

/**
 * Performs an atomic commit with automatic retry on SHA conflicts.
 *
 * On ConflictError: retries with fresh HEAD SHA.
 * Backoff: 1s, 3s, 9s (exponential * 3); later retries reuse the last delay.
 * After all retries exhausted: returns { success: false, error: 'conflict' }.
 * On non-ConflictError: rethrows immediately.
 *
 * Note: the total number of attempts is 1 (initial) + maxRetries.
 *
 * @param {object} client - GitHub client (from createGitHubClient)
 * @param {object} params
 * @param {Array<{path: string, content: string}>} params.files - files to commit
 * @param {string} params.message - commit message
 * @param {number} [params.maxRetries=3] - maximum number of retries
 * @returns {Promise<{commitSHA: string, success: true} | {success: false, error: 'conflict'}>}
 */
export async function atomicCommitWithRetry(
  client,
  { files, message, maxRetries = 3 }
) {
  const backoffs = [1000, 3000, 9000];

  // First attempt — no delay.
  try {
    return await atomicCommit(client, { files, message });
  } catch (err) {
    if (!(err instanceof ConflictError)) throw err;
    // Fall through to retry loop.
  }

  // Retry loop with backoff; each retry fetches a fresh HEAD because no
  // parentSHA is passed to atomicCommit.
  for (let attempt = 0; attempt < maxRetries; attempt++) {
    const delay = backoffs[attempt] ?? backoffs[backoffs.length - 1];
    await sleep(delay);

    try {
      return await atomicCommit(client, { files, message });
    } catch (err) {
      if (!(err instanceof ConflictError)) throw err;
      // Continue to next retry.
    }
  }

  return { success: false, error: "conflict" };
}
|
|
@@ -0,0 +1,220 @@
|
|
|
1
|
+
import { Octokit } from "@octokit/rest";
|
|
2
|
+
import { retry } from "@octokit/plugin-retry";
|
|
3
|
+
import { throttling } from "@octokit/plugin-throttling";
|
|
4
|
+
import pLimit from "p-limit";
|
|
5
|
+
|
|
6
|
+
// Module-wide concurrency gate: at most 5 GitHub API calls in flight at once.
const limit = pLimit(5);
|
|
7
|
+
|
|
8
|
+
/**
 * Creates an Octokit instance with retry and throttling plugins.
 *
 * @param {string} token - GitHub personal access token
 * @returns {Octokit} configured Octokit instance
 */
export function createOctokit(token) {
  const PluggedOctokit = Octokit.plugin(retry, throttling);

  // Throttling hooks: retry once on a primary rate limit; only log (never
  // auto-retry) secondary/abuse limits.
  const throttle = {
    onRateLimit(retryAfter, options, octokit, retryCount) {
      octokit.log.warn(
        `Rate limit hit for ${options.method} ${options.url}`
      );
      if (retryCount < 1) return true;
    },
    onSecondaryRateLimit(retryAfter, options, octokit) {
      octokit.log.warn(
        `Secondary rate limit for ${options.method} ${options.url}`
      );
    },
  };

  return new PluggedOctokit({
    auth: token,
    throttle,
    retry: { doNotRetry: ["429"] },
    request: { timeout: 10000 },
  });
}
|
|
35
|
+
|
|
36
|
+
/**
 * Creates a GitHub client object with convenience methods.
 *
 * Every method runs through the module-level `limit` wrapper, capping
 * concurrent API requests. Read methods return `null`/`[]` on 404 instead
 * of throwing; all other errors propagate to the caller.
 *
 * NOTE(review): getHeadSHA/updateRef hard-code "heads/main" — repositories
 * whose default branch is not `main` are unsupported; confirm before reuse.
 *
 * @param {object} params
 * @param {Octokit} params.octokit - Octokit instance (or mock)
 * @param {string} params.repo - "owner/repo" string
 * @returns {object} client with owner, repo, and API methods
 * @throws {Error} when `repo` is not in "owner/repo" form
 */
export function createGitHubClient({ octokit, repo }) {
  // Validate and split "owner/repo" exactly once, up front.
  const parts = repo.split("/");
  if (parts.length !== 2 || !parts[0] || !parts[1]) {
    throw new Error(
      `Invalid repo format: "${repo}". Expected "owner/repo".`
    );
  }
  const [owner, repoName] = parts;

  return {
    owner,
    repo: repoName,

    // Display name of the authenticated user; falls back to login when the
    // profile has no name set.
    async getUserInfo() {
      return limit(async () => {
        const { data } = await octokit.rest.users.getAuthenticated();
        return {
          name: data.name ?? data.login,
          login: data.login,
        };
      });
    },

    // Fetches a single file's decoded content plus its blob SHA, or null
    // when the path does not exist.
    // NOTE(review): assumes `path` names a file — for a directory the API
    // returns an array with no `content`, and Buffer.from(undefined) would
    // throw; confirm callers only pass file paths.
    async getFileContent(path) {
      return limit(async () => {
        try {
          const { data } = await octokit.rest.repos.getContent({
            owner,
            repo: repoName,
            path,
          });
          const content = Buffer.from(data.content, "base64").toString("utf-8");
          return { content, sha: data.sha };
        } catch (err) {
          if (err.status === 404) return null;
          throw err;
        }
      });
    },

    // Lists the names of files (not subdirectories) directly inside `path`;
    // [] when the path is missing or is itself a file.
    async getDirectoryListing(path) {
      return limit(async () => {
        try {
          const { data } = await octokit.rest.repos.getContent({
            owner,
            repo: repoName,
            path,
          });
          return Array.isArray(data)
            ? data.filter((item) => item.type === "file").map((item) => item.name)
            : [];
        } catch (err) {
          if (err.status === 404) return [];
          throw err;
        }
      });
    },

    // Code search scoped to this repo, markdown files only.
    async searchCode(query) {
      return limit(async () => {
        const { data } = await octokit.rest.search.code({
          q: `${query} repo:${owner}/${repoName} extension:md`,
        });
        return data.items;
      });
    },

    // SHA of the commit at the tip of heads/main.
    async getHeadSHA() {
      return limit(async () => {
        const { data } = await octokit.rest.git.getRef({
          owner,
          repo: repoName,
          ref: "heads/main",
        });
        return data.object.sha;
      });
    },

    // Tree SHA referenced by the given commit.
    async getTreeSHA(commitSHA) {
      return limit(async () => {
        const { data } = await octokit.rest.git.getCommit({
          owner,
          repo: repoName,
          commit_sha: commitSHA,
        });
        return data.tree.sha;
      });
    },

    // Uploads UTF-8 text as a blob; returns the new blob SHA.
    async createBlob(content) {
      return limit(async () => {
        const { data } = await octokit.rest.git.createBlob({
          owner,
          repo: repoName,
          content,
          encoding: "utf-8",
        });
        return data.sha;
      });
    },

    // Creates a tree layered on base_tree from {path, blobSHA} pairs; all
    // entries use regular-file mode 100644. Returns the new tree SHA.
    async createTree(baseTreeSHA, files) {
      return limit(async () => {
        const tree = files.map(({ path, blobSHA }) => ({
          path,
          mode: "100644",
          type: "blob",
          sha: blobSHA,
        }));
        const { data } = await octokit.rest.git.createTree({
          owner,
          repo: repoName,
          base_tree: baseTreeSHA,
          tree,
        });
        return data.sha;
      });
    },

    // Creates a commit object with a single parent; returns the commit SHA.
    async createCommit(treeSHA, parentSHA, message) {
      return limit(async () => {
        const { data } = await octokit.rest.git.createCommit({
          owner,
          repo: repoName,
          message,
          tree: treeSHA,
          parents: [parentSHA],
        });
        return data.sha;
      });
    },

    // Fast-forwards heads/main to commitSHA. force:false means the API
    // rejects non-fast-forward updates (422 on SHA conflicts).
    async updateRef(commitSHA) {
      return limit(async () => {
        await octokit.rest.git.updateRef({
          owner,
          repo: repoName,
          ref: "heads/main",
          sha: commitSHA,
          force: false,
        });
      });
    },

    // Author name/date of the most recent commit touching `path`, or null
    // when the file has no history or the repo/path 404s.
    // NOTE(review): `commit.author` can be null for some commits per the
    // GitHub API — confirm callers tolerate a throw in that case.
    async getLastCommitForFile(path) {
      return limit(async () => {
        try {
          const { data } = await octokit.rest.repos.listCommits({
            owner,
            repo: repoName,
            path,
            per_page: 1,
          });
          if (data.length === 0) return null;
          const commit = data[0].commit;
          return {
            author: commit.author.name,
            date: commit.author.date,
          };
        } catch (err) {
          if (err.status === 404) return null;
          throw err;
        }
      });
    },

    // Raw content listing of the repository root (always an array).
    async getRootDirectoryListing() {
      return limit(async () => {
        const { data } = await octokit.rest.repos.getContent({
          owner,
          repo: repoName,
          path: "",
        });
        return Array.isArray(data) ? data : [data];
      });
    },
  };
}
|
|
@@ -0,0 +1,323 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Markdown table parser for root.md files.
|
|
3
|
+
*
|
|
4
|
+
* Handles parsing, adding, and updating entries in the table of contents
|
|
5
|
+
* that lives inside each project's root.md file.
|
|
6
|
+
*
|
|
7
|
+
* @module root-parser
|
|
8
|
+
*/
|
|
9
|
+
|
|
10
|
+
/** Regex that matches a table separator line like |---|---|---| (leading/trailing pipes optional, `:` alignment markers allowed). */
const SEPARATOR_RE = /^\|?\s*[-:]+\s*(\|\s*[-:]+\s*)+\|?\s*$/;

/** Regex that matches a whole-cell markdown link [name](file.md); group 1 = name, group 2 = target. */
const LINK_RE = /^\[([^\]]+)\]\(([^)]+)\)$/;
|
|
15
|
+
|
|
16
|
+
/**
 * Splits a markdown table row into cell values, respecting escaped `\|`.
 *
 * Algorithm (from spec):
 * 1. Remove leading/trailing `|` from the line (ignoring surrounding whitespace)
 * 2. Walk character by character:
 *    - If char is `\` and next char is `|` -> append `|` to current cell, skip next
 *    - If char is `|` -> push current cell (trimmed) to result, start new cell
 *    - Otherwise -> append char to current cell
 * 3. Push final cell (trimmed) to result
 *
 * @param {string} line - a single markdown table row
 * @returns {string[]} array of cell values (trimmed, unescaped)
 */
export function splitTableRow(line) {
  // Fix: strip leading whitespace before looking for the leading pipe.
  // Previously an indented row (e.g. "  | a | b |") kept a phantom empty
  // first cell, shifting every column relative to the (trimmed) header.
  let s = line.trimStart();

  // Remove leading pipe
  if (s.startsWith("|")) {
    s = s.slice(1);
  }
  // Remove trailing pipe, unless it is an escaped `\|` that belongs to the
  // final cell's content.
  const trimmedEnd = s.trimEnd();
  if (trimmedEnd.endsWith("|") && !trimmedEnd.endsWith("\\|")) {
    s = trimmedEnd.slice(0, -1);
  }

  const cells = [];
  let current = "";

  for (let i = 0; i < s.length; i++) {
    const ch = s[i];
    if (ch === "\\" && i + 1 < s.length && s[i + 1] === "|") {
      current += "|";
      i++; // skip the escaped pipe
    } else if (ch === "|") {
      cells.push(current.trim());
      current = "";
    } else {
      current += ch;
    }
  }

  // Push the final cell
  cells.push(current.trim());

  return cells;
}
|
|
67
|
+
|
|
68
|
+
/**
 * Escapes pipe characters so raw text can sit safely inside a markdown
 * table cell: every `|` becomes `\|`.
 *
 * @param {string} text - raw text
 * @returns {string} escaped text
 */
export function escapeTableCell(text) {
  return text.split("|").join("\\|");
}
|
|
78
|
+
|
|
79
|
+
/**
 * Reverses pipe escaping applied by escapeTableCell: every `\|` becomes `|`.
 *
 * @param {string} text - escaped text
 * @returns {string} unescaped text
 */
export function unescapeTableCell(text) {
  return text.split("\\|").join("|");
}
|
|
88
|
+
|
|
89
|
+
/**
 * Locates the table header row and builds a column-name -> index map.
 *
 * A line counts as the header when it contains a pipe, one of its cells is
 * "entry" (case-insensitive), and the line immediately after it is a
 * |---|---| separator. Columns may appear in any order; "description" and
 * "tags" are optional. If a column name occurs twice, the last occurrence
 * wins.
 *
 * @param {string[]} lines - all lines of the markdown document
 * @returns {{ headerIndex: number, columnMap: Record<string, number> } | null}
 */
function findTableHeader(lines) {
  const KNOWN_COLUMNS = ["entry", "description", "tags"];

  for (let i = 0; i < lines.length; i++) {
    const trimmed = lines[i].trim();
    if (!trimmed.includes("|")) continue;

    const lower = splitTableRow(trimmed).map((c) => c.toLowerCase().trim());
    if (!lower.includes("entry")) continue;

    // Only a real header when a separator line follows directly.
    const next = lines[i + 1];
    if (next === undefined || !SEPARATOR_RE.test(next.trim())) continue;

    const columnMap = {};
    for (const key of KNOWN_COLUMNS) {
      const idx = lower.lastIndexOf(key);
      if (idx !== -1) columnMap[key] = idx;
    }
    return { headerIndex: i, columnMap };
  }

  return null;
}
|
|
121
|
+
|
|
122
|
+
/**
 * Parses a root.md markdown string into a structured object.
 *
 * Everything above the table header becomes `description`; each table row
 * below the separator becomes one entry. A document with no recognizable
 * table is flagged `corrupted` and yields no entries.
 *
 * @param {string} markdown - full content of root.md
 * @returns {{ description: string, entries: Array<{ name: string, file: string, description: string, tags: string[] }>, corrupted?: boolean }}
 */
export function parseRootMd(markdown) {
  const lines = markdown.split("\n");
  const header = findTableHeader(lines);

  if (!header) {
    return { description: "", entries: [], corrupted: true };
  }

  const { headerIndex, columnMap } = header;

  // Description = everything before the table header line, minus trailing blanks.
  const descriptionLines = lines.slice(0, headerIndex);
  while (
    descriptionLines.length > 0 &&
    descriptionLines[descriptionLines.length - 1].trim() === ""
  ) {
    descriptionLines.pop();
  }
  const description = descriptionLines.join("\n");

  const entries = [];

  // Start parsing from headerIndex + 2 (skip header and separator)
  for (let i = headerIndex + 2; i < lines.length; i++) {
    const line = lines[i].trim();
    if (!line || !line.includes("|")) continue;
    if (SEPARATOR_RE.test(line)) continue;

    // Fix: split the TRIMMED line. The column map was built from a trimmed
    // header, so splitting the raw (possibly indented) line could shift
    // every cell by one and mis-assign columns.
    const cells = splitTableRow(line);

    const entryCell =
      columnMap.entry !== undefined ? cells[columnMap.entry] ?? "" : "";
    const descCell =
      columnMap.description !== undefined
        ? cells[columnMap.description] ?? ""
        : "";
    const tagsCell =
      columnMap.tags !== undefined ? cells[columnMap.tags] ?? "" : "";

    // Entry cell is either a [name](file.md) link or plain text (no file).
    const linkMatch = entryCell.match(LINK_RE);
    let name = "";
    let file = "";
    if (linkMatch) {
      name = linkMatch[1];
      file = linkMatch[2];
    } else {
      name = entryCell;
      file = "";
    }

    // Tags: comma-separated, trimmed, empties dropped.
    const tags = tagsCell
      .split(",")
      .map((t) => t.trim())
      .filter((t) => t.length > 0);

    entries.push({ name, file, description: descCell, tags });
  }

  return { description, entries };
}
|
|
191
|
+
|
|
192
|
+
/**
 * Adds an entry row to a root.md markdown table. Idempotent: if a row with
 * the same filename already exists, the markdown is returned unchanged.
 *
 * @param {string} markdown - full content of root.md
 * @param {{ file: string, name: string, description: string, tags: string[] }} entry
 * @returns {{ updated_markdown: string, was_added: boolean }}
 */
export function addEntryToRoot(markdown, entry) {
  const lines = markdown.split("\n");
  const header = findTableHeader(lines);

  if (!header) {
    // If there's no table, we can't add to it
    return { updated_markdown: markdown, was_added: false };
  }

  const { headerIndex, columnMap } = header;

  // Check idempotency: does a row with this filename already exist?
  for (let i = headerIndex + 2; i < lines.length; i++) {
    const line = lines[i].trim();
    if (!line || !line.includes("|")) continue;
    if (SEPARATOR_RE.test(line)) continue;

    // Fix: split the TRIMMED line so an indented row's cells line up with
    // the column map built from the trimmed header.
    const cells = splitTableRow(line);
    const entryCell =
      columnMap.entry !== undefined ? cells[columnMap.entry] ?? "" : "";

    // Only link-style entries carry a filename to compare against.
    const linkMatch = entryCell.match(LINK_RE);
    const existingFile = linkMatch ? linkMatch[2] : "";

    if (existingFile === entry.file) {
      return { updated_markdown: markdown, was_added: false };
    }
  }

  // Build the new row's cell values (pipes escaped for safe embedding).
  const entryValue = `[${escapeTableCell(entry.name)}](${entry.file})`;
  const descValue = escapeTableCell(entry.description);
  const tagsValue = escapeTableCell(entry.tags.join(", "));

  // Determine the number of columns from the header.
  // Fix: trim the header line before splitting, matching findTableHeader.
  const headerCells = splitTableRow(lines[headerIndex].trim());
  const newRowCells = new Array(headerCells.length).fill("");

  if (columnMap.entry !== undefined) newRowCells[columnMap.entry] = entryValue;
  if (columnMap.description !== undefined)
    newRowCells[columnMap.description] = descValue;
  if (columnMap.tags !== undefined) newRowCells[columnMap.tags] = tagsValue;

  const newRow = "| " + newRowCells.join(" | ") + " |";

  // Find the last contiguous table row so the new row is appended after it.
  let lastTableRowIndex = headerIndex + 1; // separator line
  for (let i = headerIndex + 2; i < lines.length; i++) {
    const line = lines[i].trim();
    if (!line.includes("|")) break;
    lastTableRowIndex = i;
  }

  // Insert the new row after the last table row
  const updatedLines = [
    ...lines.slice(0, lastTableRowIndex + 1),
    newRow,
    ...lines.slice(lastTableRowIndex + 1),
  ];

  return { updated_markdown: updatedLines.join("\n"), was_added: true };
}
|
|
263
|
+
|
|
264
|
+
/**
 * Updates an existing entry row in a root.md markdown table.
 * Only updates the fields provided in `changes`; all other cells of the
 * matched row are preserved. Returns the original markdown if the table or
 * the filename is not found.
 *
 * @param {string} markdown - full content of root.md
 * @param {string} filename - the filename to match (e.g. "overview.md")
 * @param {{ description?: string, tags?: string[] }} changes
 * @returns {string} updated markdown
 */
export function updateEntryInRoot(markdown, filename, changes) {
  const lines = markdown.split("\n");
  const header = findTableHeader(lines);

  if (!header) {
    return markdown;
  }

  const { headerIndex, columnMap } = header;

  for (let i = headerIndex + 2; i < lines.length; i++) {
    const line = lines[i].trim();
    if (!line || !line.includes("|")) continue;
    if (SEPARATOR_RE.test(line)) continue;

    // Fix: split the TRIMMED line so an indented row's cells line up with
    // the column map built from the trimmed header.
    const cells = splitTableRow(line);
    const entryCell =
      columnMap.entry !== undefined ? cells[columnMap.entry] ?? "" : "";

    const linkMatch = entryCell.match(LINK_RE);
    const existingFile = linkMatch ? linkMatch[2] : "";

    if (existingFile !== filename) continue;

    // Found the row — rebuild it with changes applied.
    // Fix: trim the header line before splitting, matching findTableHeader.
    const headerCells = splitTableRow(lines[headerIndex].trim());
    const newCells = new Array(headerCells.length).fill("");

    // Populate all cells from the current row, re-escaping preserved values
    // (splitTableRow unescaped them on the way in).
    for (let j = 0; j < headerCells.length; j++) {
      newCells[j] = escapeTableCell(cells[j] ?? "");
    }

    // Apply changes (these get escaped as well)
    if (changes.description !== undefined && columnMap.description !== undefined) {
      newCells[columnMap.description] = escapeTableCell(changes.description);
    }
    if (changes.tags !== undefined && columnMap.tags !== undefined) {
      newCells[columnMap.tags] = escapeTableCell(changes.tags.join(", "));
    }

    const updatedRow = "| " + newCells.join(" | ") + " |";
    lines[i] = updatedRow;

    return lines.join("\n");
  }

  // Filename not found — return original
  return markdown;
}
|