@parall/cli 1.12.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/commands/agents.d.ts +3 -0
- package/dist/commands/agents.d.ts.map +1 -0
- package/dist/commands/agents.js +18 -0
- package/dist/commands/chats.d.ts +3 -0
- package/dist/commands/chats.d.ts.map +1 -0
- package/dist/commands/chats.js +105 -0
- package/dist/commands/dm.d.ts +3 -0
- package/dist/commands/dm.d.ts.map +1 -0
- package/dist/commands/dm.js +51 -0
- package/dist/commands/mcp.d.ts +3 -0
- package/dist/commands/mcp.d.ts.map +1 -0
- package/dist/commands/mcp.js +439 -0
- package/dist/commands/messages.d.ts +3 -0
- package/dist/commands/messages.d.ts.map +1 -0
- package/dist/commands/messages.js +102 -0
- package/dist/commands/projects.d.ts +3 -0
- package/dist/commands/projects.d.ts.map +1 -0
- package/dist/commands/projects.js +104 -0
- package/dist/commands/refs.d.ts +3 -0
- package/dist/commands/refs.d.ts.map +1 -0
- package/dist/commands/refs.js +50 -0
- package/dist/commands/tasks.d.ts +3 -0
- package/dist/commands/tasks.d.ts.map +1 -0
- package/dist/commands/tasks.js +240 -0
- package/dist/commands/users.d.ts +3 -0
- package/dist/commands/users.d.ts.map +1 -0
- package/dist/commands/users.js +49 -0
- package/dist/commands/wiki.d.ts +3 -0
- package/dist/commands/wiki.d.ts.map +1 -0
- package/dist/commands/wiki.js +644 -0
- package/dist/index.d.ts +3 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +31 -0
- package/dist/lib/client.d.ts +24 -0
- package/dist/lib/client.d.ts.map +1 -0
- package/dist/lib/client.js +47 -0
- package/dist/lib/output.d.ts +3 -0
- package/dist/lib/output.d.ts.map +1 -0
- package/dist/lib/output.js +18 -0
- package/dist/lib/wiki.d.ts +269 -0
- package/dist/lib/wiki.d.ts.map +1 -0
- package/dist/lib/wiki.js +1800 -0
- package/package.json +43 -0
package/dist/lib/wiki.js
ADDED
|
@@ -0,0 +1,1800 @@
|
|
|
1
|
+
import { createHash } from 'node:crypto';
|
|
2
|
+
import { promises as fs } from 'node:fs';
|
|
3
|
+
import path from 'node:path';
|
|
4
|
+
import { ApiError, } from '@parall/sdk';
|
|
5
|
+
// ---------------------------------------------------------------------------
|
|
6
|
+
// Constants
|
|
7
|
+
// ---------------------------------------------------------------------------
|
|
8
|
+
const MARKDOWN_HEADING_RE = /^\s{0,3}(#{1,6})[ \t]+(.+?)\s*$/;
|
|
9
|
+
const TRAILING_FENCE_RE = /\s+#+\s*$/;
|
|
10
|
+
const LOCAL_NODE_SECTION_ARTIFACT_VERSION = 2;
|
|
11
|
+
const LOCAL_MANIFEST_FILE = 'manifest.json';
|
|
12
|
+
const PRLL_WIKI_DIR = '.parall-wiki';
|
|
13
|
+
// ---------------------------------------------------------------------------
|
|
14
|
+
// Wiki resolution helpers
|
|
15
|
+
// ---------------------------------------------------------------------------
|
|
16
|
+
/**
 * Fetch every wiki visible to the current organization.
 * Thin wrapper over the SDK client; no filtering happens here.
 */
export async function listWikis(ctx) {
    const { client, orgId } = ctx;
    return client.getWikis(orgId);
}
|
|
19
|
+
/**
 * Resolve a wiki reference (id or slug, with a case-insensitive slug/name
 * fallback) to its wiki record. An empty/missing ref delegates to
 * resolveWikiRefOrDefault, which auto-selects a single org wiki.
 * @throws Error when no wiki matches the given ref.
 */
export async function resolveWikiRef(ctx, wikiRef) {
    const ref = wikiRef?.trim() ?? '';
    if (ref === '') {
        return resolveWikiRefOrDefault(ctx);
    }
    const wikis = await listWikis(ctx);
    // Pass 1: exact match on id or slug.
    for (const wiki of wikis) {
        if (wiki.id === ref || wiki.slug === ref) {
            return wiki;
        }
    }
    // Pass 2: case-insensitive match on slug or display name.
    const needle = ref.toLowerCase();
    for (const wiki of wikis) {
        if (wiki.slug.toLowerCase() === needle || wiki.name.toLowerCase() === needle) {
            return wiki;
        }
    }
    throw new Error(`wiki not found: ${wikiRef}`);
}
|
|
36
|
+
/**
 * Resolve wiki reference. If wikiRef is omitted or empty, auto-resolves to
 * the org's single wiki. Errors if the org has zero or multiple wikis without
 * a ref specified.
 */
export async function resolveWikiRefOrDefault(ctx, wikiRef) {
    if (wikiRef?.trim()) {
        return resolveWikiRef(ctx, wikiRef);
    }
    const wikis = await listWikis(ctx);
    switch (wikis.length) {
        case 1:
            return wikis[0];
        case 0:
            throw new Error('No wikis found in this organization. Create one first.');
        default:
            // Ambiguous: list the candidates so the caller can pick one.
            throw new Error(`Multiple wikis found. Specify which one:\n${wikis.map((w) => ` ${w.slug} (${w.name})`).join('\n')}`);
    }
}
|
|
54
|
+
// ---------------------------------------------------------------------------
|
|
55
|
+
// Wiki tree / blob (API fallback for non-mounted content)
|
|
56
|
+
// ---------------------------------------------------------------------------
|
|
57
|
+
/**
 * List a wiki directory via the API (fallback path for non-mounted content).
 * @returns {{wiki, path, entries}} resolved wiki plus tree entries.
 */
export async function getWikiTree(ctx, wikiRef, options = {}) {
    const wiki = await resolveWikiRef(ctx, wikiRef);
    const treePath = normalizeWikiPath(options.path);
    const response = await ctx.client.getWikiTree(ctx.orgId, wiki.id, { path: treePath });
    return { wiki, path: response.path, entries: response.data };
}
|
|
68
|
+
/**
 * Fetch a single file's content from a wiki via the API.
 * options.path is required and normalized before the request.
 */
export async function getWikiBlob(ctx, wikiRef, options) {
    const wiki = await resolveWikiRef(ctx, wikiRef);
    const blobPath = normalizeRequiredWikiPath(options.path);
    const blob = await ctx.client.getWikiBlob(ctx.orgId, wiki.id, { path: blobPath });
    return { wiki, content: blob.content, size: blob.size };
}
|
|
79
|
+
// ---------------------------------------------------------------------------
|
|
80
|
+
// Content browsing: outline, search, query, section
|
|
81
|
+
// ---------------------------------------------------------------------------
|
|
82
|
+
/**
 * Build a section outline for a wiki: section metadata with body content
 * stripped, optionally scoped by ref and/or path prefix.
 */
export async function getWikiOutline(ctx, wikiRef, options = {}) {
    const wiki = await resolveWikiRef(ctx, wikiRef);
    const prefix = normalizeWikiPath(options.pathPrefix);
    const loaded = await loadWikiNodeSections(ctx, wiki, {
        ref: options.ref,
        pathPrefix: prefix,
    });
    const outlineNodes = loaded.sections.map(stripNodeSectionContent);
    return {
        wiki_id: wiki.id,
        wiki_slug: wiki.slug,
        wiki_name: wiki.name,
        ref: normalizeRef(options.ref),
        path_prefix: prefix || undefined,
        generated_at: loaded.generatedAt,
        file_count: loaded.fileCount,
        nodes: outlineNodes,
        source: loaded.kind,
    };
}
|
|
101
|
+
/**
 * Score and rank wiki sections against a free-text query.
 * @throws Error when the query is blank.
 */
export async function searchWiki(ctx, wikiRef, query, options = {}) {
    const needle = query.trim();
    if (!needle) {
        throw new Error('query is required');
    }
    const wiki = await resolveWikiRef(ctx, wikiRef);
    const prefix = normalizeWikiPath(options.pathPrefix);
    const loaded = await loadWikiNodeSections(ctx, wiki, {
        ref: options.ref,
        pathPrefix: prefix,
    });
    const indexNodes = buildIndexNodes(loaded.sections);
    return scoreNodes(indexNodes, needle, {
        limit: options.limit,
        includeContent: options.includeContent,
        source: loaded.kind,
    });
}
|
|
119
|
+
/**
 * Run a planned query over a wiki's sections and return matched documents
 * and sections together with query metadata.
 * @throws Error when the query is blank.
 */
export async function queryWiki(ctx, wikiRef, query, options = {}) {
    const needle = query.trim();
    if (!needle) {
        throw new Error('query is required');
    }
    const wiki = await resolveWikiRef(ctx, wikiRef);
    const prefix = normalizeWikiPath(options.pathPrefix);
    const loaded = await loadWikiNodeSections(ctx, wiki, {
        ref: options.ref,
        pathPrefix: prefix,
    });
    const plan = planWikiQuery(loaded.sections, needle, {
        limit: options.limit,
        includeContent: options.includeContent,
        source: loaded.kind,
    });
    return {
        wiki_id: wiki.id,
        wiki_slug: wiki.slug,
        wiki_name: wiki.name,
        query: needle,
        ref: normalizeRef(options.ref),
        path_prefix: prefix || undefined,
        generated_at: loaded.generatedAt,
        file_count: loaded.fileCount,
        source: loaded.kind,
        documents: plan.documents,
        sections: plan.sections,
    };
}
|
|
149
|
+
/**
 * Fetch one section (node) of a wiki, with its content.
 * When no ref is given, a local mount is consulted first; otherwise the
 * owning markdown file is fetched and parsed to locate the node.
 * @throws Error when nodeId is blank or the section cannot be found.
 */
export async function getWikiSection(ctx, wikiRef, options) {
    const nodeId = options.nodeId.trim();
    if (!nodeId) {
        throw new Error('nodeId is required');
    }
    const wiki = await resolveWikiRef(ctx, wikiRef);
    const normalizedRef = normalizeRef(options.ref);
    // No explicit ref: the section may be served straight from a local mount.
    if (!normalizedRef) {
        const local = await tryLoadLocalNodeSections(ctx, wiki);
        if (local) {
            const hit = findLocalNodeSection(local, nodeId);
            if (hit) {
                return {
                    ...stripNodeSectionContent(hit),
                    content: hit.content,
                    source: 'local_mount',
                };
            }
        }
    }
    // Fallback: fetch the markdown file that owns the node and re-parse it.
    const parsedNode = parseNodeId(nodeId);
    const loaded = await loadWikiMarkdownFile(ctx, wiki, parsedNode.path, normalizedRef);
    const match = parseMarkdownFile(wiki, loaded.file).find((entry) => entry.node_id === nodeId);
    if (!match) {
        throw new Error(`wiki section not found: ${nodeId}`);
    }
    return {
        ...stripNodeSectionContent(match),
        content: match.content,
        source: loaded.kind,
    };
}
|
|
179
|
+
// ---------------------------------------------------------------------------
|
|
180
|
+
// Changeset browsing (API-only)
|
|
181
|
+
// ---------------------------------------------------------------------------
|
|
182
|
+
/**
 * Fetch the server-side diff for one changeset.
 * @throws Error when changesetId is blank.
 */
export async function getWikiChangesetDiff(ctx, wikiRef, changesetId) {
    const id = changesetId.trim();
    if (!id) {
        throw new Error('changesetId is required');
    }
    const wiki = await resolveWikiRef(ctx, wikiRef);
    const diff = await ctx.client.getWikiChangesetDiff(ctx.orgId, wiki.id, id);
    return {
        wiki_id: wiki.id,
        wiki_slug: wiki.slug,
        wiki_name: wiki.name,
        changeset_id: id,
        diff,
    };
}
|
|
197
|
+
// ---------------------------------------------------------------------------
|
|
198
|
+
// Workspace status / diff (MCP-compatible wrappers)
|
|
199
|
+
// ---------------------------------------------------------------------------
|
|
200
|
+
/**
 * Report workspace status for a locally mounted wiki: which files differ
 * from the last-synced manifest, plus mount metadata.
 */
export async function getWikiWorkspaceStatus(ctx, wikiRef) {
    const wiki = await resolveWikiRef(ctx, wikiRef);
    const mount = await requireLocalWikiMount(ctx, wiki);
    const changes = await computeLocalChanges(mount);
    const changedPaths = changes.map((file) => file.path);
    return {
        wiki_id: wiki.id,
        wiki_slug: wiki.slug,
        wiki_name: wiki.name,
        mount_path: mount.root,
        mode: 'read_write',
        default_ref: wiki.default_branch,
        changed_paths: changedPaths,
        diff_files: changes,
        dirty: changes.length > 0,
        source: 'local_mount',
    };
}
|
|
217
|
+
/**
 * Produce a full diff (changed paths, per-file stats, unified patch) for a
 * locally mounted wiki against its last-synced manifest.
 */
export async function getWikiWorkspaceDiff(ctx, wikiRef) {
    const wiki = await resolveWikiRef(ctx, wikiRef);
    const mount = await requireLocalWikiMount(ctx, wiki);
    const baseManifest = await readLocalManifest(mount.root);
    const localDiff = await computeLocalDiff(mount, baseManifest);
    return {
        wiki_id: wiki.id,
        wiki_slug: wiki.slug,
        wiki_name: wiki.name,
        mount_path: mount.root,
        changed_paths: localDiff.changedPaths,
        diff_files: localDiff.diffFiles,
        patch: localDiff.patch,
        source: 'local_mount',
    };
}
|
|
233
|
+
/**
 * AFCS status for a mounted wiki: local (uncommitted) changes plus this
 * agent's changesets. Changeset listing is best-effort — a failing API
 * call is swallowed and reported as an empty list.
 */
export async function getAfcsStatus(ctx, wikiRef) {
    const wiki = await resolveWikiRef(ctx, wikiRef);
    const mount = await requireLocalWikiMount(ctx, wiki);
    const localChanges = await computeLocalChanges(mount);
    let changesets = [];
    try {
        const all = await ctx.client.getWikiChangesets(ctx.orgId, wiki.id);
        const agentId = resolveAgentId(ctx.agentId);
        // With a known agent id, report only that agent's changesets.
        const mine = agentId ? all.filter((cs) => cs.created_by === agentId) : all;
        changesets = mine.map(toChangesetSummary);
    }
    catch {
        // Changesets API may fail — non-fatal for status
    }
    return {
        wiki_id: wiki.id,
        wiki_slug: wiki.slug,
        wiki_name: wiki.name,
        mount_path: mount.root,
        mode: 'read_write',
        default_ref: wiki.default_branch,
        local_changes: localChanges,
        changesets,
        permissions: {
            readable_prefixes: ['(all — server ACL)'],
            writable_prefixes: ['(server ACL)'],
        },
    };
}
|
|
266
|
+
/**
 * AFCS diff for a mounted wiki: same data as the workspace diff but
 * without the `source` tag (MCP-compatible shape).
 */
export async function getAfcsDiff(ctx, wikiRef) {
    const wiki = await resolveWikiRef(ctx, wikiRef);
    const mount = await requireLocalWikiMount(ctx, wiki);
    const baseManifest = await readLocalManifest(mount.root);
    const localDiff = await computeLocalDiff(mount, baseManifest);
    return {
        wiki_id: wiki.id,
        wiki_slug: wiki.slug,
        wiki_name: wiki.name,
        mount_path: mount.root,
        changed_paths: localDiff.changedPaths,
        diff_files: localDiff.diffFiles,
        patch: localDiff.patch,
    };
}
|
|
281
|
+
// ---------------------------------------------------------------------------
|
|
282
|
+
// AFCS: propose changeset (REST-based)
|
|
283
|
+
// ---------------------------------------------------------------------------
|
|
284
|
+
/**
 * Propose the workspace's local changes as a wiki changeset via the REST API.
 *
 * Computes the local diff against the last-synced manifest, converts every
 * changed path into a file_changes entry (create/update/delete with base64
 * content), then either updates an existing changeset (options.changesetId)
 * or creates a new one.
 *
 * @param ctx      CLI context with client, orgId (and mount configuration).
 * @param wikiRef  Wiki id/slug/name; empty auto-resolves a single org wiki.
 * @param options  { title, message?, changesetId?, sourceChatId?,
 *                   sourceMessageId?, sourceRunId? }
 * @returns Summary object with wiki identifiers, changed paths, per-file
 *          diff stats, and the created/updated changeset.
 * @throws Error when title is blank or there are no local changes.
 */
export async function proposeWikiChangeset(ctx, wikiRef, options) {
    const wiki = await resolveWikiRef(ctx, wikiRef);
    // requireWrite: fail early if the mount is read-only.
    const mount = await requireLocalWikiMount(ctx, wiki, { requireWrite: true });
    const title = options.title.trim();
    if (!title) {
        throw new Error('title is required');
    }
    const manifest = await readLocalManifest(mount.root);
    const diff = await computeLocalDiff(mount, manifest);
    if (diff.changedPaths.length === 0) {
        throw new Error(`no changes to propose for wiki ${wiki.slug}`);
    }
    // Build file_changes for the changeset API
    const fileChanges = [];
    for (const filePath of diff.changedPaths) {
        const localPath = resolvePathInsideRoot(mount.root, filePath);
        const existsLocally = await pathExists(localPath);
        // baseEntry distinguishes 'update' (known to server) from 'create'.
        const baseEntry = manifest.files[filePath];
        if (!existsLocally) {
            // Deleted
            fileChanges.push({
                path: filePath,
                action: 'delete',
            });
        }
        else {
            const content = await fs.readFile(localPath);
            fileChanges.push({
                path: filePath,
                action: baseEntry ? 'update' : 'create',
                content_base64: content.toString('base64'),
            });
        }
    }
    let changeset;
    const existingChangesetId = normalizeOptionalString(options.changesetId);
    if (existingChangesetId) {
        // Re-propose existing changeset with updated file_changes
        changeset = await ctx.client.updateWikiChangeset(ctx.orgId, wiki.id, existingChangesetId, {
            title,
            message: normalizeOptionalString(options.message),
            file_changes: fileChanges,
        });
    }
    else {
        // Create new changeset via API
        changeset = await ctx.client.createWikiChangeset(ctx.orgId, wiki.id, {
            title,
            message: normalizeOptionalString(options.message),
            file_changes: fileChanges,
            // Provenance links (only sent on create, not on re-propose).
            source_chat_id: normalizeOptionalString(options.sourceChatId),
            source_message_id: normalizeOptionalString(options.sourceMessageId),
            source_run_id: normalizeOptionalString(options.sourceRunId),
        });
    }
    return {
        wiki_id: wiki.id,
        wiki_slug: wiki.slug,
        wiki_name: wiki.name,
        mount_path: mount.root,
        changed_paths: diff.changedPaths,
        diff_files: diff.diffFiles,
        changeset,
        source: 'local_mount',
    };
}
|
|
350
|
+
// ---------------------------------------------------------------------------
|
|
351
|
+
// AFCS: reset workspace
|
|
352
|
+
// ---------------------------------------------------------------------------
|
|
353
|
+
/**
 * Discard all local changes and restore files to the last synced state.
 * Reads the local manifest and restores all files from the server.
 *
 * Note: every manifest path is re-downloaded and overwritten, not just the
 * changed ones; files on disk that are absent from the manifest are removed.
 *
 * @param ctx      CLI context with client and orgId.
 * @param wikiRef  Wiki id/slug/name; empty auto-resolves a single org wiki.
 * @returns { wiki_id, wiki_slug, files_restored } — files_restored is the
 *          number of changed files detected before the reset (0 = no-op).
 */
export async function resetWikiWorkspace(ctx, wikiRef) {
    const wiki = await resolveWikiRef(ctx, wikiRef);
    const mount = await requireLocalWikiMount(ctx, wiki);
    const manifest = await readLocalManifest(mount.root);
    // Count changes before reset
    const changes = await computeLocalChanges(mount);
    const changedCount = changes.length;
    if (changedCount === 0) {
        // Clean workspace — nothing to restore, skip all network work.
        return { wiki_id: wiki.id, wiki_slug: wiki.slug, files_restored: 0 };
    }
    // Re-download all manifest files and overwrite local state
    const token = resolveApiToken();
    const manifestPaths = Object.keys(manifest.files);
    if (manifestPaths.length > 0) {
        const downloaded = await bulkDownloadFiles(mount.bulkDownloadUrl, token, manifestPaths);
        for (const entry of downloaded) {
            // resolvePathInsideRoot guards against paths escaping the mount.
            const fullPath = resolvePathInsideRoot(mount.root, entry.path);
            await fs.mkdir(path.dirname(fullPath), { recursive: true });
            await fs.writeFile(fullPath, Buffer.from(entry.content_base64, 'base64'));
        }
    }
    // Remove any local file not in manifest
    await removeFilesNotInManifest(mount.root, manifest);
    return {
        wiki_id: wiki.id,
        wiki_slug: wiki.slug,
        files_restored: changedCount,
    };
}
|
|
386
|
+
/**
 * List every changeset of a wiki, as summary records.
 */
export async function listWikiChangesets(ctx, wikiRef) {
    const wiki = await resolveWikiRef(ctx, wikiRef);
    const raw = await ctx.client.getWikiChangesets(ctx.orgId, wiki.id);
    const summaries = raw.map(toChangesetSummary);
    return {
        wiki_id: wiki.id,
        wiki_slug: wiki.slug,
        wiki_name: wiki.name,
        changesets: summaries,
    };
}
|
|
396
|
+
/**
 * Fetch the full record for a single changeset.
 * @throws Error when the changeset ID is blank.
 */
export async function getWikiChangesetDetail(ctx, wikiRef, changesetId) {
    const id = changesetId.trim();
    if (!id) {
        throw new Error('changeset ID is required. Run `parall wiki changesets <wiki>` to list available changesets.');
    }
    const wiki = await resolveWikiRef(ctx, wikiRef);
    const changeset = await ctx.client.getWikiChangeset(ctx.orgId, wiki.id, id);
    return {
        wiki_id: wiki.id,
        wiki_slug: wiki.slug,
        wiki_name: wiki.name,
        changeset,
    };
}
|
|
410
|
+
/**
 * Submit an access request for a wiki path. If the server has not shipped
 * the access-requests endpoint (404), the request is reported as "noted"
 * instead of failing the command.
 */
export async function requestWikiAccess(ctx, wikiRef, targetPath, reason) {
    const normalizedPath = normalizeRequiredWikiPath(targetPath);
    const wiki = await resolveWikiRef(ctx, wikiRef);
    // Fields shared by both the success and the 404-fallback responses.
    const base = {
        wiki_id: wiki.id,
        wiki_slug: wiki.slug,
        wiki_name: wiki.name,
        path: normalizedPath,
        reason: reason?.trim() || undefined,
        status: 'submitted',
    };
    try {
        await ctx.client.request('POST', `/wiki/v1/orgs/${ctx.orgId}/wikis/${wiki.id}/access-requests`, {
            path: normalizedPath,
            reason: reason?.trim() || undefined,
        });
    }
    catch (error) {
        if (error instanceof ApiError && error.status === 404) {
            return {
                ...base,
                message: 'Access request noted. The server access-request endpoint is not yet implemented. '
                    + 'Ask a wiki admin to grant access manually.',
            };
        }
        throw error;
    }
    return {
        ...base,
        message: 'Access request submitted. Waiting for approval.',
    };
}
|
|
444
|
+
/**
 * List org-wide wiki operations as log entries. Per-file history is not
 * supported yet, so a non-empty filePath is rejected.
 */
export async function getWikiLog(ctx, wikiRef, filePath) {
    const wiki = await resolveWikiRef(ctx, wikiRef);
    if (filePath?.trim()) {
        throw new Error('Per-file wiki history is not implemented yet. Omit the path to see org-wide operations.');
    }
    const ops = await ctx.client.getWikiOperations(ctx.orgId, wiki.id);
    const entries = [];
    for (const op of ops.data) {
        entries.push({
            type: 'operation',
            id: op.id,
            action: op.op_type,
            actor_id: op.actor_id,
            created_at: op.created_at,
        });
    }
    return {
        wiki_id: wiki.id,
        wiki_slug: wiki.slug,
        wiki_name: wiki.name,
        entries,
    };
}
|
|
464
|
+
/**
 * List accessible wikis for this org, sync each via REST
 * (manifest comparison + bulk download), and remove stale directories.
 * Mount path is determined client-side: {mountRoot}/{slug}/
 */
export async function syncAllMounts(ctx) {
    const wikis = await ctx.client.getWikis(ctx.orgId);
    // Explicit ctx.mountRoot wins over the environment variable.
    const mountRoot = ctx.mountRoot?.trim() || process.env.PRLL_WIKI_MOUNT_ROOT?.trim();
    const active = new Set();
    const synced = [];
    for (const wiki of wikis) {
        const target = resolveMountPath(wiki.slug, mountRoot);
        active.add(target);
        await syncSingleWiki(ctx, wiki, target);
        synced.push({ slug: wiki.slug, path: target });
    }
    // Drop mount directories for wikis that no longer exist / are accessible.
    await pruneStaleMounts(mountRoot, active);
    return { ok: true, mounts: wikis.length, synced };
}
|
|
483
|
+
/**
 * Run {@link syncAllMounts} in a loop, sleeping `intervalSec` between
 * iterations. Returns a `stop` callback to break the loop.
 *
 * Each iteration is scheduled only after the previous sync finishes, so a
 * sync slower than the interval can never overlap the next one (a plain
 * setInterval would fire regardless of whether the prior sync completed).
 * Stopping also detaches the SIGINT/SIGTERM handlers installed here, so
 * repeated watch sessions do not accumulate process listeners.
 */
export function watchMounts(ctx, intervalSec) {
    requireAgentId(ctx); // fail fast if agentId missing
    let stopped = false;
    let timer;
    const doSync = async () => {
        try {
            await syncAllMounts(ctx);
        }
        catch (err) {
            // Keep the loop alive on transient failures; just log to stderr.
            process.stderr.write(`wiki refresh failed: ${err instanceof Error ? err.message : String(err)}\n`);
        }
    };
    const schedule = () => {
        if (stopped)
            return;
        timer = setTimeout(async () => {
            await doSync();
            schedule(); // re-arm only after the sync settles
        }, intervalSec * 1000);
    };
    const stop = () => {
        if (stopped)
            return;
        stopped = true;
        clearTimeout(timer);
        process.off('SIGINT', stop);
        process.off('SIGTERM', stop);
    };
    process.on('SIGINT', stop);
    process.on('SIGTERM', stop);
    schedule();
    return { stop };
}
|
|
509
|
+
// ---------------------------------------------------------------------------
|
|
510
|
+
// Sync internals (REST-based)
|
|
511
|
+
// ---------------------------------------------------------------------------
|
|
512
|
+
/**
 * Sync a single wiki:
 * 1. Fetch server manifest (file listing with SHAs)
 * 2. Compare with local manifest
 * 3. Download new/modified files via bulk download
 * 4. Delete removed files
 * 5. Save updated local manifest
 *
 * @param ctx        CLI context (orgId, optional baseUrl override).
 * @param wiki       Wiki record (id used in URLs, id stored in manifest).
 * @param mountPath  Local directory the wiki is mirrored into; created if
 *                   missing, along with its .parall-wiki metadata dir.
 */
async function syncSingleWiki(ctx, wiki, mountPath) {
    await fs.mkdir(mountPath, { recursive: true });
    const metaDir = path.join(mountPath, PRLL_WIKI_DIR);
    await fs.mkdir(metaDir, { recursive: true });
    const token = resolveApiToken();
    // Base URL: ctx override, then PRLL_WIKI_URL, then PRLL_API_URL;
    // trailing slashes stripped so URL joins below stay well-formed.
    const baseUrl = (ctx.baseUrl ?? process.env.PRLL_WIKI_URL ?? process.env.PRLL_API_URL ?? '').replace(/\/+$/, '');
    const manifestUrl = `${baseUrl}/wiki/v1/orgs/${ctx.orgId}/wikis/${wiki.id}/manifest`;
    const bulkDownloadUrl = `${baseUrl}/wiki/v1/orgs/${ctx.orgId}/wikis/${wiki.id}/files/bulk-download`;
    // 1. Fetch server manifest
    const manifestResp = await wikiServiceFetch(manifestUrl, token);
    const serverEntries = manifestResp.data;
    // 2. Read local manifest
    const localManifest = await readLocalManifest(mountPath);
    // 3. Compare: find new, modified, deleted files
    const serverByPath = new Map(serverEntries.map((e) => [e.path, e]));
    const toDownload = [];
    const toDelete = [];
    for (const entry of serverEntries) {
        const local = localManifest.files[entry.path];
        // Download anything the local manifest lacks or whose SHA differs.
        if (!local || local.sha !== entry.sha) {
            toDownload.push(entry.path);
        }
    }
    for (const localPath of Object.keys(localManifest.files)) {
        if (!serverByPath.has(localPath)) {
            toDelete.push(localPath);
        }
    }
    // 4. Bulk download changed files
    if (toDownload.length > 0) {
        const downloaded = await bulkDownloadFiles(bulkDownloadUrl, token, toDownload);
        for (const entry of downloaded) {
            // resolvePathInsideRoot guards against entries escaping the mount.
            const fullPath = resolvePathInsideRoot(mountPath, entry.path);
            await fs.mkdir(path.dirname(fullPath), { recursive: true });
            await fs.writeFile(fullPath, Buffer.from(entry.content_base64, 'base64'));
        }
    }
    // 5. Delete removed files
    for (const deletePath of toDelete) {
        const fullPath = resolvePathInsideRoot(mountPath, deletePath);
        await fs.rm(fullPath, { force: true });
        // Clean up directories left empty by the deletion.
        await removeEmptyParents(mountPath, path.dirname(fullPath));
    }
    // 6. Save updated local manifest
    const newManifest = {
        wiki_id: wiki.id,
        synced_at: new Date().toISOString(),
        files: {},
    };
    for (const entry of serverEntries) {
        newManifest.files[entry.path] = {
            sha: entry.sha,
            size: entry.size,
        };
    }
    await writeLocalManifest(mountPath, newManifest);
}
|
|
577
|
+
// ---------------------------------------------------------------------------
|
|
578
|
+
// Local manifest I/O
|
|
579
|
+
// ---------------------------------------------------------------------------
|
|
580
|
+
/**
 * Load the local sync manifest for a mount. Returns an empty manifest
 * (blank wiki_id/synced_at, no files) when the file is missing or
 * cannot be read/parsed.
 */
async function readLocalManifest(mountRoot) {
    const manifestPath = path.join(mountRoot, PRLL_WIKI_DIR, LOCAL_MANIFEST_FILE);
    const empty = { wiki_id: '', synced_at: '', files: {} };
    let raw;
    try {
        raw = await fs.readFile(manifestPath, 'utf8');
    }
    catch {
        return empty;
    }
    try {
        return JSON.parse(raw);
    }
    catch {
        return empty;
    }
}
|
|
590
|
+
/**
 * Persist the local sync manifest as pretty-printed JSON under the mount's
 * metadata directory, creating the directory if needed.
 */
async function writeLocalManifest(mountRoot, manifest) {
    const metaDir = path.join(mountRoot, PRLL_WIKI_DIR);
    await fs.mkdir(metaDir, { recursive: true });
    const target = path.join(metaDir, LOCAL_MANIFEST_FILE);
    const serialized = JSON.stringify(manifest, null, 2);
    await fs.writeFile(target, serialized, 'utf8');
}
|
|
595
|
+
// ---------------------------------------------------------------------------
|
|
596
|
+
// REST helpers for wiki-service
|
|
597
|
+
// ---------------------------------------------------------------------------
|
|
598
|
+
/**
 * Read the API token from PRLL_API_KEY (trimmed).
 * @throws Error when the variable is unset or blank.
 */
function resolveApiToken() {
    const key = process.env.PRLL_API_KEY?.trim();
    if (key === undefined || key === '') {
        throw new Error('PRLL_API_KEY is required for wiki operations');
    }
    return key;
}
|
|
605
|
+
/**
 * Authenticated JSON fetch against the wiki service.
 * @param options { method?: string, body?: string } — defaults to GET.
 * @returns Parsed JSON body on 2xx.
 * @throws Error with method/url/status (and response text when available)
 *         on any non-ok response.
 */
async function wikiServiceFetch(url, token, options = {}) {
    const method = options.method ?? 'GET';
    const response = await fetch(url, {
        method,
        headers: {
            'Authorization': `Bearer ${token}`,
            'Content-Type': 'application/json',
        },
        body: options.body,
    });
    if (response.ok) {
        return response.json();
    }
    // Best-effort: include the error body in the message when readable.
    const text = await response.text().catch(() => '');
    throw new Error(`wiki-service ${method} ${url}: ${response.status} ${response.statusText}${text ? ` — ${text}` : ''}`);
}
|
|
622
|
+
/**
 * Download file contents via the bulk-download endpoint, requesting at
 * most 100 paths per POST to keep request bodies bounded. Returns the
 * concatenated `data` arrays from all batches.
 */
async function bulkDownloadFiles(bulkDownloadUrl, token, paths) {
    const BATCH_SIZE = 100;
    const results = [];
    let offset = 0;
    while (offset < paths.length) {
        const batch = paths.slice(offset, offset + BATCH_SIZE);
        const payload = await wikiServiceFetch(bulkDownloadUrl, token, {
            method: 'POST',
            body: JSON.stringify({ paths: batch }),
        });
        results.push(...payload.data);
        offset += BATCH_SIZE;
    }
    return results;
}
|
|
636
|
+
// ---------------------------------------------------------------------------
|
|
637
|
+
// Local change detection (manifest-based)
|
|
638
|
+
// ---------------------------------------------------------------------------
|
|
639
|
+
/** Hex-encoded SHA-256 digest of a Buffer or string. */
function computeSHA256(content) {
    const hasher = createHash('sha256');
    hasher.update(content);
    return hasher.digest('hex');
}
|
|
642
|
+
/**
 * Compute list of changed files by comparing local disk to local manifest.
 *
 * Walks the mount, hashes each eligible file, and reports per-file
 * { path, additions, deletions } entries sorted by path. Line counts are
 * estimates: additions are the local file's line count, and deletions are
 * estimated from the manifest's recorded byte size (estimateLines), since
 * the base content is not available here.
 *
 * @param mount Mount descriptor; mount.root is the local directory.
 * @returns Array of { path, additions, deletions }, sorted by path.
 */
async function computeLocalChanges(mount) {
    const manifest = await readLocalManifest(mount.root);
    const diffFiles = [];
    const visited = new Set();
    // Walk local files to find modified/added
    await walkLocalFiles(mount.root, mount.root, async (fullPath, relativePath) => {
        // NOTE(review): empty prefix list presumably means "allow all";
        // confirm against matchesAllowedPrefix's contract.
        if (!matchesAllowedPrefix(relativePath, [])) {
            return;
        }
        visited.add(relativePath);
        const localContent = await fs.readFile(fullPath);
        const localSha = computeSHA256(localContent);
        const manifestEntry = manifest.files[relativePath];
        if (!manifestEntry) {
            // New file
            const lines = localContent.toString('utf8').split('\n').length;
            diffFiles.push({ path: relativePath, additions: lines, deletions: 0 });
        }
        else if (localSha !== manifestEntry.sha) {
            // Modified file — compute line diff estimate
            const localLines = localContent.toString('utf8').split('\n');
            const additions = localLines.length;
            const deletions = estimateLines(manifestEntry.size);
            diffFiles.push({ path: relativePath, additions, deletions });
        }
        // SHA match: file is unchanged, emit nothing.
    });
    // Find deleted files (in manifest but not on disk)
    for (const [filePath, entry] of Object.entries(manifest.files)) {
        if (!visited.has(filePath)) {
            const deletions = estimateLines(entry.size);
            diffFiles.push({ path: filePath, additions: 0, deletions });
        }
    }
    // Deterministic ordering for stable output across runs.
    diffFiles.sort((a, b) => a.path.localeCompare(b.path));
    return diffFiles;
}
|
|
679
|
+
/**
 * Compute diff including unified patch by comparing local files vs manifest base content.
 *
 * Like `computeLocalChanges`, but for each modified/deleted file it also
 * fetches the server-side base content so a real unified patch can be built.
 *
 * NOTE(review): base content is fetched one file at a time inside the walk
 * (an N+1 request pattern); `bulkDownloadFiles` could take all changed paths
 * in a single call after the walk — worth batching if diffs get large.
 *
 * @param {object} mount - Mount descriptor; reads `root` and `bulkDownloadUrl`.
 * @param {object} manifest - Local manifest (`files` keyed by relative path).
 * @returns {Promise<{changedPaths: string[], diffFiles: Array, patch: string}>}
 *   Sorted file summaries plus the concatenated unified diff text.
 */
async function computeLocalDiff(mount, manifest) {
    const diffFiles = [];
    const patchParts = [];
    const visited = new Set();
    const token = resolveApiToken();
    // Walk local files to find modified/added
    await walkLocalFiles(mount.root, mount.root, async (fullPath, relativePath) => {
        if (!matchesAllowedPrefix(relativePath, [])) {
            return;
        }
        visited.add(relativePath);
        const localContent = await fs.readFile(fullPath);
        const localSha = computeSHA256(localContent);
        const manifestEntry = manifest.files[relativePath];
        if (!manifestEntry) {
            // New file
            const localText = localContent.toString('utf8');
            const localLines = localText.split('\n');
            diffFiles.push({ path: relativePath, additions: localLines.length, deletions: 0 });
            patchParts.push(buildUnifiedDiff(relativePath, '', localText));
        }
        else if (localSha !== manifestEntry.sha) {
            // Modified file — fetch base content from server for diff
            const localText = localContent.toString('utf8');
            let baseText = '';
            try {
                const baseEntries = await bulkDownloadFiles(mount.bulkDownloadUrl, token, [relativePath]);
                if (baseEntries.length > 0) {
                    baseText = Buffer.from(baseEntries[0].content_base64, 'base64').toString('utf8');
                }
            }
            catch {
                // If we can't fetch base, treat as full addition
            }
            const localLines = localText.split('\n');
            const baseLines = baseText.split('\n');
            diffFiles.push({
                path: relativePath,
                additions: localLines.length,
                deletions: baseLines.length,
            });
            patchParts.push(buildUnifiedDiff(relativePath, baseText, localText));
        }
    });
    // Find deleted files
    // NOTE(review): `entry` is unbound-used here (unlike computeLocalChanges,
    // which reads entry.size) — kept for destructuring symmetry.
    for (const [filePath, entry] of Object.entries(manifest.files)) {
        if (!visited.has(filePath)) {
            let baseText = '';
            try {
                const baseEntries = await bulkDownloadFiles(mount.bulkDownloadUrl, token, [filePath]);
                if (baseEntries.length > 0) {
                    baseText = Buffer.from(baseEntries[0].content_base64, 'base64').toString('utf8');
                }
            }
            catch {
                // Estimate
            }
            const baseLines = baseText.split('\n');
            diffFiles.push({ path: filePath, additions: 0, deletions: baseLines.length });
            patchParts.push(buildUnifiedDiff(filePath, baseText, ''));
        }
    }
    diffFiles.sort((a, b) => a.path.localeCompare(b.path));
    const changedPaths = diffFiles.map((f) => f.path);
    const patch = patchParts.join('');
    return { changedPaths, diffFiles, patch };
}
|
|
747
|
+
/**
 * Build a minimal unified diff between two text strings.
 *
 * Emits a git-style `diff --git` header, a `new file`/`deleted file` marker
 * when one side is empty, and a single whole-file hunk that removes every old
 * line and adds every new line (no common-subsequence matching is attempted).
 *
 * @param {string} filePath - Repo-relative path used in the headers.
 * @param {string} oldText - Previous content; '' means the file is new.
 * @param {string} newText - Current content; '' means the file was deleted.
 * @returns {string} Unified diff text; '' in/'' out yields an empty hunk.
 */
function buildUnifiedDiff(filePath, oldText, newText) {
    const oldLines = oldText ? oldText.split('\n') : [];
    const newLines = newText ? newText.split('\n') : [];
    const aPath = oldText ? `a/${filePath}` : '/dev/null';
    const bPath = newText ? `b/${filePath}` : '/dev/null';
    const parts = [
        `diff --git a/${filePath} b/${filePath}\n`,
    ];
    if (!oldText) {
        parts.push('new file mode 100644\n');
    }
    else if (!newText) {
        parts.push('deleted file mode 100644\n');
    }
    parts.push(`--- ${aPath}\n`);
    parts.push(`+++ ${bPath}\n`);
    // Unified-diff ranges for an empty side must be `0,0`, not `1,0`; tools
    // such as `git apply` and `patch` reject add/delete hunks otherwise.
    const oldRange = oldLines.length === 0 ? '-0,0' : `-1,${oldLines.length}`;
    const newRange = newLines.length === 0 ? '+0,0' : `+1,${newLines.length}`;
    parts.push(`@@ ${oldRange} ${newRange} @@\n`);
    for (const line of oldLines) {
        parts.push(`-${line}\n`);
    }
    for (const line of newLines) {
        parts.push(`+${line}\n`);
    }
    return parts.join('');
}
|
|
773
|
+
/** Estimate number of lines from file size (rough: ~40 bytes per line), never below 1. */
function estimateLines(sizeBytes) {
    const approx = Math.round(sizeBytes / 40);
    return approx < 1 ? 1 : approx;
}
|
|
777
|
+
// ---------------------------------------------------------------------------
|
|
778
|
+
// File system helpers
|
|
779
|
+
// ---------------------------------------------------------------------------
|
|
780
|
+
/**
 * Walk all files (not just markdown) under a root, excluding .parall-wiki.
 * Invokes `visitor(fullPath, posixRelativePath)` for every regular file;
 * symlinks and other non-file entries are skipped.
 */
async function walkLocalFiles(root, dir, visitor) {
    for (const dirent of await fs.readdir(dir, { withFileTypes: true })) {
        const child = path.join(dir, dirent.name);
        if (dirent.isDirectory()) {
            if (dirent.name !== PRLL_WIKI_DIR) {
                await walkLocalFiles(root, child, visitor);
            }
        }
        else if (dirent.isFile()) {
            await visitor(child, toPosix(path.relative(root, child)));
        }
    }
}
|
|
799
|
+
/**
 * Walk only markdown files under a root, excluding .parall-wiki.
 * Invokes `visitor(fullPath, posixRelativePath)` for each file whose
 * relative path satisfies `isMarkdownFile`.
 */
async function walkLocalMarkdownFiles(root, dir, visitor) {
    for (const dirent of await fs.readdir(dir, { withFileTypes: true })) {
        const child = path.join(dir, dirent.name);
        if (dirent.isDirectory()) {
            if (dirent.name !== PRLL_WIKI_DIR) {
                await walkLocalMarkdownFiles(root, child, visitor);
            }
            continue;
        }
        if (!dirent.isFile()) {
            continue;
        }
        const relativePath = toPosix(path.relative(root, child));
        if (isMarkdownFile(relativePath)) {
            await visitor(child, relativePath);
        }
    }
}
|
|
821
|
+
/** Remove files not present in manifest, pruning any directories left empty. */
async function removeFilesNotInManifest(root, manifest) {
    await walkLocalFiles(root, root, async (fullPath, relativePath) => {
        if (relativePath in manifest.files) {
            return;
        }
        await fs.rm(fullPath, { force: true });
        await removeEmptyParents(root, path.dirname(fullPath));
    });
}
|
|
830
|
+
/**
 * Remove empty parent directories of `dir`, walking upward and stopping at
 * (but never removing) `root`. Stops at the first non-empty directory or on
 * any filesystem error (permission denied, already removed, ...).
 *
 * @param {string} root - Boundary directory; never removed.
 * @param {string} dir - Deepest directory to start pruning from.
 */
async function removeEmptyParents(root, dir) {
    const absRoot = path.resolve(root);
    let current = path.resolve(dir);
    // Require a trailing separator in the containment check so a sibling path
    // like `${root}2` is never mistaken for a directory inside the root.
    while (current !== absRoot && current.startsWith(absRoot + path.sep)) {
        try {
            const entries = await fs.readdir(current);
            if (entries.length > 0)
                break;
            await fs.rmdir(current);
            current = path.dirname(current);
        }
        catch {
            break;
        }
    }
}
|
|
847
|
+
// ---------------------------------------------------------------------------
|
|
848
|
+
// Mount resolution
|
|
849
|
+
// ---------------------------------------------------------------------------
|
|
850
|
+
/**
 * Resolve a local agent mount for a wiki, if one exists on disk.
 *
 * @param {object} ctx - Command context; reads `mountRoot`, `baseUrl`, `orgId`.
 * @param {object} wiki - Wiki record; reads `slug` and `id`.
 * @returns {Promise<object|null>} Mount descriptor with the local root and the
 *   REST endpoint URLs derived from it, or null when the slug cannot be
 *   resolved to a path or the path does not exist.
 */
async function resolveLocalWikiMount(ctx, wiki) {
    // Explicit context value wins over the environment variable.
    const mountRoot = ctx.mountRoot ?? process.env.PRLL_WIKI_MOUNT_ROOT;
    let root;
    try {
        root = resolveMountPath(wiki.slug, mountRoot);
    }
    catch {
        // Unresolvable slug/mount root → treat as "no local mount".
        return null;
    }
    if (!(await pathExists(root))) {
        return null;
    }
    // Strip trailing slashes so URL joining below never doubles a '/'.
    const baseUrl = (ctx.baseUrl ?? process.env.PRLL_WIKI_URL ?? process.env.PRLL_API_URL ?? '').replace(/\/+$/, '');
    const wikiBase = `${baseUrl}/wiki/v1/orgs/${ctx.orgId}/wikis/${wiki.id}`;
    return {
        root,
        wikiId: wiki.id,
        manifestUrl: `${wikiBase}/manifest`,
        blobUrl: `${wikiBase}/files`,
        bulkDownloadUrl: `${wikiBase}/files/bulk-download`,
        changesetsUrl: `${wikiBase}/changesets`,
    };
}
|
|
873
|
+
/**
 * Resolve a local wiki mount, throwing when none is available.
 *
 * @param {object} ctx - Command context (forwarded to resolveLocalWikiMount).
 * @param {object} wiki - Wiki record; `slug` is used in error messages.
 * @param {{requireWrite?: boolean}} [options]
 * @returns {Promise<object>} The resolved mount descriptor.
 * @throws {Error} When the wiki has no local mount.
 */
async function requireLocalWikiMount(ctx, wiki, options = {}) {
    const mount = await resolveLocalWikiMount(ctx, wiki);
    if (!mount) {
        throw new Error(`wiki ${wiki.slug} is not available in a local agent mount`);
    }
    // NOTE(review): `'read_write' !== 'read_write'` is always false, so the
    // read-only guard below can never fire. This looks like compiler residue
    // from a constant-folded mode field — confirm against the TS source and
    // restore the real mount-mode check if one exists.
    if (options.requireWrite && 'read_write' !== 'read_write') {
        throw new Error(`wiki ${wiki.slug} mount is read-only`);
    }
    return mount;
}
|
|
883
|
+
// ---------------------------------------------------------------------------
|
|
884
|
+
// Node section loading (unchanged logic, adapts to REST mount)
|
|
885
|
+
// ---------------------------------------------------------------------------
|
|
886
|
+
/**
 * Load a wiki's node sections, preferring the local mount when no ref is given.
 *
 * @param {object} ctx - Command context (client, orgId, mount settings).
 * @param {object} wiki - Wiki record.
 * @param {{ref?: string, pathPrefix?: string}} [options]
 * @returns {Promise<{kind: 'local_mount'|'api', generatedAt: string,
 *   fileCount: number, sections: Array}>} Sections filtered by the normalized
 *   path prefix; `kind` records which source served the request.
 */
async function loadWikiNodeSections(ctx, wiki, options = {}) {
    const normalizedRef = normalizeRef(options.ref);
    const normalizedPrefix = normalizeWikiPath(options.pathPrefix);
    // A pinned ref always goes to the API; only unpinned reads can use the
    // (possibly newer) local mount.
    if (!normalizedRef) {
        const local = await tryLoadLocalNodeSections(ctx, wiki, normalizedPrefix);
        if (local) {
            const sections = filterNodeSectionsByPathPrefix(flattenLocalNodeSectionFiles(local.files), normalizedPrefix);
            return {
                kind: 'local_mount',
                generatedAt: local.generated_at,
                fileCount: countMatchingMarkdownFiles(local.files, normalizedPrefix),
                sections,
            };
        }
    }
    // Fallback (or ref-pinned) path: fetch the prebuilt artifact from the API.
    const artifact = await ctx.client.getWikiNodeSections(ctx.orgId, wiki.id, { ref: normalizedRef });
    const sections = filterNodeSectionsByPathPrefix(artifact.nodes, normalizedPrefix);
    return {
        kind: 'api',
        generatedAt: artifact.generated_at,
        fileCount: countMatchingSectionFiles(artifact.nodes, normalizedPrefix),
        sections,
    };
}
|
|
910
|
+
/**
 * Load a single Markdown file, preferring the local mount when no ref is given.
 *
 * @param {object} ctx - Command context.
 * @param {object} wiki - Wiki record.
 * @param {string} filePath - Wiki-relative file path (normalized below).
 * @param {string} [ref] - Optional ref; when set, always reads via the API.
 * @returns {Promise<{kind: 'local_mount'|'api', file: {path: string, content: string}}>}
 */
async function loadWikiMarkdownFile(ctx, wiki, filePath, ref) {
    const normalizedRef = normalizeRef(ref);
    const normalizedPath = normalizeRequiredWikiPath(filePath);
    if (!normalizedRef) {
        const local = await tryLoadLocalMarkdownFile(ctx, wiki, normalizedPath);
        if (local) {
            return { kind: 'local_mount', file: local };
        }
    }
    return {
        kind: 'api',
        file: await loadRemoteMarkdownFile(ctx, wiki, normalizedRef, normalizedPath),
    };
}
|
|
924
|
+
/**
 * Read a Markdown file from the wiki's local mount.
 *
 * Returns null only when there is no mount at all; once a mount exists, a
 * missing/non-file/non-Markdown path is treated as a hard error rather than
 * falling back to the API.
 *
 * @param {object} ctx - Command context.
 * @param {object} wiki - Wiki record.
 * @param {string} filePath - Normalized wiki-relative path.
 * @returns {Promise<{path: string, content: string}|null>}
 * @throws {Error} When the path is absent, not a file, or not Markdown.
 */
async function tryLoadLocalMarkdownFile(ctx, wiki, filePath) {
    const mount = await resolveLocalWikiMount(ctx, wiki);
    if (!mount) {
        return null;
    }
    // Reject paths outside the allowed prefixes before touching the disk.
    enforceAllowedPrefix(filePath, []);
    const target = resolvePathInsideRoot(mount.root, filePath);
    const stat = await fs.stat(target).catch(() => null);
    if (!stat) {
        throw new Error(`wiki file not found in local mount: ${filePath}`);
    }
    if (!stat.isFile()) {
        throw new Error(`wiki path is not a file: ${filePath}`);
    }
    if (!isMarkdownFile(filePath)) {
        throw new Error(`wiki section requires a Markdown file: ${filePath}`);
    }
    return {
        path: toPosix(filePath),
        content: await fs.readFile(target, 'utf8'),
    };
}
|
|
946
|
+
/**
 * Fetch a Markdown file's content through the wiki blob API.
 * The `ref` parameter is accepted for signature symmetry but is not
 * forwarded to the blob request here.
 */
async function loadRemoteMarkdownFile(ctx, wiki, ref, filePath) {
    if (!isMarkdownFile(filePath)) {
        throw new Error(`wiki section requires a Markdown file: ${filePath}`);
    }
    const blob = await ctx.client.getWikiBlob(ctx.orgId, wiki.id, { path: filePath });
    return { path: filePath, content: blob.content };
}
|
|
958
|
+
/**
 * Load (or rebuild) the node-section artifact from the wiki's local mount.
 * Returns null when no local mount is available.
 */
async function tryLoadLocalNodeSections(ctx, wiki, pathPrefix) {
    const mount = await resolveLocalWikiMount(ctx, wiki);
    if (mount === null) {
        return null;
    }
    if (pathPrefix) {
        enforceAllowedPrefix(pathPrefix, []);
    }
    return loadOrBuildLocalNodeSectionArtifact(wiki, mount);
}
|
|
968
|
+
/**
 * Build the node-section artifact for a mounted wiki and persist it to
 * `.parall-wiki/node-sections.json`, reusing cached per-file parses when
 * size and mtime are unchanged (see buildLocalNodeSectionFiles).
 *
 * @param {object} wiki - Wiki record (id/slug/name are embedded in the artifact).
 * @param {object} mount - Mount descriptor; `mount.root` is scanned.
 * @returns {Promise<object>} The freshly written artifact.
 */
async function loadOrBuildLocalNodeSectionArtifact(wiki, mount) {
    const artifactPath = path.join(mount.root, PRLL_WIKI_DIR, 'node-sections.json');
    const descriptors = await collectLocalMarkdownDescriptors(mount.root, []);
    // Previous artifact (if any) is only used as a parse cache.
    const cached = await readLocalNodeSectionArtifact(artifactPath);
    const artifact = {
        version: LOCAL_NODE_SECTION_ARTIFACT_VERSION,
        generated_at: new Date().toISOString(),
        wiki_id: wiki.id,
        wiki_slug: wiki.slug,
        wiki_name: wiki.name,
        file_count: descriptors.length,
        files: await buildLocalNodeSectionFiles(wiki, mount.root, descriptors, cached),
    };
    await fs.mkdir(path.dirname(artifactPath), { recursive: true });
    await fs.writeFile(artifactPath, JSON.stringify(artifact, null, 2), 'utf8');
    return artifact;
}
|
|
985
|
+
/** Read and parse a cached node-section artifact; null when missing or invalid JSON. */
async function readLocalNodeSectionArtifact(artifactPath) {
    try {
        return JSON.parse(await fs.readFile(artifactPath, 'utf8'));
    }
    catch {
        return null;
    }
}
|
|
994
|
+
/**
 * Collect {path, size, mtime_ms} descriptors for every Markdown file under
 * `root` that matches the allowed prefixes, sorted by path.
 *
 * @param {string} root - Mount root to scan.
 * @param {string[]} allowedPrefixes - Prefix whitelist (empty = allow all,
 *   per matchesAllowedPrefix semantics — assumed, defined elsewhere).
 * @returns {Promise<Array<{path: string, size: number, mtime_ms: number}>>}
 */
async function collectLocalMarkdownDescriptors(root, allowedPrefixes) {
    const descriptors = [];
    await walkLocalMarkdownFiles(root, root, async (fullPath, relativePath) => {
        if (!matchesAllowedPrefix(relativePath, allowedPrefixes)) {
            return;
        }
        // size + mtime are the cache key used by buildLocalNodeSectionFiles.
        const stat = await fs.stat(fullPath);
        descriptors.push({
            path: relativePath,
            size: stat.size,
            mtime_ms: stat.mtimeMs,
        });
    });
    return descriptors.sort((a, b) => a.path.localeCompare(b.path));
}
|
|
1009
|
+
/**
 * Parse each described Markdown file into node sections, reusing entries from
 * a previous artifact when the cache is valid (same artifact version and wiki
 * identity) and the file's size and mtime are unchanged.
 *
 * @param {object} wiki - Wiki record used for cache validation and parsing.
 * @param {string} root - Mount root; descriptor paths are resolved against it.
 * @param {Array} descriptors - Output of collectLocalMarkdownDescriptors.
 * @param {object|null} cached - Previously persisted artifact, or null.
 * @returns {Promise<Array>} Per-file entries ({...descriptor, nodes}), sorted by path.
 */
async function buildLocalNodeSectionFiles(wiki, root, descriptors, cached) {
    const cachedByPath = new Map();
    // Only trust the cache when it was produced for this exact wiki identity
    // and artifact version; otherwise re-parse everything.
    if (cached
        && cached.version === LOCAL_NODE_SECTION_ARTIFACT_VERSION
        && cached.wiki_id === wiki.id
        && cached.wiki_slug === wiki.slug
        && cached.wiki_name === wiki.name) {
        for (const file of cached.files) {
            cachedByPath.set(file.path, file);
        }
    }
    const files = [];
    for (const descriptor of descriptors) {
        const existing = cachedByPath.get(descriptor.path);
        // size + mtime match → the cached parse is still valid.
        if (existing
            && existing.size === descriptor.size
            && existing.mtime_ms === descriptor.mtime_ms) {
            files.push(existing);
            continue;
        }
        const content = await fs.readFile(resolvePathInsideRoot(root, descriptor.path), 'utf8');
        files.push({
            ...descriptor,
            nodes: parseMarkdownFile(wiki, {
                path: descriptor.path,
                content,
            }),
        });
    }
    return files.sort((a, b) => a.path.localeCompare(b.path));
}
|
|
1040
|
+
/** Flatten per-file node lists into one array ordered by (path, start_line). */
function flattenLocalNodeSectionFiles(files) {
    const sections = files.flatMap((file) => file.nodes);
    sections.sort((a, b) => (a.path === b.path
        ? a.start_line - b.start_line
        : a.path.localeCompare(b.path)));
    return sections;
}
|
|
1053
|
+
/** Locate a node by id across all files in the artifact; undefined when absent. */
function findLocalNodeSection(artifact, nodeId) {
    return artifact.files
        .flatMap((file) => file.nodes)
        .find((entry) => entry.node_id === nodeId);
}
|
|
1062
|
+
/**
 * Keep only sections whose path equals the prefix or lives under it as a
 * directory. A falsy prefix returns the input array unchanged.
 */
function filterNodeSectionsByPathPrefix(sections, pathPrefix) {
    if (!pathPrefix) {
        return sections;
    }
    const dirPrefix = `${pathPrefix}/`;
    return sections.filter((section) => section.path === pathPrefix || section.path.startsWith(dirPrefix));
}
|
|
1068
|
+
/** Count files whose path equals the prefix or lives under it; all files when no prefix. */
function countMatchingMarkdownFiles(files, pathPrefix) {
    if (!pathPrefix) {
        return files.length;
    }
    const dirPrefix = `${pathPrefix}/`;
    let count = 0;
    for (const file of files) {
        if (file.path === pathPrefix || file.path.startsWith(dirPrefix)) {
            count += 1;
        }
    }
    return count;
}
|
|
1074
|
+
/** Count distinct file paths among sections matching the (optional) prefix. */
function countMatchingSectionFiles(sections, pathPrefix) {
    const matching = sections.filter((section) => !pathPrefix
        || section.path === pathPrefix
        || section.path.startsWith(`${pathPrefix}/`));
    return new Set(matching.map((section) => section.path)).size;
}
|
|
1083
|
+
// ---------------------------------------------------------------------------
|
|
1084
|
+
// Stale mount cleanup
|
|
1085
|
+
// ---------------------------------------------------------------------------
|
|
1086
|
+
/**
 * Remove mount directories under `mountRoot` that are no longer active.
 *
 * A directory is considered a mount when it contains a `.parall-wiki` control
 * dir with a local manifest file (see findStaleMountDirs). Mounts whose path
 * is in `activePaths`, or that contain an active mount, are kept.
 *
 * @param {string|undefined} mountRoot - Root to scan; no-op when blank/absent.
 * @param {Set<string>} activePaths - Absolute paths of mounts still in use.
 * @throws {Error} When `mountRoot` exists but is not a directory.
 */
async function pruneStaleMounts(mountRoot, activePaths) {
    const root = mountRoot?.trim();
    if (!root)
        return;
    const absRoot = path.resolve(root);
    if (!(await pathExists(absRoot)))
        return;
    const stat = await fs.stat(absRoot);
    if (!stat.isDirectory()) {
        throw new Error(`mount root "${absRoot}" is not a directory`);
    }
    // Materialized once so the prefix scan in findStaleMountDirs is cheap.
    const activeArr = Array.from(activePaths);
    const staleDirs = [];
    await findStaleMountDirs(absRoot, absRoot, activePaths, activeArr, staleDirs);
    // Remove deepest first to avoid partial-tree issues
    staleDirs.sort((a, b) => b.length - a.length);
    for (const staleDir of staleDirs) {
        await fs.rm(staleDir, { recursive: true, force: true });
    }
}
|
|
1106
|
+
/**
 * Recursively collect stale mount directories into `staleDirs`.
 *
 * A stale mount is a directory (other than the root itself) containing a
 * `.parall-wiki/<manifest>` marker whose path is neither in `activePaths`
 * nor an ancestor of any active path.
 *
 * @param {string} absRoot - Resolved scan root (never collected).
 * @param {string} dir - Current directory being scanned.
 * @param {Set<string>} activePaths - Active mount paths (O(1) membership).
 * @param {string[]} activeArr - Same paths as an array, for prefix scans.
 * @param {string[]} staleDirs - Output accumulator (mutated).
 */
async function findStaleMountDirs(absRoot, dir, activePaths, activeArr, staleDirs) {
    let names;
    try {
        names = await fs.readdir(dir);
    }
    catch {
        // Unreadable directory — skip silently rather than abort the prune.
        return;
    }
    for (const name of names) {
        const fullPath = path.join(dir, name);
        const stat = await fs.lstat(fullPath).catch(() => null);
        // lstat: symlinked dirs are not followed (isDirectory() is false for
        // symlinks under lstat, so the isSymbolicLink() term is belt-and-braces).
        if (!stat?.isDirectory() || stat.isSymbolicLink())
            continue;
        if (name === PRLL_WIKI_DIR) {
            // The control dir marks its PARENT as a mount candidate.
            const mountDir = path.dirname(fullPath);
            if (mountDir === absRoot)
                continue;
            if (activePaths.has(mountDir))
                continue;
            // Keep the mount if any active path lives beneath it.
            const prefix = mountDir + path.sep;
            if (activeArr.some((ap) => ap.startsWith(prefix)))
                continue;
            if (await pathExists(path.join(fullPath, LOCAL_MANIFEST_FILE))) {
                staleDirs.push(mountDir);
            }
            continue;
        }
        await findStaleMountDirs(absRoot, fullPath, activePaths, activeArr, staleDirs);
    }
}
|
|
1136
|
+
// ---------------------------------------------------------------------------
|
|
1137
|
+
// Changeset summary helper
|
|
1138
|
+
// ---------------------------------------------------------------------------
|
|
1139
|
+
/** Project a changeset record onto the summary shape surfaced to callers. */
function toChangesetSummary(cs) {
    const { id, status, title, changed_paths, created_at, updated_at } = cs;
    return {
        id,
        status,
        title,
        changed_paths,
        feedback: cs.last_error ?? null,
        created_at,
        updated_at,
    };
}
|
|
1150
|
+
// ---------------------------------------------------------------------------
|
|
1151
|
+
// Search & index engine (unchanged)
|
|
1152
|
+
// ---------------------------------------------------------------------------
|
|
1153
|
+
/** Return a shallow copy of a section without its (potentially large) `content`. */
function stripNodeSectionContent(section) {
    const node = { ...section };
    delete node.content;
    return node;
}
|
|
1157
|
+
/** Build a search-index entry for every node section. */
function buildIndexNodes(sections) {
    return sections.map((section) => buildIndexedNode(section));
}
|
|
1164
|
+
/**
 * Split a Markdown file into node sections keyed by heading structure.
 *
 * Produces: a single `:doc` node when the file has no headings (or [] when it
 * is effectively empty), an optional `:preface` node for text before the
 * first heading, and one `:L<line>` node per heading spanning up to the next
 * heading. Line numbers are 1-based; `lines` indices are 0-based, which is
 * why slices below use `line - 1`.
 *
 * @param {object} wiki - Wiki record embedded into each node.
 * @param {{path: string, content: string}} file - File to parse.
 * @returns {Array} Node sections in document order.
 */
function parseMarkdownFile(wiki, file) {
    const lines = file.content.split('\n');
    // Number of leading lines consumed by YAML front matter (0 when absent —
    // assumed from usage; see detectFrontMatterEnd).
    const frontMatterEnd = detectFrontMatterEnd(lines);
    const headings = extractMarkdownHeadings(lines, frontMatterEnd);
    const fileTitle = defaultFileTitle(file.path);
    if (headings.length === 0) {
        // No headings: the whole body (minus front matter) is one node.
        const docNode = buildNodeSection(wiki, {
            nodeId: `${file.path}:doc`,
            path: file.path,
            title: fileTitle,
            headingPath: [],
            level: 0,
            startLine: findFirstNonEmptyLine(lines, frontMatterEnd + 1, lines.length),
            endLine: lines.length,
            content: lines.slice(frontMatterEnd).join('\n').trim(),
        });
        return !docNode.content ? [] : [docNode];
    }
    const nodes = [];
    const firstHeadingLine = headings[0].line;
    // Text between the front matter and the first heading, if any.
    const preface = lines.slice(frontMatterEnd, firstHeadingLine - 1).join('\n').trim();
    if (preface) {
        nodes.push(buildNodeSection(wiki, {
            nodeId: `${file.path}:preface`,
            path: file.path,
            title: fileTitle,
            headingPath: [],
            level: 0,
            startLine: findFirstNonEmptyLine(lines, frontMatterEnd + 1, firstHeadingLine - 1),
            endLine: firstHeadingLine - 1,
            content: preface,
        }));
    }
    for (let i = 0; i < headings.length; i += 1) {
        const current = headings[i];
        // Section body runs until the next heading (exclusive), or EOF.
        const nextLine = i + 1 < headings.length ? headings[i + 1].line : lines.length + 1;
        nodes.push(buildNodeSection(wiki, {
            nodeId: `${file.path}:L${current.line}`,
            path: file.path,
            title: current.title,
            headingPath: current.path,
            level: current.level,
            startLine: current.line,
            endLine: nextLine - 1,
            content: lines.slice(current.line, nextLine - 1).join('\n').trim(),
        }));
    }
    return nodes;
}
|
|
1213
|
+
/**
 * Assemble a node-section record from parsed heading metadata.
 * `heading_path` is omitted (undefined) for file-level nodes, and
 * `section_path` collapses to the bare file path in that case.
 */
function buildNodeSection(wiki, input) {
    const hasHeadings = input.headingPath.length > 0;
    const sectionPath = hasHeadings
        ? `${input.path} > ${input.headingPath.join(' > ')}`
        : input.path;
    return {
        wiki_id: wiki.id,
        wiki_slug: wiki.slug,
        wiki_name: wiki.name,
        node_id: input.nodeId,
        path: input.path,
        title: input.title,
        heading_path: hasHeadings ? [...input.headingPath] : undefined,
        section_path: sectionPath,
        level: input.level,
        start_line: input.startLine,
        end_line: input.endLine,
        content: input.content.trim(),
    };
}
|
|
1233
|
+
/**
 * Precompute the search fields for one node section: lowercase variants for
 * substring matching and a term-frequency map for BM25-style scoring.
 *
 * @param {object} section - A node section (from parseMarkdownFile or the API).
 * @returns {object} Indexed node wrapping the section with derived fields.
 */
function buildIndexedNode(section) {
    // Path, title, heading chain, and body all contribute searchable text.
    const searchText = [
        section.path,
        section.title,
        section.heading_path?.join(' ') ?? '',
        section.content,
    ].join('\n').trim();
    const termFreq = new Map();
    const tokens = tokenize(searchText);
    for (const token of tokens) {
        termFreq.set(token, (termFreq.get(token) ?? 0) + 1);
    }
    return {
        section,
        content: section.content,
        lowerPath: section.path.toLowerCase(),
        lowerTitle: section.title.toLowerCase(),
        lowerBody: section.content.toLowerCase(),
        lowerJoined: section.section_path.toLowerCase(),
        termFreq,
        docLen: tokens.length,
    };
}
|
|
1256
|
+
/**
 * Score indexed nodes against a free-text query and return the top matches.
 *
 * Builds corpus-wide document frequencies and average length, scores each
 * node via `scoreNode`, drops non-positive scores, and sorts by score
 * descending with (path, start_line) as a stable tiebreak.
 *
 * @param {Array} nodes - Indexed nodes (see buildIndexedNode).
 * @param {string} query - User query; must tokenize to at least one term.
 * @param {{limit?: number, includeContent?: boolean, source?: string}} options
 * @returns {Array} At most `limit` (default 5) result records.
 * @throws {Error} When the query yields no searchable tokens.
 */
function scoreNodes(nodes, query, options) {
    const queryTokens = tokenize(query);
    if (queryTokens.length === 0) {
        throw new Error('query did not contain searchable terms');
    }
    // Corpus statistics for the relevance function.
    const docFreq = new Map();
    let totalDocLen = 0;
    for (const node of nodes) {
        totalDocLen += node.docLen;
        for (const token of node.termFreq.keys()) {
            docFreq.set(token, (docFreq.get(token) ?? 0) + 1);
        }
    }
    const avgDocLen = nodes.length > 0 ? Math.max(totalDocLen / nodes.length, 1) : 1;
    const lowerQuery = query.toLowerCase();
    const limit = options.limit && options.limit > 0 ? options.limit : 5;
    const results = [];
    for (const node of nodes) {
        const score = scoreNode(node, queryTokens, lowerQuery, docFreq, nodes.length, avgDocLen);
        if (score <= 0) {
            continue;
        }
        results.push({
            ...stripNodeSectionContent(node.section),
            score,
            snippet: buildSnippet(node, lowerQuery, queryTokens),
            // Full content is opt-in to keep default payloads small.
            content: options.includeContent ? node.content : undefined,
            source: options.source,
        });
    }
    results.sort((a, b) => {
        if (b.score === a.score) {
            if (a.path === b.path) {
                return a.start_line - b.start_line;
            }
            return a.path.localeCompare(b.path);
        }
        return b.score - a.score;
    });
    return results.slice(0, limit);
}
|
|
1297
|
+
/**
 * Two-stage, tree-aware wiki search: rank whole documents first, then score
 * the nodes of a shortlist of documents with the document score folded in.
 *
 * @param {Array} sections - Node sections across all candidate files.
 * @param {string} query - User query; must tokenize to at least one term.
 * @param {{limit?: number, includeContent?: boolean, source?: string}} options
 * @returns {{documents: Array, sections: Array}} The shortlisted documents
 *   with their scores/matched headings, plus the top `limit` section results.
 * @throws {Error} When the query yields no searchable tokens.
 */
function planWikiQuery(sections, query, options) {
    const queryTokens = tokenize(query);
    if (queryTokens.length === 0) {
        throw new Error('query did not contain searchable terms');
    }
    const lowerQuery = query.toLowerCase();
    const limit = options.limit && options.limit > 0 ? options.limit : 5;
    // Stage 1: rank whole documents (path/title/headings only).
    const documents = buildIndexedDocuments(sections);
    const rankedDocuments = scoreDocuments(documents, queryTokens, lowerQuery)
        .filter((entry) => entry.score > 0)
        .sort((a, b) => {
            if (b.score === a.score) {
                return a.document.path.localeCompare(b.document.path);
            }
            return b.score - a.score;
        });
    // Keep a shortlist wider than the final limit so stage 2 has room to reorder.
    const shortlistedDocuments = rankedDocuments.slice(0, Math.max(limit * 2, 3));
    // Stage 2: score individual nodes of the shortlisted documents.
    const shortlistedNodes = buildIndexNodes(shortlistedDocuments.flatMap((entry) => entry.document.sections));
    const docScoreByPath = new Map(shortlistedDocuments.map((entry) => [entry.document.path, entry.score]));
    const results = [];
    for (const node of shortlistedNodes) {
        const documentScore = docScoreByPath.get(node.section.path) ?? 0;
        const plan = scoreTreeAwareNode(node, queryTokens, lowerQuery, documentScore, shortlistedNodes);
        if (plan.score <= 0) {
            continue;
        }
        results.push({
            ...stripNodeSectionContent(node.section),
            score: plan.score,
            document_score: documentScore,
            snippet: buildSnippet(node, lowerQuery, queryTokens),
            content: options.includeContent ? node.content : undefined,
            source: options.source,
            reasoning: plan.reasoning,
        });
    }
    results.sort((a, b) => {
        if (b.score === a.score) {
            if (a.path === b.path) {
                return a.start_line - b.start_line;
            }
            return a.path.localeCompare(b.path);
        }
        return b.score - a.score;
    });
    return {
        documents: shortlistedDocuments.map(({ document, score }) => ({
            path: document.path,
            title: document.title,
            score,
            matched_headings: findMatchedHeadings(document, lowerQuery, queryTokens),
            node_count: document.sections.length,
        })),
        sections: results.slice(0, limit),
    };
}
|
|
1353
|
+
/**
 * Group node sections by file path into document-level index entries.
 *
 * Each document carries its sections (sorted by start_line), the de-duplicated
 * heading names, a title (the level-0 section's title, else a default derived
 * from the path), plus lowercase search strings and term frequencies built
 * from path + title + headings (NOT section bodies).
 *
 * @param {Array} sections - Node sections across all files.
 * @returns {Array} Indexed documents, sorted by path.
 */
function buildIndexedDocuments(sections) {
    const byPath = new Map();
    for (const section of sections) {
        const bucket = byPath.get(section.path);
        if (bucket) {
            bucket.push(section);
        }
        else {
            byPath.set(section.path, [section]);
        }
    }
    const documents = [];
    for (const [filePath, fileSections] of byPath.entries()) {
        fileSections.sort((a, b) => a.start_line - b.start_line);
        // Unique, non-empty heading names across all sections of the file.
        const headings = Array.from(new Set(fileSections
            .flatMap((section) => section.heading_path ?? [])
            .filter(Boolean)));
        const title = fileSections.find((section) => section.level === 0)?.title ?? defaultFileTitle(filePath);
        // Document-level search text deliberately excludes section bodies;
        // body relevance is handled at the node stage.
        const searchText = [
            filePath,
            title,
            headings.join(' '),
        ].join('\n').trim();
        const tokens = tokenize(searchText);
        const termFreq = new Map();
        for (const token of tokens) {
            termFreq.set(token, (termFreq.get(token) ?? 0) + 1);
        }
        documents.push({
            path: filePath,
            title,
            sections: fileSections,
            lowerPath: filePath.toLowerCase(),
            lowerTitle: title.toLowerCase(),
            lowerJoined: `${filePath} ${title} ${headings.join(' ')}`.toLowerCase(),
            headings,
            termFreq,
            docLen: tokens.length,
        });
    }
    documents.sort((a, b) => a.path.localeCompare(b.path));
    return documents;
}
|
|
1396
|
+
/**
 * Score each indexed document against the query by adapting it into the node
 * shape expected by `scoreNode` (empty body; path/title/headings only).
 *
 * @param {Array} documents - Output of buildIndexedDocuments.
 * @param {string[]} queryTokens - Tokenized query terms.
 * @param {string} lowerQuery - Lowercased raw query for substring matching.
 * @returns {Array<{document: object, score: number}>} One entry per document,
 *   in input order (caller filters/sorts).
 */
function scoreDocuments(documents, queryTokens, lowerQuery) {
    // Corpus statistics across documents (mirrors scoreNodes over nodes).
    const docFreq = new Map();
    let totalDocLen = 0;
    for (const document of documents) {
        totalDocLen += document.docLen;
        for (const token of document.termFreq.keys()) {
            docFreq.set(token, (docFreq.get(token) ?? 0) + 1);
        }
    }
    const avgDocLen = documents.length > 0 ? Math.max(totalDocLen / documents.length, 1) : 1;
    return documents.map((document) => {
        // Wrap the document as a pseudo-node so scoreNode can be reused;
        // body-dependent fields are left empty on purpose.
        const pseudoNode = {
            section: {
                wiki_id: document.sections[0]?.wiki_id ?? '',
                wiki_slug: document.sections[0]?.wiki_slug ?? '',
                wiki_name: document.sections[0]?.wiki_name ?? '',
                node_id: `${document.path}:doc`,
                path: document.path,
                title: document.title,
                heading_path: document.headings,
                section_path: document.path,
                level: 0,
                start_line: 1,
                end_line: 1,
                content: '',
            },
            content: '',
            lowerPath: document.lowerPath,
            lowerTitle: document.lowerTitle,
            lowerBody: '',
            lowerJoined: document.lowerJoined,
            termFreq: document.termFreq,
            docLen: document.docLen,
        };
        return {
            document,
            score: scoreNode(pseudoNode, queryTokens, lowerQuery, docFreq, documents.length, avgDocLen),
        };
    });
}
|
|
1436
|
+
/**
 * Score a single section node with BM25 over the given scope, then apply
 * tree-aware boosts: inherited document score, direct title match, ancestor
 * heading matches, heading-token overlap, and a mild shallow-section bonus.
 * Returns { score, reasoning } where reasoning lists the applied boosts.
 */
function scoreTreeAwareNode(node, queryTokens, lowerQuery, documentScore, scope) {
    // Scope-local BM25 statistics (document frequency + average length).
    const docFreq = new Map();
    let totalDocLen = 0;
    for (const entry of scope) {
        totalDocLen += entry.docLen;
        for (const token of entry.termFreq.keys()) {
            docFreq.set(token, (docFreq.get(token) ?? 0) + 1);
        }
    }
    const avgDocLen = scope.length > 0 ? Math.max(totalDocLen / scope.length, 1) : 1;
    let score = scoreNode(node, queryTokens, lowerQuery, docFreq, scope.length, avgDocLen);
    const reasoning = [];
    if (documentScore > 0) {
        score += documentScore * 0.35;
        reasoning.push('selected from a high-scoring document');
    }
    if (lowerQuery && node.lowerTitle.includes(lowerQuery)) {
        score += 2.5;
        reasoning.push('section title directly matches the query');
    }
    // Pre-lowercase headings once instead of per-comparison.
    const lowerHeadings = (node.section.heading_path ?? []).map((heading) => heading.toLowerCase());
    // Bug fix: guard against the empty query — ''.includes('') is true for
    // every string, so an unguarded check counted every ancestor as a match
    // when lowerQuery was empty. Mirrors the guard in findMatchedHeadings().
    const matchedAncestors = lowerHeadings
        .slice(0, -1)
        .filter((heading) => (lowerQuery !== '' && heading.includes(lowerQuery))
        || queryTokens.some((token) => heading.includes(token)));
    if (matchedAncestors.length > 0) {
        score += matchedAncestors.length * 1.25;
        reasoning.push('ancestor headings match the query context');
    }
    const headingTokenHits = queryTokens.filter((token) => lowerHeadings.some((heading) => heading.includes(token))).length;
    if (headingTokenHits > 0) {
        score += headingTokenHits * 0.6;
        reasoning.push('heading path overlaps with query terms');
    }
    // Slightly prefer shallow (H1/H2) sections over deeply nested ones.
    if (node.section.level > 0 && node.section.level <= 2) {
        score += 0.2;
    }
    return { score, reasoning };
}
|
|
1474
|
+
/**
 * Collect up to five headings from a document that match the query: either
 * the full lowercased query as a substring (when non-empty) or any single
 * query token.
 */
function findMatchedHeadings(document, lowerQuery, queryTokens) {
    const matches = [];
    for (const heading of document.headings) {
        const lower = heading.toLowerCase();
        const isMatch = (lowerQuery && lower.includes(lowerQuery))
            || queryTokens.some((token) => lower.includes(token));
        if (isMatch) {
            matches.push(heading);
            if (matches.length === 5) {
                break;
            }
        }
    }
    return matches;
}
|
|
1481
|
+
/**
 * BM25 score of a node for the tokenized query, plus fixed phrase-match
 * bonuses when the full lowercased query appears in the node's title (+3),
 * joined text (+2), path (+1.5) — first match wins — and body (+1, additive).
 */
function scoreNode(node, queryTokens, lowerQuery, docFreq, totalDocs, avgDocLen) {
    // Standard BM25 constants.
    const K1 = 1.5;
    const B = 0.75;
    const length = Math.max(node.docLen, 1);
    let total = 0;
    for (const term of queryTokens) {
        const tf = node.termFreq.get(term) ?? 0;
        const df = docFreq.get(term) ?? 0;
        // Terms absent from this node or the whole corpus contribute nothing.
        if (tf === 0 || df === 0) {
            continue;
        }
        const idf = Math.log(1 + ((totalDocs - df + 0.5) / (df + 0.5)));
        total += idf * ((tf * (K1 + 1)) / (tf + K1 * (1 - B + B * (length / avgDocLen))));
    }
    if (lowerQuery) {
        if (node.lowerTitle.includes(lowerQuery)) {
            total += 3;
        }
        else if (node.lowerJoined.includes(lowerQuery)) {
            total += 2;
        }
        else if (node.lowerPath.includes(lowerQuery)) {
            total += 1.5;
        }
        if (node.lowerBody.includes(lowerQuery)) {
            total += 1;
        }
    }
    return total;
}
|
|
1514
|
+
/**
 * Pick a display snippet for a search hit. Preference order: first line
 * containing the full query, then first line containing any query token,
 * then the node's compacted content (falling back to its section path).
 * All snippets are truncated to 220 characters.
 */
function buildSnippet(node, lowerQuery, queryTokens) {
    // Candidate lines: the section title followed by each content line,
    // whitespace-compacted, with blanks dropped.
    const candidates = [node.section.title, ...node.content.split('\n')]
        .map((line) => compactWhitespace(line))
        .filter((line) => line.length > 0);
    if (lowerQuery) {
        for (const candidate of candidates) {
            if (candidate.toLowerCase().includes(lowerQuery)) {
                return truncateSnippet(candidate, 220);
            }
        }
    }
    for (const candidate of candidates) {
        const lower = candidate.toLowerCase();
        if (queryTokens.some((token) => lower.includes(token))) {
            return truncateSnippet(candidate, 220);
        }
    }
    return truncateSnippet(compactWhitespace(node.content || node.section.section_path), 220);
}
|
|
1538
|
+
// ---------------------------------------------------------------------------
|
|
1539
|
+
// Markdown parsing helpers
|
|
1540
|
+
// ---------------------------------------------------------------------------
|
|
1541
|
+
/**
 * Extract ATX headings from markdown lines, starting at startIndex
 * (typically past any front matter). Skips headings inside fenced code
 * blocks. Returns [{ title, level, line, path }] where `line` is 1-based
 * and `path` is the full ancestor heading chain ending at the heading.
 */
function extractMarkdownHeadings(lines, startIndex) {
    const headings = [];
    // Stack of open ancestor headings, used to build each heading's path.
    const stack = [];
    // Non-empty while inside a fenced code block; holds the opening marker
    // (``` or ~~~) so only a matching fence closes the block.
    let fenceMarker = '';
    for (let i = startIndex; i < lines.length; i += 1) {
        const trimmed = lines[i].trim();
        const marker = parseFenceMarker(trimmed);
        if (marker) {
            if (!fenceMarker) {
                // Opening fence.
                fenceMarker = marker;
            }
            else if (trimmed.startsWith(fenceMarker)) {
                // Closing fence of the same kind.
                fenceMarker = '';
            }
            // Fence lines themselves never contain headings.
            continue;
        }
        if (fenceMarker) {
            // Inside a code block — ignore everything, including '#' lines.
            continue;
        }
        // MARKDOWN_HEADING_RE is defined elsewhere in this file; from usage
        // here it captures [1] = the run of '#' chars and [2] = heading text.
        const match = lines[i].match(MARKDOWN_HEADING_RE);
        if (!match) {
            continue;
        }
        const level = match[1].length;
        const title = normalizeHeadingTitle(match[2]);
        if (!title) {
            // Skip headings that are empty after normalization.
            continue;
        }
        // Pop ancestors at the same or deeper level so the stack only holds
        // strictly shallower headings — the true ancestor chain.
        while (stack.length > 0 && stack[stack.length - 1].level >= level) {
            stack.pop();
        }
        const headingPath = [...stack.map((entry) => entry.title), title];
        headings.push({
            title,
            level,
            line: i + 1, // 1-based line number
            path: headingPath,
        });
        stack.push({ title, level });
    }
    return headings;
}
|
|
1583
|
+
/**
 * Detect a YAML ('---') or TOML ('+++') front-matter block at the top of
 * the file. Returns the number of lines it occupies (including both
 * delimiter lines), or 0 when there is no closed front-matter block.
 */
function detectFrontMatterEnd(lines) {
    const delimiter = lines[0]?.trim();
    if (delimiter !== '---' && delimiter !== '+++') {
        return 0;
    }
    // Front matter only counts when the opening delimiter is closed by an
    // identical one further down.
    const closing = lines.findIndex((line, index) => index > 0 && line.trim() === delimiter);
    return closing === -1 ? 0 : closing + 1;
}
|
|
1598
|
+
/**
 * Normalize raw heading text: strip any trailing fence noise (per
 * TRAILING_FENCE_RE, defined elsewhere in this file), trim whitespace,
 * and remove surrounding backticks.
 */
function normalizeHeadingTitle(raw) {
    const withoutTrailingFence = raw.replace(TRAILING_FENCE_RE, '');
    return withoutTrailingFence.trim().replace(/^`+|`+$/g, '');
}
|
|
1601
|
+
/**
 * Parse a wiki node id of the form "<path>:doc", "<path>:preface", or
 * "<path>:L<line>" into { path, kind[, line] }.
 * @throws {Error} when the id does not match the expected shape or the
 *   path portion is empty.
 */
function parseNodeId(nodeId) {
    const match = nodeId.match(/^(.*):(doc|preface|L(\d+))$/);
    if (!match || !match[1]) {
        throw new Error(`invalid wiki node_id: ${nodeId}`);
    }
    const filePath = normalizeRequiredWikiPath(match[1]);
    switch (match[2]) {
        case 'doc':
            return { path: filePath, kind: 'doc' };
        case 'preface':
            return { path: filePath, kind: 'preface' };
        default:
            // Remaining shape is "L<digits>" — capture group 3 holds the digits.
            return {
                path: filePath,
                kind: 'line',
                line: Number.parseInt(match[3], 10),
            };
    }
}
|
|
1619
|
+
/**
 * Return the code-fence marker a (trimmed) line opens or closes:
 * '```' or '~~~', or '' when the line is not a fence.
 */
function parseFenceMarker(line) {
    for (const marker of ['```', '~~~']) {
        if (line.startsWith(marker)) {
            return marker;
        }
    }
    return '';
}
|
|
1628
|
+
/**
 * Derive a human-readable title from a file path: the basename without its
 * extension, with '-' and '_' turned into spaces. Falls back to the full
 * path when the result would be empty.
 */
function defaultFileTitle(filePath) {
    const extension = path.posix.extname(filePath);
    const stem = path.posix.basename(filePath, extension);
    const humanized = stem.replaceAll('-', ' ').replaceAll('_', ' ').trim();
    return humanized || filePath;
}
|
|
1632
|
+
/**
 * Find the first non-blank line number (1-based) within [startLine, endLine],
 * clamped to the array bounds. Returns the clamped start when every line in
 * the range is blank.
 */
function findFirstNonEmptyLine(lines, startLine, endLine) {
    const first = Math.max(startLine, 1);
    const last = Math.min(endLine, lines.length);
    for (let lineNo = first; lineNo <= last; lineNo += 1) {
        const text = lines[lineNo - 1];
        if (text !== undefined && text.trim() !== '') {
            return lineNo;
        }
    }
    return first;
}
|
|
1642
|
+
// ---------------------------------------------------------------------------
|
|
1643
|
+
// Path & string utilities
|
|
1644
|
+
// ---------------------------------------------------------------------------
|
|
1645
|
+
/**
 * Trim a ref string, mapping null/undefined and blank strings to undefined.
 */
function normalizeRef(ref) {
    const trimmed = ref?.trim();
    return trimmed || undefined;
}
|
|
1649
|
+
/**
 * Normalize a wiki-relative path: convert to POSIX separators, strip a
 * leading './' or leading slashes and any trailing slashes, and map
 * blank/'.'/nullish input to ''.
 */
function normalizeWikiPath(value) {
    if (!value) {
        return '';
    }
    const stripped = toPosix(value.trim())
        .replace(/^\.?\//, '')
        .replace(/^\/+/, '');
    if (stripped === '.') {
        return '';
    }
    return stripped.replace(/\/+$/, '');
}
|
|
1656
|
+
/**
 * Like normalizeWikiPath, but rejects input that normalizes to the empty
 * string (blank, '.', or nullish).
 * @throws {Error} when the normalized path is empty.
 */
function normalizeRequiredWikiPath(value) {
    const result = normalizeWikiPath(value);
    if (result === '') {
        throw new Error('path is required');
    }
    return result;
}
|
|
1663
|
+
/**
 * Resolve the acting agent id: an explicit value wins, then the
 * PRLL_AGENT_ID and AGENT_ID environment variables. Blank values are
 * skipped; returns undefined when nothing usable is found.
 */
function resolveAgentId(explicit) {
    const candidates = [explicit, process.env.PRLL_AGENT_ID, process.env.AGENT_ID];
    for (const candidate of candidates) {
        const trimmed = candidate?.trim();
        if (trimmed) {
            return trimmed;
        }
    }
    return undefined;
}
|
|
1667
|
+
/**
 * Resolve a wiki mount path to an absolute filesystem path.
 * With a mount root configured, the result must stay strictly inside the
 * root (the root itself and anything escaping it are rejected). Without a
 * root, only relative paths are allowed and resolve against the CWD.
 * @throws {Error} on blank input, escape attempts, or an absolute path
 *   given without PRLL_WIKI_MOUNT_ROOT.
 */
function resolveMountPath(mountPath, mountRoot) {
    const trimmedMountPath = mountPath.trim();
    if (!trimmedMountPath) {
        throw new Error('mount path is required');
    }
    const trimmedRoot = mountRoot?.trim();
    if (!trimmedRoot) {
        // No sandbox root: absolute paths are refused outright.
        if (path.isAbsolute(trimmedMountPath)) {
            throw new Error('absolute mount path requires PRLL_WIKI_MOUNT_ROOT');
        }
        return path.resolve(trimmedMountPath);
    }
    const absoluteRoot = path.resolve(trimmedRoot);
    const candidate = path.isAbsolute(trimmedMountPath)
        ? path.resolve(trimmedMountPath)
        : path.resolve(absoluteRoot, trimmedMountPath);
    // An empty/'.' relative means the candidate IS the root; '..' prefixes
    // mean it escaped it — both are rejected.
    const relative = path.relative(absoluteRoot, candidate);
    const escapes = !relative
        || relative === '.'
        || relative === '..'
        || relative.startsWith(`..${path.sep}`);
    if (escapes) {
        throw new Error(`mount path "${mountPath}" escapes PRLL_WIKI_MOUNT_ROOT`);
    }
    return candidate;
}
|
|
1688
|
+
/**
 * Resolve a wiki-relative path against the given root, rejecting any path
 * that resolves to the root itself or outside it.
 * @throws {Error} when the path escapes the wiki root.
 */
function resolvePathInsideRoot(root, relativePath) {
    const normalized = normalizeWikiPath(relativePath);
    const candidate = path.resolve(root, normalized);
    const relative = path.relative(root, candidate);
    const escapes = relative === ''
        || relative === '.'
        || relative === '..'
        || relative.startsWith(`..${path.sep}`);
    if (escapes) {
        throw new Error(`path "${relativePath}" escapes wiki root`);
    }
    return candidate;
}
|
|
1697
|
+
// ACL is now enforced server-side via path_scopes. These helpers are no-ops.
|
|
1698
|
+
// Intentionally a no-op: ACL enforcement moved server-side (path_scopes).
// The signature is preserved so existing call sites need no changes.
function enforceAllowedPrefix(_pathPrefix, _allowedPrefixes) {
    // No-op: server handles ACL via path_scopes.
}
|
|
1701
|
+
// Intentionally always true: the server manifest is already ACL-filtered,
// so every file reaching this check is readable. Signature preserved for
// existing call sites.
function matchesAllowedPrefix(_filePath, _allowedPrefixes) {
    // Server manifest already filters by ACL — all returned files are readable.
    return true;
}
|
|
1705
|
+
/**
 * Whether a path points at a markdown file (.md, .markdown, or .mdx),
 * case-insensitively, after normalizing separators to POSIX.
 */
function isMarkdownFile(filePath) {
    const extension = path.posix.extname(toPosix(filePath)).toLowerCase();
    return ['.md', '.markdown', '.mdx'].includes(extension);
}
|
|
1709
|
+
/**
 * Collapse all runs of whitespace (including newlines/tabs) into single
 * spaces and trim the ends.
 */
function compactWhitespace(value) {
    const words = value.trim().split(/\s+/);
    return words.filter((word) => word !== '').join(' ');
}
|
|
1712
|
+
/**
 * Truncate a string to `limit` characters, appending '...' within the
 * limit when something was cut. Limits of 3 or less hard-truncate with no
 * ellipsis (there is no room for one).
 */
function truncateSnippet(value, limit) {
    if (value.length <= limit) {
        return value;
    }
    return limit <= 3 ? value.slice(0, limit) : `${value.slice(0, limit - 3)}...`;
}
|
|
1721
|
+
/**
 * Convert a path from the platform's separator to POSIX '/' separators.
 * On POSIX hosts this is a no-op.
 */
function toPosix(value) {
    return value.replaceAll(path.sep, path.posix.sep);
}
|
|
1724
|
+
/**
 * Trim an optional string; nullish and blank inputs become undefined.
 */
function normalizeOptionalString(value) {
    if (value == null) {
        return undefined;
    }
    const trimmed = value.trim();
    return trimmed.length > 0 ? trimmed : undefined;
}
|
|
1728
|
+
/**
 * Whether a filesystem path is accessible. Never rejects — any access
 * error (missing file, permissions, etc.) maps to false.
 */
async function pathExists(target) {
    try {
        await fs.access(target);
    }
    catch {
        return false;
    }
    return true;
}
|
|
1737
|
+
/**
 * Resolve the agent id from the tool context (or environment) and fail
 * loudly when none is available — wiki write tools must attribute changes.
 * @throws {Error} when no agent id can be resolved.
 */
function requireAgentId(ctx) {
    const agentId = resolveAgentId(ctx.agentId);
    if (agentId === undefined) {
        throw new Error('wiki write tools require PRLL_AGENT_ID or an explicit agentId');
    }
    return agentId;
}
|
|
1744
|
+
// ---------------------------------------------------------------------------
|
|
1745
|
+
// Tokenizer (CJK-aware, for search/query)
|
|
1746
|
+
// ---------------------------------------------------------------------------
|
|
1747
|
+
/**
 * CJK-aware tokenizer. Latin/digit runs become single lowercase tokens;
 * CJK runs are emitted as individual characters plus adjacent-character
 * bigrams (so two-character CJK words remain searchable). Everything else
 * is a separator.
 */
function tokenize(text) {
    const input = text.trim();
    if (!input) {
        return [];
    }
    const tokens = [];
    let latinBuffer = '';
    let cjkBuffer = '';
    const emitLatin = () => {
        if (latinBuffer) {
            tokens.push(latinBuffer.toLowerCase());
            latinBuffer = '';
        }
    };
    const emitCjk = () => {
        if (!cjkBuffer) {
            return;
        }
        const chars = Array.from(cjkBuffer.toLowerCase());
        // Unigrams first, then overlapping bigrams.
        tokens.push(...chars);
        for (let i = 0; i + 1 < chars.length; i += 1) {
            tokens.push(chars[i] + chars[i + 1]);
        }
        cjkBuffer = '';
    };
    // Iterating a string yields full code points, not UTF-16 units.
    for (const char of input) {
        if (isCjkChar(char)) {
            emitLatin();
            cjkBuffer += char;
        }
        else if (/[0-9\p{L}]/u.test(char)) {
            emitCjk();
            latinBuffer += char.toLowerCase();
        }
        else {
            // Separator: close out both buffers.
            emitLatin();
            emitCjk();
        }
    }
    emitLatin();
    emitCjk();
    return tokens;
}
|
|
1793
|
+
/**
 * Whether a character falls in the CJK ranges this tokenizer treats as
 * ideographic/syllabic: CJK Unified Ideographs (+ Extension A), Hiragana,
 * Katakana, and Hangul Syllables.
 */
function isCjkChar(char) {
    const codePoint = char.codePointAt(0) ?? 0;
    const ranges = [
        [0x4e00, 0x9fff], // CJK Unified Ideographs
        [0x3400, 0x4dbf], // CJK Unified Ideographs Extension A
        [0x3040, 0x309f], // Hiragana
        [0x30a0, 0x30ff], // Katakana
        [0xac00, 0xd7af], // Hangul Syllables
    ];
    return ranges.some(([low, high]) => codePoint >= low && codePoint <= high);
}
|