@parall/cli 1.12.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/commands/agents.d.ts +3 -0
- package/dist/commands/agents.d.ts.map +1 -0
- package/dist/commands/agents.js +18 -0
- package/dist/commands/chats.d.ts +3 -0
- package/dist/commands/chats.d.ts.map +1 -0
- package/dist/commands/chats.js +105 -0
- package/dist/commands/dm.d.ts +3 -0
- package/dist/commands/dm.d.ts.map +1 -0
- package/dist/commands/dm.js +51 -0
- package/dist/commands/mcp.d.ts +3 -0
- package/dist/commands/mcp.d.ts.map +1 -0
- package/dist/commands/mcp.js +439 -0
- package/dist/commands/messages.d.ts +3 -0
- package/dist/commands/messages.d.ts.map +1 -0
- package/dist/commands/messages.js +102 -0
- package/dist/commands/projects.d.ts +3 -0
- package/dist/commands/projects.d.ts.map +1 -0
- package/dist/commands/projects.js +104 -0
- package/dist/commands/refs.d.ts +3 -0
- package/dist/commands/refs.d.ts.map +1 -0
- package/dist/commands/refs.js +50 -0
- package/dist/commands/tasks.d.ts +3 -0
- package/dist/commands/tasks.d.ts.map +1 -0
- package/dist/commands/tasks.js +240 -0
- package/dist/commands/users.d.ts +3 -0
- package/dist/commands/users.d.ts.map +1 -0
- package/dist/commands/users.js +49 -0
- package/dist/commands/wiki.d.ts +3 -0
- package/dist/commands/wiki.d.ts.map +1 -0
- package/dist/commands/wiki.js +644 -0
- package/dist/index.d.ts +3 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +31 -0
- package/dist/lib/client.d.ts +24 -0
- package/dist/lib/client.d.ts.map +1 -0
- package/dist/lib/client.js +47 -0
- package/dist/lib/output.d.ts +3 -0
- package/dist/lib/output.d.ts.map +1 -0
- package/dist/lib/output.js +18 -0
- package/dist/lib/wiki.d.ts +269 -0
- package/dist/lib/wiki.d.ts.map +1 -0
- package/dist/lib/wiki.js +1800 -0
- package/package.json +43 -0
|
@@ -0,0 +1,644 @@
|
|
|
1
|
+
import { resolveCredentials, resolveRuntimeContext } from '../lib/client.js';
import { printError, printJson } from '../lib/output.js';
import { getAfcsDiff, getAfcsStatus, getWikiBlob, getWikiChangesetDetail, getWikiChangesetDiff, getWikiLog, getWikiOutline, getWikiSection, getWikiTree, listWikiChangesets, listWikis, proposeWikiChangeset, queryWiki, requestWikiAccess, resetWikiWorkspace, resolveWikiRefOrDefault, searchWiki, syncAllMounts, watchMounts, } from '../lib/wiki.js';
|
|
4
|
+
export function registerWikiCommands(program) {
|
|
5
|
+
const wiki = program.command('wiki').description('Parall Wiki — read, edit, and propose changes');
|
|
6
|
+
// ---- Core workflow ----
|
|
7
|
+
wiki
|
|
8
|
+
.command('sync')
|
|
9
|
+
.description('Sync wiki files to local workspace. First run downloads all files; subsequent runs pull updates.')
|
|
10
|
+
.argument('[wiki]', 'Wiki ID or slug (omit to sync all wikis)')
|
|
11
|
+
.action(async (_wiki) => {
|
|
12
|
+
try {
|
|
13
|
+
const ctx = resolveCredentials();
|
|
14
|
+
const result = await syncAllMounts(ctx);
|
|
15
|
+
for (const entry of result.synced) {
|
|
16
|
+
process.stderr.write(`wiki ${entry.slug} synced → ${entry.path}\n`);
|
|
17
|
+
}
|
|
18
|
+
printJson(result);
|
|
19
|
+
}
|
|
20
|
+
catch (error) {
|
|
21
|
+
printError(error);
|
|
22
|
+
process.exit(1);
|
|
23
|
+
}
|
|
24
|
+
});
|
|
25
|
+
wiki
|
|
26
|
+
.command('status')
|
|
27
|
+
.description('Show local changes, pending changesets, and permission summary.')
|
|
28
|
+
.argument('[wiki]', 'Wiki ID or slug (auto-resolves if org has one wiki)')
|
|
29
|
+
.action(async (wikiRef) => {
|
|
30
|
+
try {
|
|
31
|
+
const ctx = resolveCredentials();
|
|
32
|
+
const result = await getAfcsStatus(ctx, wikiRef);
|
|
33
|
+
process.stderr.write(`Wiki: ${result.wiki_name} (${result.wiki_slug})\n`);
|
|
34
|
+
process.stderr.write(`Mount: ${result.mount_path}\n`);
|
|
35
|
+
process.stderr.write(`Mode: ${result.mode}\n\n`);
|
|
36
|
+
if (result.local_changes.length === 0) {
|
|
37
|
+
process.stderr.write('Local changes: none\n');
|
|
38
|
+
}
|
|
39
|
+
else {
|
|
40
|
+
process.stderr.write('Local changes:\n');
|
|
41
|
+
for (const file of result.local_changes) {
|
|
42
|
+
const s = file.additions > 0 && file.deletions > 0 ? 'M'
|
|
43
|
+
: file.additions > 0 ? 'A' : 'D';
|
|
44
|
+
process.stderr.write(` ${s} ${file.path} (+${file.additions} -${file.deletions})\n`);
|
|
45
|
+
}
|
|
46
|
+
}
|
|
47
|
+
process.stderr.write('\n');
|
|
48
|
+
if (result.changesets.length === 0) {
|
|
49
|
+
process.stderr.write('Changesets: none\n');
|
|
50
|
+
}
|
|
51
|
+
else {
|
|
52
|
+
process.stderr.write('Changesets:\n');
|
|
53
|
+
for (const cs of result.changesets) {
|
|
54
|
+
process.stderr.write(` ${cs.id} ${cs.status.toUpperCase()} "${cs.title}"\n`);
|
|
55
|
+
if (cs.feedback) {
|
|
56
|
+
process.stderr.write(` Feedback: ${cs.feedback}\n`);
|
|
57
|
+
}
|
|
58
|
+
}
|
|
59
|
+
}
|
|
60
|
+
if (result.permissions) {
|
|
61
|
+
process.stderr.write('\nPermissions:\n');
|
|
62
|
+
process.stderr.write(` Read: ${result.permissions.readable_prefixes.join(', ')}\n`);
|
|
63
|
+
process.stderr.write(` Write: ${result.permissions.writable_prefixes.join(', ') || 'none'}\n`);
|
|
64
|
+
}
|
|
65
|
+
printJson(result);
|
|
66
|
+
}
|
|
67
|
+
catch (error) {
|
|
68
|
+
printError(error);
|
|
69
|
+
}
|
|
70
|
+
});
|
|
71
|
+
wiki
|
|
72
|
+
.command('diff')
|
|
73
|
+
.description('Show local workspace diff against last synced state.')
|
|
74
|
+
.argument('[wiki]', 'Wiki ID or slug (auto-resolves if org has one wiki)')
|
|
75
|
+
.option('--raw', 'Print only the unified diff patch')
|
|
76
|
+
.action(async (wikiRef, options) => {
|
|
77
|
+
try {
|
|
78
|
+
const ctx = resolveCredentials();
|
|
79
|
+
const result = await getAfcsDiff(ctx, wikiRef);
|
|
80
|
+
if (options?.raw) {
|
|
81
|
+
process.stdout.write(result.patch);
|
|
82
|
+
if (result.patch && !result.patch.endsWith('\n'))
|
|
83
|
+
process.stdout.write('\n');
|
|
84
|
+
return;
|
|
85
|
+
}
|
|
86
|
+
printJson(result);
|
|
87
|
+
}
|
|
88
|
+
catch (error) {
|
|
89
|
+
printError(error);
|
|
90
|
+
}
|
|
91
|
+
});
|
|
92
|
+
wiki
|
|
93
|
+
.command('reset')
|
|
94
|
+
.description('Discard all local changes and restore files to the last synced state (re-downloads from server).')
|
|
95
|
+
.argument('[wiki]', 'Wiki ID or slug (auto-resolves if org has one wiki)')
|
|
96
|
+
.action(async (wikiRef) => {
|
|
97
|
+
try {
|
|
98
|
+
const ctx = resolveCredentials();
|
|
99
|
+
const result = await resetWikiWorkspace(ctx, wikiRef);
|
|
100
|
+
process.stderr.write(`Reset ${result.files_restored} file(s) to last synced state.\n`);
|
|
101
|
+
printJson(result);
|
|
102
|
+
}
|
|
103
|
+
catch (error) {
|
|
104
|
+
printError(error);
|
|
105
|
+
}
|
|
106
|
+
});
|
|
107
|
+
wiki
|
|
108
|
+
.command('request-access')
|
|
109
|
+
.description('Request read/write access to a locked wiki path. Creates an approval card for a maintainer.')
|
|
110
|
+
.argument('<path>', 'Path to request access for (e.g. docs/ops/)')
|
|
111
|
+
.argument('[wiki]', 'Wiki ID or slug (auto-resolves if org has one wiki)')
|
|
112
|
+
.option('--reason <reason>', 'Reason for the access request')
|
|
113
|
+
.action(async (targetPath, wikiRef, options) => {
|
|
114
|
+
try {
|
|
115
|
+
const ctx = resolveCredentials();
|
|
116
|
+
const result = await requestWikiAccess(ctx, wikiRef, targetPath, options.reason);
|
|
117
|
+
process.stderr.write(`${result.message}\n`);
|
|
118
|
+
printJson(result);
|
|
119
|
+
}
|
|
120
|
+
catch (error) {
|
|
121
|
+
printError(error);
|
|
122
|
+
}
|
|
123
|
+
});
|
|
124
|
+
wiki
|
|
125
|
+
.command('log')
|
|
126
|
+
.description('Show version history. With a path: per-file history. Without: recent operations.')
|
|
127
|
+
.argument('[wiki]', 'Wiki ID or slug (auto-resolves if org has one wiki)')
|
|
128
|
+
.argument('[path]', 'File path for per-file history')
|
|
129
|
+
.action(async (wikiRef, filePath) => {
|
|
130
|
+
try {
|
|
131
|
+
const ctx = resolveCredentials();
|
|
132
|
+
const result = await getWikiLog(ctx, wikiRef, filePath);
|
|
133
|
+
if (result.entries.length === 0) {
|
|
134
|
+
process.stderr.write('No history entries found.\n');
|
|
135
|
+
}
|
|
136
|
+
else {
|
|
137
|
+
for (const entry of result.entries) {
|
|
138
|
+
const pathSuffix = entry.path ? ` ${entry.path}` : '';
|
|
139
|
+
process.stderr.write(`${entry.id.slice(0, 8)} ${entry.created_at} ${entry.action}${pathSuffix}\n`);
|
|
140
|
+
}
|
|
141
|
+
}
|
|
142
|
+
printJson(result);
|
|
143
|
+
}
|
|
144
|
+
catch (error) {
|
|
145
|
+
printError(error);
|
|
146
|
+
}
|
|
147
|
+
});
|
|
148
|
+
// ---- Changeset subcommands ----
|
|
149
|
+
const changeset = wiki.command('changeset').description('Manage wiki changesets (proposals)');
|
|
150
|
+
changeset
|
|
151
|
+
.command('create')
|
|
152
|
+
.description('Create a new changeset from local workspace changes.')
|
|
153
|
+
.argument('[wiki]', 'Wiki ID or slug (auto-resolves if org has one wiki)')
|
|
154
|
+
.option('--title <title>', 'Changeset title (required for new changesets)')
|
|
155
|
+
.option('--message <message>', 'Changeset commit message (supports prll:// refs)')
|
|
156
|
+
.option('--update <changesetId>', 'Re-propose an existing rejected/conflict changeset (title is inherited)')
|
|
157
|
+
.option('--chat-id <chatId>', 'Source chat ID for approval workflow')
|
|
158
|
+
.option('--message-id <messageId>', 'Source message ID for approval workflow')
|
|
159
|
+
.option('--session-id <sessionId>', 'Source agent session ID for approval workflow')
|
|
160
|
+
.action(async (wikiRef, options) => {
|
|
161
|
+
try {
|
|
162
|
+
if (!options.update && !options.title) {
|
|
163
|
+
process.stderr.write('Error: --title is required when creating a new changeset.\n');
|
|
164
|
+
process.stderr.write(' Use --update <id> to re-propose a rejected/conflict changeset (title inherited).\n');
|
|
165
|
+
process.exit(1);
|
|
166
|
+
}
|
|
167
|
+
const ctx = resolveCredentials();
|
|
168
|
+
const rtx = resolveRuntimeContext();
|
|
169
|
+
// Inherit title from existing changeset when re-proposing via --update
|
|
170
|
+
const inheritedTitle = options.update && !options.title
|
|
171
|
+
? (await getWikiChangesetDetail(ctx, wikiRef, options.update)).changeset.title
|
|
172
|
+
: undefined;
|
|
173
|
+
const result = await proposeWikiChangeset(ctx, wikiRef, {
|
|
174
|
+
title: options.title ?? inheritedTitle ?? '',
|
|
175
|
+
message: options.message,
|
|
176
|
+
changesetId: options.update,
|
|
177
|
+
sourceChatId: options.chatId ?? rtx.chatId,
|
|
178
|
+
sourceMessageId: options.messageId ?? rtx.triggerMessageId,
|
|
179
|
+
sourceRunId: options.sessionId ?? rtx.sessionId,
|
|
180
|
+
});
|
|
181
|
+
const verb = options.update ? 'Re-proposed' : 'Created';
|
|
182
|
+
process.stderr.write(`${verb} changeset ${result.changeset.id}\n`);
|
|
183
|
+
process.stderr.write(`Changed files: ${result.changed_paths.join(', ')}\n`);
|
|
184
|
+
printJson(result);
|
|
185
|
+
}
|
|
186
|
+
catch (error) {
|
|
187
|
+
printError(error);
|
|
188
|
+
}
|
|
189
|
+
});
|
|
190
|
+
changeset
|
|
191
|
+
.command('list')
|
|
192
|
+
.description('List all changesets. Shows ID, status, title, and date.')
|
|
193
|
+
.argument('[wiki]', 'Wiki ID or slug (auto-resolves if org has one wiki)')
|
|
194
|
+
.action(async (wikiRef) => {
|
|
195
|
+
try {
|
|
196
|
+
const ctx = resolveCredentials();
|
|
197
|
+
const result = await listWikiChangesets(ctx, wikiRef);
|
|
198
|
+
if (result.changesets.length === 0) {
|
|
199
|
+
process.stderr.write('No changesets found.\n');
|
|
200
|
+
}
|
|
201
|
+
else {
|
|
202
|
+
for (const cs of result.changesets) {
|
|
203
|
+
process.stderr.write(`${cs.id} ${cs.status.toUpperCase().padEnd(10)} "${cs.title}" ${cs.created_at}\n`);
|
|
204
|
+
}
|
|
205
|
+
}
|
|
206
|
+
printJson(result);
|
|
207
|
+
}
|
|
208
|
+
catch (error) {
|
|
209
|
+
printError(error);
|
|
210
|
+
}
|
|
211
|
+
});
|
|
212
|
+
changeset
|
|
213
|
+
.command('show')
|
|
214
|
+
.description('Show changeset detail including status, changed files, and feedback.')
|
|
215
|
+
.argument('<changesetId>', 'Changeset ID')
|
|
216
|
+
.argument('[wiki]', 'Wiki ID or slug (auto-resolves if org has one wiki)')
|
|
217
|
+
.action(async (changesetId, wikiRef) => {
|
|
218
|
+
try {
|
|
219
|
+
const ctx = resolveCredentials();
|
|
220
|
+
const result = await getWikiChangesetDetail(ctx, wikiRef, changesetId);
|
|
221
|
+
const cs = result.changeset;
|
|
222
|
+
process.stderr.write(`Changeset: ${cs.id}\n`);
|
|
223
|
+
process.stderr.write(`Status: ${cs.status.toUpperCase()}\n`);
|
|
224
|
+
process.stderr.write(`Title: ${cs.title}\n`);
|
|
225
|
+
if (cs.message)
|
|
226
|
+
process.stderr.write(`Message: ${cs.message}\n`);
|
|
227
|
+
process.stderr.write(`Created: ${cs.created_at}\n`);
|
|
228
|
+
process.stderr.write(`Updated: ${cs.updated_at}\n`);
|
|
229
|
+
if (cs.changed_paths.length > 0) {
|
|
230
|
+
process.stderr.write('Changed files:\n');
|
|
231
|
+
for (const p of cs.changed_paths)
|
|
232
|
+
process.stderr.write(` ${p}\n`);
|
|
233
|
+
}
|
|
234
|
+
if (cs.last_error) {
|
|
235
|
+
process.stderr.write(`\nFeedback: ${cs.last_error}\n`);
|
|
236
|
+
if (cs.status === 'rejected') {
|
|
237
|
+
process.stderr.write('\nTo fix: edit the file(s), then run:\n'
|
|
238
|
+
+ ` parall wiki changeset create --update ${cs.id}\n`);
|
|
239
|
+
}
|
|
240
|
+
else if (cs.status === 'conflict') {
|
|
241
|
+
process.stderr.write('\nTo resolve: run `parall wiki sync`, fix conflicts, then:\n'
|
|
242
|
+
+ ` parall wiki changeset create --update ${cs.id}\n`);
|
|
243
|
+
}
|
|
244
|
+
}
|
|
245
|
+
printJson(result);
|
|
246
|
+
}
|
|
247
|
+
catch (error) {
|
|
248
|
+
printError(error);
|
|
249
|
+
}
|
|
250
|
+
});
|
|
251
|
+
changeset
|
|
252
|
+
.command('diff')
|
|
253
|
+
.description('Show the diff of a changeset.')
|
|
254
|
+
.argument('<changesetId>', 'Changeset ID')
|
|
255
|
+
.argument('[wiki]', 'Wiki ID or slug (auto-resolves if org has one wiki)')
|
|
256
|
+
.option('--raw', 'Print only the unified diff patch')
|
|
257
|
+
.action(async (changesetId, wikiRef, options) => {
|
|
258
|
+
try {
|
|
259
|
+
const ctx = resolveCredentials();
|
|
260
|
+
const result = await getWikiChangesetDiff(ctx, wikiRef, changesetId);
|
|
261
|
+
if (options?.raw) {
|
|
262
|
+
process.stdout.write(result.diff.patch);
|
|
263
|
+
if (result.diff.patch && !result.diff.patch.endsWith('\n'))
|
|
264
|
+
process.stdout.write('\n');
|
|
265
|
+
return;
|
|
266
|
+
}
|
|
267
|
+
printJson(result);
|
|
268
|
+
}
|
|
269
|
+
catch (error) {
|
|
270
|
+
printError(error);
|
|
271
|
+
}
|
|
272
|
+
});
|
|
273
|
+
changeset
|
|
274
|
+
.command('merge')
|
|
275
|
+
.description('Merge a changeset into the wiki default branch')
|
|
276
|
+
.argument('<changesetId>', 'Changeset ID')
|
|
277
|
+
.argument('[wiki]', 'Wiki ID or slug (auto-resolves if org has one wiki)')
|
|
278
|
+
.action(async (changesetId, wikiRef) => {
|
|
279
|
+
try {
|
|
280
|
+
const ctx = resolveCredentials();
|
|
281
|
+
const { resolveWikiRefOrDefault } = await import('../lib/wiki.js');
|
|
282
|
+
const wiki = await resolveWikiRefOrDefault(ctx, wikiRef);
|
|
283
|
+
const result = await ctx.client.mergeWikiChangeset(ctx.orgId, wiki.id, changesetId);
|
|
284
|
+
process.stderr.write(`Merged changeset ${result.id} — "${result.title}"\n`);
|
|
285
|
+
printJson(result);
|
|
286
|
+
}
|
|
287
|
+
catch (error) {
|
|
288
|
+
printError(error);
|
|
289
|
+
}
|
|
290
|
+
});
|
|
291
|
+
changeset
|
|
292
|
+
.command('close')
|
|
293
|
+
.description('Close a changeset without merging')
|
|
294
|
+
.argument('<changesetId>', 'Changeset ID')
|
|
295
|
+
.argument('[wiki]', 'Wiki ID or slug (auto-resolves if org has one wiki)')
|
|
296
|
+
.action(async (changesetId, wikiRef) => {
|
|
297
|
+
try {
|
|
298
|
+
const ctx = resolveCredentials();
|
|
299
|
+
const { resolveWikiRefOrDefault } = await import('../lib/wiki.js');
|
|
300
|
+
const wiki = await resolveWikiRefOrDefault(ctx, wikiRef);
|
|
301
|
+
const result = await ctx.client.closeWikiChangeset(ctx.orgId, wiki.id, changesetId);
|
|
302
|
+
process.stderr.write(`Closed changeset ${result.id}\n`);
|
|
303
|
+
printJson(result);
|
|
304
|
+
}
|
|
305
|
+
catch (error) {
|
|
306
|
+
printError(error);
|
|
307
|
+
}
|
|
308
|
+
});
|
|
309
|
+
// ---- Content browsing (search, query, outline, section) ----
|
|
310
|
+
wiki
|
|
311
|
+
.command('search')
|
|
312
|
+
.description('Search wiki content by keyword')
|
|
313
|
+
.argument('<query>', 'Search query')
|
|
314
|
+
.argument('[wiki]', 'Wiki ID or slug')
|
|
315
|
+
.option('--path <path>', 'Restrict to a path prefix')
|
|
316
|
+
.option('--limit <n>', 'Max results (default 5)')
|
|
317
|
+
.option('--include-content', 'Include full section content in results')
|
|
318
|
+
.action(async (query, wikiRef, options) => {
|
|
319
|
+
try {
|
|
320
|
+
const ctx = resolveCredentials();
|
|
321
|
+
const limit = options.limit ? parseIntStrict(options.limit) : undefined;
|
|
322
|
+
const results = await searchWiki(ctx, wikiRef, query, {
|
|
323
|
+
pathPrefix: options.path,
|
|
324
|
+
limit,
|
|
325
|
+
includeContent: options.includeContent,
|
|
326
|
+
});
|
|
327
|
+
if (results.length === 0) {
|
|
328
|
+
process.stderr.write('No results found.\n');
|
|
329
|
+
}
|
|
330
|
+
else {
|
|
331
|
+
for (const r of results) {
|
|
332
|
+
process.stderr.write(`[${r.score.toFixed(2)}] ${r.section_path}\n`);
|
|
333
|
+
process.stderr.write(` ${r.snippet}\n\n`);
|
|
334
|
+
}
|
|
335
|
+
}
|
|
336
|
+
printJson(results);
|
|
337
|
+
}
|
|
338
|
+
catch (error) {
|
|
339
|
+
printError(error);
|
|
340
|
+
}
|
|
341
|
+
});
|
|
342
|
+
wiki
|
|
343
|
+
.command('query')
|
|
344
|
+
.description('Tree-aware structured query over wiki sections')
|
|
345
|
+
.argument('<query>', 'Natural-language query')
|
|
346
|
+
.argument('[wiki]', 'Wiki ID or slug')
|
|
347
|
+
.option('--path <path>', 'Restrict to a path prefix')
|
|
348
|
+
.option('--limit <n>', 'Max results (default 5)')
|
|
349
|
+
.option('--include-content', 'Include full section content in results')
|
|
350
|
+
.action(async (query, wikiRef, options) => {
|
|
351
|
+
try {
|
|
352
|
+
const ctx = resolveCredentials();
|
|
353
|
+
const limit = options.limit ? parseIntStrict(options.limit) : undefined;
|
|
354
|
+
const result = await queryWiki(ctx, wikiRef, query, {
|
|
355
|
+
pathPrefix: options.path,
|
|
356
|
+
limit,
|
|
357
|
+
includeContent: options.includeContent,
|
|
358
|
+
});
|
|
359
|
+
if (result.documents.length > 0) {
|
|
360
|
+
process.stderr.write('Documents:\n');
|
|
361
|
+
for (const doc of result.documents) {
|
|
362
|
+
process.stderr.write(` [${doc.score.toFixed(2)}] ${doc.path} — ${doc.title} (${doc.node_count} sections)\n`);
|
|
363
|
+
}
|
|
364
|
+
process.stderr.write('\nSections:\n');
|
|
365
|
+
for (const s of result.sections) {
|
|
366
|
+
process.stderr.write(` [${s.score.toFixed(2)}] ${s.section_path}\n`);
|
|
367
|
+
process.stderr.write(` ${s.snippet}\n\n`);
|
|
368
|
+
}
|
|
369
|
+
}
|
|
370
|
+
else {
|
|
371
|
+
process.stderr.write('No results found.\n');
|
|
372
|
+
}
|
|
373
|
+
printJson(result);
|
|
374
|
+
}
|
|
375
|
+
catch (error) {
|
|
376
|
+
printError(error);
|
|
377
|
+
}
|
|
378
|
+
});
|
|
379
|
+
wiki
|
|
380
|
+
.command('outline')
|
|
381
|
+
.description('Show structural outline of wiki markdown sections')
|
|
382
|
+
.argument('[wiki]', 'Wiki ID or slug')
|
|
383
|
+
.option('--path <path>', 'Restrict to a path prefix')
|
|
384
|
+
.action(async (wikiRef, options) => {
|
|
385
|
+
try {
|
|
386
|
+
const ctx = resolveCredentials();
|
|
387
|
+
const result = await getWikiOutline(ctx, wikiRef, { pathPrefix: options.path });
|
|
388
|
+
process.stderr.write(`Wiki: ${result.wiki_name} (${result.wiki_slug}) | ${result.file_count} files | source: ${result.source}\n\n`);
|
|
389
|
+
let currentPath = '';
|
|
390
|
+
for (const node of result.nodes) {
|
|
391
|
+
if (node.path !== currentPath) {
|
|
392
|
+
currentPath = node.path;
|
|
393
|
+
process.stderr.write(`${currentPath}\n`);
|
|
394
|
+
}
|
|
395
|
+
const indent = ' '.repeat(Math.max(node.level, 1));
|
|
396
|
+
process.stderr.write(`${indent}${node.title} (${node.node_id})\n`);
|
|
397
|
+
}
|
|
398
|
+
printJson(result);
|
|
399
|
+
}
|
|
400
|
+
catch (error) {
|
|
401
|
+
printError(error);
|
|
402
|
+
}
|
|
403
|
+
});
|
|
404
|
+
wiki
|
|
405
|
+
.command('tree')
|
|
406
|
+
.description('List files and directories in a wiki')
|
|
407
|
+
.argument('[wiki]', 'Wiki ID or slug (auto-resolves if org has one wiki)')
|
|
408
|
+
.option('--path <path>', 'Restrict to a subdirectory')
|
|
409
|
+
.action(async (wikiRef, options) => {
|
|
410
|
+
try {
|
|
411
|
+
const ctx = resolveCredentials();
|
|
412
|
+
printJson(await getWikiTree(ctx, wikiRef, { path: options?.path }));
|
|
413
|
+
}
|
|
414
|
+
catch (error) {
|
|
415
|
+
printError(error);
|
|
416
|
+
}
|
|
417
|
+
});
|
|
418
|
+
wiki
|
|
419
|
+
.command('cat')
|
|
420
|
+
.description('Read a wiki page by file path (raw markdown content)')
|
|
421
|
+
.argument('<path>', 'File path (e.g. docs/auth.md)')
|
|
422
|
+
.argument('[wiki]', 'Wiki ID or slug')
|
|
423
|
+
.action(async (filePath, wikiRef) => {
|
|
424
|
+
try {
|
|
425
|
+
const ctx = resolveCredentials();
|
|
426
|
+
const result = await getWikiBlob(ctx, wikiRef, { path: filePath });
|
|
427
|
+
process.stderr.write(`${filePath} (${result.size} bytes)\n\n`);
|
|
428
|
+
process.stdout.write(result.content);
|
|
429
|
+
if (result.content && !result.content.endsWith('\n'))
|
|
430
|
+
process.stdout.write('\n');
|
|
431
|
+
}
|
|
432
|
+
catch (error) {
|
|
433
|
+
printError(error);
|
|
434
|
+
}
|
|
435
|
+
});
|
|
436
|
+
wiki
|
|
437
|
+
.command('section')
|
|
438
|
+
.description('Fetch a wiki section by node ID')
|
|
439
|
+
.argument('<nodeId>', 'Section node ID (from search/outline results)')
|
|
440
|
+
.argument('[wiki]', 'Wiki ID or slug')
|
|
441
|
+
.action(async (nodeId, wikiRef) => {
|
|
442
|
+
try {
|
|
443
|
+
const ctx = resolveCredentials();
|
|
444
|
+
const result = await getWikiSection(ctx, wikiRef, { nodeId });
|
|
445
|
+
process.stderr.write(`${result.section_path} (L${result.start_line}–${result.end_line})\n\n`);
|
|
446
|
+
process.stdout.write(result.content);
|
|
447
|
+
if (result.content && !result.content.endsWith('\n'))
|
|
448
|
+
process.stdout.write('\n');
|
|
449
|
+
}
|
|
450
|
+
catch (error) {
|
|
451
|
+
printError(error);
|
|
452
|
+
}
|
|
453
|
+
});
|
|
454
|
+
wiki
|
|
455
|
+
.command('locate')
|
|
456
|
+
.description('Resolve a heading path to a stable section ID (e.g. docs/auth.md#OAuth → section details)')
|
|
457
|
+
.argument('<anchor>', 'File path with heading anchor (e.g. docs/auth.md#OAuth or docs/auth.md#Auth#OAuth)')
|
|
458
|
+
.argument('[wiki]', 'Wiki ID or slug')
|
|
459
|
+
.action(async (anchor, wikiRef) => {
|
|
460
|
+
try {
|
|
461
|
+
const ctx = resolveCredentials();
|
|
462
|
+
const hashIdx = anchor.indexOf('#');
|
|
463
|
+
if (hashIdx < 0) {
|
|
464
|
+
process.stderr.write('Error: anchor must contain # (e.g. docs/auth.md#OAuth)\n');
|
|
465
|
+
process.exit(1);
|
|
466
|
+
}
|
|
467
|
+
const filePath = anchor.slice(0, hashIdx);
|
|
468
|
+
const fragment = anchor.slice(hashIdx + 1);
|
|
469
|
+
const fragParts = fragment.split('#').map(s => s.toLowerCase());
|
|
470
|
+
const { resolveWikiRefOrDefault, getWikiOutline } = await import('../lib/wiki.js');
|
|
471
|
+
const wiki = await resolveWikiRefOrDefault(ctx, wikiRef);
|
|
472
|
+
const outline = await getWikiOutline(ctx, wiki.slug, { pathPrefix: filePath });
|
|
473
|
+
// Find section whose heading_path ends with the fragment parts
|
|
474
|
+
const match = outline.nodes.find(node => {
|
|
475
|
+
if (node.path !== filePath || !node.heading_path || node.heading_path.length < fragParts.length)
|
|
476
|
+
return false;
|
|
477
|
+
const offset = node.heading_path.length - fragParts.length;
|
|
478
|
+
return fragParts.every((part, i) => node.heading_path[offset + i].toLowerCase() === part);
|
|
479
|
+
});
|
|
480
|
+
if (!match) {
|
|
481
|
+
process.stderr.write(`Section not found: ${anchor}\n`);
|
|
482
|
+
process.stderr.write(`Available sections in ${filePath}:\n`);
|
|
483
|
+
for (const node of outline.nodes.filter(n => n.path === filePath && n.level > 0)) {
|
|
484
|
+
process.stderr.write(` ${node.path}#${(node.heading_path ?? []).join('#')} (${node.node_id})\n`);
|
|
485
|
+
}
|
|
486
|
+
process.exit(1);
|
|
487
|
+
}
|
|
488
|
+
const stableId = `${match.path}#${(match.heading_path ?? [match.title]).join('#')}`;
|
|
489
|
+
const prllUri = `prll://${wiki.id}/${match.path}#${(match.heading_path ?? [match.title]).join('#')}`;
|
|
490
|
+
process.stderr.write(`Resolved: ${anchor}\n`);
|
|
491
|
+
process.stderr.write(` section_path: ${match.section_path}\n`);
|
|
492
|
+
process.stderr.write(` node_id: ${match.node_id}\n`);
|
|
493
|
+
process.stderr.write(` stable_id: ${stableId}\n`);
|
|
494
|
+
process.stderr.write(` prll_uri: ${prllUri}\n`);
|
|
495
|
+
process.stderr.write(` lines: L${match.start_line}–${match.end_line}\n`);
|
|
496
|
+
printJson({ ...match, stable_id: stableId, prll_uri: prllUri });
|
|
497
|
+
}
|
|
498
|
+
catch (error) {
|
|
499
|
+
printError(error);
|
|
500
|
+
}
|
|
501
|
+
});
|
|
502
|
+
wiki
|
|
503
|
+
.command('pages')
|
|
504
|
+
.description('List all wiki pages with metadata (lightweight page index)')
|
|
505
|
+
.argument('[wiki]', 'Wiki ID or slug (auto-resolves if org has one wiki)')
|
|
506
|
+
.action(async (wikiRef) => {
|
|
507
|
+
try {
|
|
508
|
+
const ctx = resolveCredentials();
|
|
509
|
+
const wiki = await (await import('../lib/wiki.js')).resolveWikiRefOrDefault(ctx, wikiRef);
|
|
510
|
+
const result = await ctx.client.getWikiPageIndex(ctx.orgId, wiki.id);
|
|
511
|
+
process.stderr.write(`Wiki: ${result.wiki_name} (${result.wiki_slug}) | ${result.page_count} pages\n\n`);
|
|
512
|
+
for (const page of result.pages) {
|
|
513
|
+
const headings = page.top_headings.length > 0 ? ` — ${page.top_headings.join(', ')}` : '';
|
|
514
|
+
process.stderr.write(` ${page.path} "${page.title}" (${page.section_count} sections, ${page.size_bytes}B)${headings}\n`);
|
|
515
|
+
}
|
|
516
|
+
printJson(result);
|
|
517
|
+
}
|
|
518
|
+
catch (error) {
|
|
519
|
+
printError(error);
|
|
520
|
+
}
|
|
521
|
+
});
|
|
522
|
+
// ---- References (lat.md-style knowledge graph) ----
|
|
523
|
+
const wikiRefs = wiki.command('refs').description('Wiki reference graph — extract and validate prll:// links');
|
|
524
|
+
wikiRefs
|
|
525
|
+
.command('list')
|
|
526
|
+
.description('List all prll:// references found in wiki markdown files')
|
|
527
|
+
.argument('[wiki]', 'Wiki ID or slug (auto-resolves if org has one wiki)')
|
|
528
|
+
.action(async (wikiRef) => {
|
|
529
|
+
try {
|
|
530
|
+
const ctx = resolveCredentials();
|
|
531
|
+
const w = await (await import('../lib/wiki.js')).resolveWikiRefOrDefault(ctx, wikiRef);
|
|
532
|
+
const result = await ctx.client.getWikiRefs(ctx.orgId, w.id);
|
|
533
|
+
process.stderr.write(`Wiki: ${result.wiki_name} (${result.wiki_slug}) | ${result.total} refs\n\n`);
|
|
534
|
+
for (const r of result.refs) {
|
|
535
|
+
const ctx_text = r.context ? ` "${r.context}"` : '';
|
|
536
|
+
process.stderr.write(` ${r.source_path}:${r.line} → ${r.uri}${ctx_text}\n`);
|
|
537
|
+
}
|
|
538
|
+
printJson(result);
|
|
539
|
+
}
|
|
540
|
+
catch (error) {
|
|
541
|
+
printError(error);
|
|
542
|
+
}
|
|
543
|
+
});
|
|
544
|
+
wikiRefs
|
|
545
|
+
.command('backlinks')
|
|
546
|
+
.description('Show what references a specific wiki page or section (who links here?)')
|
|
547
|
+
.argument('<anchor>', 'Wiki path or prll:// URI (e.g. docs/auth.md or docs/auth.md#OAuth)')
|
|
548
|
+
.argument('[wiki]', 'Wiki ID or slug (auto-resolves if org has one wiki)')
|
|
549
|
+
.option('--limit <n>', 'Max results (default 20)')
|
|
550
|
+
.action(async (anchor, wikiRef, options) => {
|
|
551
|
+
try {
|
|
552
|
+
const ctx = resolveCredentials();
|
|
553
|
+
const limit = options.limit ? parseIntStrict(options.limit) : 20;
|
|
554
|
+
// Build prll:// URI from the anchor if it's not already one
|
|
555
|
+
let uri = anchor;
|
|
556
|
+
if (!anchor.startsWith('prll://')) {
|
|
557
|
+
const { resolveWikiRefOrDefault } = await import('../lib/wiki.js');
|
|
558
|
+
const wiki = await resolveWikiRefOrDefault(ctx, wikiRef);
|
|
559
|
+
const hashIdx = anchor.indexOf('#');
|
|
560
|
+
const filePath = hashIdx >= 0 ? anchor.slice(0, hashIdx) : anchor;
|
|
561
|
+
const fragment = hashIdx >= 0 ? anchor.slice(hashIdx + 1) : '';
|
|
562
|
+
uri = `prll://${wiki.id}/${filePath}${fragment ? '#' + fragment : ''}`;
|
|
563
|
+
}
|
|
564
|
+
const result = await ctx.client.getBacklinks(ctx.orgId, { uri, limit });
|
|
565
|
+
if (result.backlinks.length === 0) {
|
|
566
|
+
process.stderr.write(`No backlinks found for ${uri}\n`);
|
|
567
|
+
}
|
|
568
|
+
else {
|
|
569
|
+
process.stderr.write(`Backlinks for ${uri}:\n\n`);
|
|
570
|
+
for (const bl of result.backlinks) {
|
|
571
|
+
const who = bl.sender_name ?? bl.author_name ?? bl.source_type;
|
|
572
|
+
const snippet = bl.snippet ? ` — ${bl.snippet}` : '';
|
|
573
|
+
const ctx_text = bl.context ? ` [${bl.context}]` : '';
|
|
574
|
+
process.stderr.write(` ${bl.source_type}/${bl.source_id} ${who}${ctx_text}${snippet}\n`);
|
|
575
|
+
}
|
|
576
|
+
}
|
|
577
|
+
printJson(result);
|
|
578
|
+
}
|
|
579
|
+
catch (error) {
|
|
580
|
+
printError(error);
|
|
581
|
+
}
|
|
582
|
+
});
|
|
583
|
+
wikiRefs
|
|
584
|
+
.command('check')
|
|
585
|
+
.description('Validate prll:// wiki refs that target files/headings inside the same wiki')
|
|
586
|
+
.argument('[wiki]', 'Wiki ID or slug (auto-resolves if org has one wiki)')
|
|
587
|
+
.action(async (wikiRef) => {
|
|
588
|
+
try {
|
|
589
|
+
const ctx = resolveCredentials();
|
|
590
|
+
const w = await (await import('../lib/wiki.js')).resolveWikiRefOrDefault(ctx, wikiRef);
|
|
591
|
+
const result = await ctx.client.checkWikiRefs(ctx.orgId, w.id);
|
|
592
|
+
if (result.total === 0) {
|
|
593
|
+
process.stderr.write(`Wiki: ${result.wiki_name} — all references valid ✓\n`);
|
|
594
|
+
}
|
|
595
|
+
else {
|
|
596
|
+
process.stderr.write(`Wiki: ${result.wiki_name} — ${result.total} broken ref(s) found:\n\n`);
|
|
597
|
+
for (const b of result.broken) {
|
|
598
|
+
process.stderr.write(` ${b.source_path}:${b.line} → ${b.uri}\n`);
|
|
599
|
+
process.stderr.write(` reason: ${b.reason}\n\n`);
|
|
600
|
+
}
|
|
601
|
+
}
|
|
602
|
+
printJson(result);
|
|
603
|
+
}
|
|
604
|
+
catch (error) {
|
|
605
|
+
printError(error);
|
|
606
|
+
}
|
|
607
|
+
});
|
|
608
|
+
// ---- Utility (not for direct agent use) ----
|
|
609
|
+
wiki
|
|
610
|
+
.command('list')
|
|
611
|
+
.description('List organization wikis')
|
|
612
|
+
.action(async () => {
|
|
613
|
+
try {
|
|
614
|
+
const ctx = resolveCredentials();
|
|
615
|
+
printJson(await listWikis(ctx));
|
|
616
|
+
}
|
|
617
|
+
catch (error) {
|
|
618
|
+
printError(error);
|
|
619
|
+
}
|
|
620
|
+
});
|
|
621
|
+
wiki
|
|
622
|
+
.command('watch')
|
|
623
|
+
.description('Periodically sync wiki files (used by runtime, not for direct use)')
|
|
624
|
+
.option('--interval-sec <n>', 'Refresh interval in seconds', '60')
|
|
625
|
+
.action(async (options) => {
|
|
626
|
+
try {
|
|
627
|
+
const intervalSec = parseIntStrict(options.intervalSec);
|
|
628
|
+
const ctx = resolveCredentials();
|
|
629
|
+
process.stderr.write(`watching wiki mounts every ${intervalSec}s\n`);
|
|
630
|
+
watchMounts(ctx, intervalSec);
|
|
631
|
+
}
|
|
632
|
+
catch (error) {
|
|
633
|
+
printError(error);
|
|
634
|
+
process.exit(1);
|
|
635
|
+
}
|
|
636
|
+
});
|
|
637
|
+
}
|
|
638
|
+
function parseIntStrict(value) {
|
|
639
|
+
const parsed = Number.parseInt(value, 10);
|
|
640
|
+
if (!Number.isFinite(parsed) || parsed <= 0) {
|
|
641
|
+
throw new Error(`invalid positive integer: ${value}`);
|
|
642
|
+
}
|
|
643
|
+
return parsed;
|
|
644
|
+
}
|
package/dist/index.d.ts.map
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":""}
|
package/dist/index.js
ADDED
|
@@ -0,0 +1,31 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
import { createRequire } from 'module';
|
|
3
|
+
import { Command } from 'commander';
|
|
4
|
+
import { registerAgentCommands } from './commands/agents.js';
|
|
5
|
+
import { registerChatCommands } from './commands/chats.js';
|
|
6
|
+
import { registerDMCommands } from './commands/dm.js';
|
|
7
|
+
import { registerMessageCommands } from './commands/messages.js';
|
|
8
|
+
import { registerTaskCommands } from './commands/tasks.js';
|
|
9
|
+
import { registerProjectCommands } from './commands/projects.js';
|
|
10
|
+
import { registerUserCommands } from './commands/users.js';
|
|
11
|
+
import { registerWikiCommands } from './commands/wiki.js';
|
|
12
|
+
import { registerMcpCommands } from './commands/mcp.js';
|
|
13
|
+
import { registerRefCommands } from './commands/refs.js';
|
|
14
|
+
const require = createRequire(import.meta.url);
|
|
15
|
+
const pkg = require('../package.json');
|
|
16
|
+
const program = new Command();
|
|
17
|
+
program
|
|
18
|
+
.name('parall')
|
|
19
|
+
.description('CLI client for Parall — universal agent & human access to Parall API')
|
|
20
|
+
.version(pkg.version);
|
|
21
|
+
registerAgentCommands(program);
|
|
22
|
+
registerChatCommands(program);
|
|
23
|
+
registerDMCommands(program);
|
|
24
|
+
registerMessageCommands(program);
|
|
25
|
+
registerTaskCommands(program);
|
|
26
|
+
registerProjectCommands(program);
|
|
27
|
+
registerUserCommands(program);
|
|
28
|
+
registerWikiCommands(program);
|
|
29
|
+
registerMcpCommands(program);
|
|
30
|
+
registerRefCommands(program);
|
|
31
|
+
program.parse();
|