@runwingman/flightdeck-cli 0.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/src/cli.js ADDED
@@ -0,0 +1,2767 @@
1
+ #!/usr/bin/env node
2
+
3
import { execFileSync } from 'node:child_process';
import { createDecipheriv, randomUUID } from 'node:crypto';
import { mkdtempSync, readFileSync, rmSync, writeFileSync } from 'node:fs';
import { tmpdir } from 'node:os';
import { join } from 'node:path';
import { Command } from 'commander';
import { buildChatContextPayload, buildChatHistoryPayload, buildChatRelatedPayload, buildChatSearchPayload, sendChatReplyCurrent } from './chat-runtime.js';
import { SuperbasedClient } from './client.js';
import { initConfigFromToken, loadConfig } from './config.js';
import { getAllWorkspaceKeyMappings, getMeta, getRow, getRows, openDb } from './db.js';
import { canonicalizeFlowStep, canonicalizeFlowSteps } from './flow-steps.js';
import { buildWrappedMemberKeys, createGroupIdentity, decodeNsec, encryptForNpub, getSession } from './nostr.js';
import { resolveStorageLinks } from './render.js';
import { createStorageMarkdown, defaultFileName, detectMimeType, uploadEncryptedAudioToStorage, uploadFileToStorage } from './storage.js';
import { syncWorkspace } from './sync.js';
import { buildScopeTags, computeScopeLineage, decryptRecordPayload, encryptOwnerPayload, loadGroupKeyMap, makeGroupWriteShare, normalizeScopeLevel, outboundApproval, outboundAudioNote, outboundChannel, outboundChatMessage, outboundComment, outboundDirectory, outboundDocument, outboundFlow, outboundReport, outboundSchedule, outboundScope, outboundTask, parseReferencesFromDescription, recordFamilyHash, resolveFlowLinkage, scopeDepth } from './translators.js';
import { bootstrapWorkspaceKey, buildWorkspaceSession, decryptWorkspaceKey, deleteCachedWorkspaceKeyBlob, getCachedWorkspaceKeyBlob, isWorkspaceKeyRegistered } from './workspace-keys.js';
20
+
21
/** Load the saved CLI config or abort with a setup hint. */
function requireConfig() {
  const loaded = loadConfig();
  if (!loaded) {
    throw new Error('No config found. Run `flightdeck-cli init --token <token>` first.');
  }
  return loaded;
}

/** Convenience wrapper: open the local DB and fetch all rows for a query. */
function getDbRows(tableSql, params = []) {
  return getRows(openDb(), tableSql, params);
}

/** Convenience wrapper: open the local DB and fetch a single row. */
function getDbRow(tableSql, params = []) {
  return getRow(openDb(), tableSql, params);
}

/** Print a command result, honoring the global --json flag. */
function printResult(output) {
  const asJson = program.opts().json;
  console.log(asJson ? JSON.stringify(output, null, 2) : output);
}
41
+
42
/**
 * Print a Phase 03 chat-runtime result. Only `json` output is supported;
 * any other requested format is rejected with an error.
 */
function printRuntimeResult(output, format = 'json') {
  const requested = String(format || 'json').trim().toLowerCase();
  if (requested === 'json') {
    console.log(JSON.stringify(output, null, 2));
    return;
  }
  throw new Error(`Unsupported format: ${format}. Phase 03 chat runtime commands only support --format json.`);
}

/** Parse a JSON column value, returning `fallback` for empty or invalid input. */
function jsonField(value, fallback = []) {
  if (!value) return fallback;
  try {
    return JSON.parse(value);
  } catch {
    return fallback;
  }
}
58
+
59
/**
 * Build the full CLI state bundle from local config + cache with no network
 * calls: config, real user session, sqlite handle, decrypted group keys, an
 * authenticated SuperbasedClient, and (when cached) a workspace-key session.
 */
function getClientAndState() {
  const config = requireConfig();
  const realSession = getSession();
  const db = openDb();
  const keyRows = getRows(db, `SELECT * FROM group_keys_cache ORDER BY group_id, key_version`);
  const groupKeys = loadGroupKeyMap(realSession, keyRows, decodeNsec);

  // Load workspace session key from local cache (sync — no network)
  let wsSession = null;
  const cachedBlob = getCachedWorkspaceKeyBlob(db, config.workspaceOwnerNpub);
  if (cachedBlob) {
    try {
      const { wsKeySecret, wsKeyNpub, wsKeyEpoch } = decryptWorkspaceKey(cachedBlob, realSession.secret, realSession.npub);
      wsSession = buildWorkspaceSession(wsKeySecret, wsKeyNpub, wsKeyEpoch, realSession.npub);
    } catch (err) {
      // A stale or corrupt cached blob is non-fatal: warn and fall back to
      // operating with the real session only.
      console.warn(`Warning: could not decrypt cached workspace key: ${err.message}`);
    }
  }

  // session = workspace key for outbound operations, falls back to real key if not yet bootstrapped
  const session = wsSession ?? realSession;
  // The client is constructed with the real session, then switched to
  // workspace-key auth when one is available.
  const client = new SuperbasedClient({ config, session: realSession, groupKeys });
  if (wsSession) client.setAuthSecret(wsSession.secret);

  const wsKeyMappings = getAllWorkspaceKeyMappings(db);
  return { config, session, realSession, wsSession, db, client, groupKeys, wsKeyMappings };
}
86
+
87
/**
 * Ensure the client operates with a server-registered workspace key.
 * With a cached key: verify it is still registered (authenticating as the
 * real user); drop the cache and fall through to bootstrap when it is not.
 * Without one (or after dropping a stale one): bootstrap a fresh key and
 * reload state. All failures degrade to a warning — a usable state is
 * always returned rather than aborting the command.
 */
async function ensureWorkspaceKey(state) {
  if (state.wsSession) {
    try {
      // The registration check must authenticate as the real user, not the
      // workspace key itself.
      state.client.setAuthSecret(state.realSession.secret);
      const registered = await isWorkspaceKeyRegistered({
        client: state.client,
        config: state.config,
        wsKeyNpub: state.wsSession.npub,
        userNpub: state.realSession.npub,
      });
      if (registered) {
        // Cached key is valid: restore workspace-key auth and use it.
        state.client.setAuthSecret(state.wsSession.secret);
        return state;
      }
      // Server no longer knows this key — drop the stale cache, then fall
      // through to the bootstrap path below with freshly loaded state.
      deleteCachedWorkspaceKeyBlob(state.db, state.config.workspaceOwnerNpub, state.wsSession.npub);
      state.client.setAuthSecret(state.realSession.secret);
      state = getClientAndState();
    } catch (err) {
      // Transient check failure: keep the cached key and carry on.
      if (state.wsSession) state.client.setAuthSecret(state.wsSession.secret);
      console.warn(`Warning: workspace key registration check failed: ${err.message}`);
      return state;
    }
  }
  try {
    await bootstrapWorkspaceKey({
      db: state.db,
      realSession: state.realSession,
      config: state.config,
      client: state.client,
    });
    // Reload state so wsSession is populated from the newly cached blob
    return getClientAndState();
  } catch (err) {
    console.warn(`Warning: workspace key bootstrap failed: ${err.message}`);
    return state;
  }
}
124
+
125
/**
 * Ensure a workspace key, run a quiet sync, then rebuild client/state so
 * callers see the freshly synced local cache.
 */
async function refreshClientAndState() {
  const prepared = await ensureWorkspaceKey(getClientAndState());
  await syncWorkspace({
    client: prepared.client,
    config: prepared.config,
    session: prepared.realSession,
    wsSession: prepared.wsSession,
    quiet: true,
  });
  return getClientAndState();
}

/** Decode a cached row's raw_json column, or null when absent. */
function parseRawRow(row) {
  if (!row?.raw_json) return null;
  return JSON.parse(row.raw_json);
}
142
+
143
/** Hydrate a flows row into a full record, backfilling from *_json columns. */
function materializeFlowRow(row) {
  if (!row) return null;
  const record = parseRawRow(row) || row;
  const steps = Array.isArray(record.steps) ? record.steps : jsonField(row.steps_json, []);
  const groupIds = Array.isArray(record.group_ids) ? record.group_ids : jsonField(row.group_ids_json, []);
  const shares = Array.isArray(record.shares) ? record.shares : jsonField(row.shares_json, []);
  return { ...record, steps, group_ids: groupIds, shares };
}

/** First accessible group ordered by name then id, or undefined when none cached. */
function getPrimaryGroup(db) {
  return getRow(db, `SELECT * FROM groups_cache ORDER BY name ASC, group_id ASC LIMIT 1`);
}

/** Like getPrimaryGroup, but throws when the cache holds no groups. */
function requirePrimaryGroup(db) {
  const group = getPrimaryGroup(db);
  if (!group) throw new Error('No accessible groups found. Run sync first or share a group to this agent.');
  return group;
}

/** Look up a cached group by group_id or by its current group npub. */
function findGroupByRef(db, groupRef) {
  return getRow(db, `SELECT * FROM groups_cache WHERE group_id = ? OR current_group_npub = ?`, [groupRef, groupRef]);
}
167
+
168
/** Map group refs (id or npub) to their cached group_ids, deduped, skipping unknowns. */
function resolveStorageAccessGroupIds(db, groupRefs = []) {
  const ids = new Set();
  for (const groupRef of groupRefs || []) {
    const ref = String(groupRef || '').trim();
    if (!ref) continue;
    const match = findGroupByRef(db, ref);
    if (match?.group_id) ids.add(match.group_id);
  }
  return [...ids];
}

/** Default share list: a single write share for the given group. */
function buildDefaultGroupShares(group, label = '') {
  return [makeGroupWriteShare(group, label)];
}
181
+
182
/** Fetch a cached comment row by record id. */
function findCommentRow(db, commentId) {
  return getRow(db, `SELECT * FROM comments WHERE record_id = ?`, [commentId]);
}

/** Fetch a cached directory row by record id. */
function findDirectoryRow(db, directoryId) {
  return getRow(db, `SELECT * FROM directories WHERE record_id = ?`, [directoryId]);
}

/** Fetch a cached flow row by record id. */
function findFlowRow(db, flowId) {
  return getRow(db, `SELECT * FROM flows WHERE record_id = ?`, [flowId]);
}

/** Fetch a cached scope row by record id. */
function findScopeRow(db, scopeId) {
  return getRow(db, `SELECT * FROM scopes WHERE record_id = ?`, [scopeId]);
}

/** Fetch a cached task row by record id. */
function findTaskRow(db, taskId) {
  return getRow(db, `SELECT * FROM tasks WHERE record_id = ?`, [taskId]);
}

/** Fetch a cached report row by record id. */
function findReportRow(db, reportId) {
  return getRow(db, `SELECT * FROM reports WHERE record_id = ?`, [reportId]);
}
205
+
206
/** Parse a base-10 integer, returning null for blank or unparseable input. */
function maybeParseInt(value) {
  if (value == null || value === '') return null;
  const parsed = Number.parseInt(value, 10);
  if (!Number.isFinite(parsed)) return null;
  return parsed;
}

/** Resolve the next assignee: --clear-assignee wins, then --assign, else keep current. */
function resolveAssigneeValue(options, currentValue = null) {
  if (options.clearAssignee) return null;
  if (options.assign === undefined) return currentValue;
  const trimmed = String(options.assign || '').trim();
  return trimmed === '' ? null : trimmed;
}
220
+
221
/**
 * Build a patch of scope linkage fields for a record.
 * --clear-scope nulls every scope field; an empty ref leaves them untouched;
 * otherwise the referenced scope's lineage is copied in.
 */
function resolveScopeLinkPatch(db, scopeRef, options = {}) {
  const patch = { scope_id: null };
  for (let depth = 1; depth <= 5; depth += 1) patch[`scope_l${depth}_id`] = null;
  if (options.clearScope) return patch;

  const ref = String(scopeRef || '').trim();
  if (!ref) return {};
  const row = findScopeRow(db, ref);
  if (!row) throw new Error(`Scope not found: ${scopeRef}`);
  const scope = parseRawRow(row) || row;

  patch.scope_id = scope.record_id;
  for (let depth = 1; depth <= 5; depth += 1) {
    patch[`scope_l${depth}_id`] = scope[`l${depth}_id`] ?? null;
  }
  return patch;
}
246
+
247
/**
 * Decide which group ids and shares a record should carry, in priority order:
 * an explicit --group ref, then non-empty inherited values, then the
 * workspace's primary group.
 */
function resolveRecordSharesAndGroups({ db, explicitGroupRef = null, inherited = null }) {
  if (explicitGroupRef) {
    const group = findGroupByRef(db, explicitGroupRef);
    if (!group) throw new Error(`Group not found: ${explicitGroupRef}`);
    return {
      groupIds: [group.group_id],
      shares: buildDefaultGroupShares(group, group.name || ''),
    };
  }

  if (inherited) {
    const inheritedGroupIds = inherited.group_ids ?? jsonField(inherited.group_ids_json);
    const inheritedShares = inherited.shares ?? jsonField(inherited.shares_json);
    const hasAny = (inheritedGroupIds || []).length > 0 || (inheritedShares || []).length > 0;
    if (hasAny) {
      return {
        groupIds: inheritedGroupIds || [],
        shares: inheritedShares || [],
      };
    }
  }

  const fallbackGroup = requirePrimaryGroup(db);
  return {
    groupIds: [fallbackGroup.group_id],
    shares: buildDefaultGroupShares(fallbackGroup, fallbackGroup.name || ''),
  };
}
274
+
275
/** Copy a record's scope linkage fields into a patch ({} when no record). */
function scopePatchFromRecord(record = null) {
  if (!record) return {};
  const patch = { scope_id: record.scope_id ?? null };
  for (let depth = 1; depth <= 5; depth += 1) {
    patch[`scope_l${depth}_id`] = record[`scope_l${depth}_id`] ?? null;
  }
  return patch;
}

/** Require a cached row to exist and return its parsed record form. */
function requireParsedRow(row, label, ref) {
  if (!row) throw new Error(`${label} not found: ${ref}`);
  return parseRawRow(row) || row;
}
291
+
292
/**
 * Pick the record a new entity should inherit groups/scope from, in priority
 * order: parent directory, flow, parent task, first linked task, then scope.
 * Returns null when no linkage was requested.
 */
function resolveCreateInheritedRecord(db, {
  parentDirectoryId = null,
  flowId = null,
  parentTaskId = null,
  taskIds = [],
  scopeRef = null,
} = {}) {
  if (parentDirectoryId) {
    return requireParsedRow(findDirectoryRow(db, parentDirectoryId), 'Directory', parentDirectoryId);
  }
  if (flowId) {
    return requireParsedRow(findFlowRow(db, flowId), 'Flow', flowId);
  }
  if (parentTaskId) {
    return requireParsedRow(findTaskRow(db, parentTaskId), 'Task', parentTaskId);
  }
  if (Array.isArray(taskIds) && taskIds.length > 0) {
    return requireParsedRow(findTaskRow(db, taskIds[0]), 'Task', taskIds[0]);
  }
  if (scopeRef) {
    return requireParsedRow(findScopeRow(db, scopeRef), 'Scope', scopeRef);
  }
  return null;
}
308
+
309
/**
 * Compute lineage fields for a new scope nested under an optional parent.
 * Depth is derived from the parent's level and capped at level 5.
 */
function resolveScopeHierarchy(db, scopeId, { parentId = null } = {}) {
  let parent = null;
  if (parentId) {
    const parentRow = findScopeRow(db, parentId);
    if (!parentRow) throw new Error(`Parent scope not found: ${parentId}`);
    parent = parseRawRow(parentRow) || parentRow;
  }

  const parentDepth = parent ? scopeDepth(parent.level) : 0;
  if (parentDepth >= 5) throw new Error('Cannot create scope deeper than level 5.');

  return computeScopeLineage(scopeId, `l${parentDepth + 1}`, parent);
}
320
+
321
/** Normalize a report type string; blank input falls back, unknown input throws. */
function normalizeReportType(type, fallback = 'text') {
  const knownTypes = ['metric', 'timeseries', 'table', 'text'];
  const normalized = String(type || '').trim().toLowerCase();
  if (knownTypes.includes(normalized)) return normalized;
  if (type == null || type === '') return fallback;
  throw new Error('Report type must be one of: metric, timeseries, table, text.');
}

/** JSON.parse with a labeled, human-friendly error on failure. */
function parseJsonText(value, label = 'JSON') {
  try {
    return JSON.parse(value);
  } catch (error) {
    const reason = error instanceof Error ? error.message : String(error);
    throw new Error(`Invalid ${label}: ${reason}`);
  }
}
335
+
336
/**
 * Load a report payload from --payload-file, --payload, or the current
 * stored payload, in that priority order; throws when nothing is available.
 */
function resolveReportPayloadInput(options, currentPayload = null) {
  if (options.payloadFile) {
    const text = readFileSync(options.payloadFile, 'utf8');
    return parseJsonText(text, `report payload file ${options.payloadFile}`);
  }
  if (options.payload !== undefined) {
    return parseJsonText(options.payload, 'report payload');
  }
  if (currentPayload !== null) return currentPayload;
  throw new Error('Provide --payload-file or --payload.');
}

/** Hydrate a reports row, backfilling payload/group_ids from *_json columns. */
function reportFromRow(row) {
  const report = parseRawRow(row) || row;
  if (!report.payload) report.payload = jsonField(row?.payload_json, {});
  if (!report.group_ids) report.group_ids = jsonField(row?.group_ids_json, []);
  return report;
}
353
+
354
/** One-line human summary of a report, keyed by declaration_type. */
function reportSummary(report) {
  const payload = report?.payload ?? {};
  const type = report?.declaration_type;

  if (type === 'metric') {
    const unit = payload.unit ? ` ${payload.unit}` : '';
    const trend = payload.trend?.value != null
      ? ` | trend ${payload.trend.direction || 'flat'} ${payload.trend.value}`
      : '';
    return `${payload.label || 'metric'}: ${payload.value ?? ''}${unit}${trend}`;
  }

  if (type === 'timeseries') {
    const series = Array.isArray(payload.series) ? payload.series : [];
    let pointCount = 0;
    for (const entry of series) {
      pointCount += Array.isArray(entry?.points) ? entry.points.length : 0;
    }
    return `${series.length} series, ${pointCount} points`;
  }

  if (type === 'table') {
    const columnCount = Array.isArray(payload.columns) ? payload.columns.length : 0;
    const rowCount = Array.isArray(payload.rows) ? payload.rows.length : 0;
    return `${columnCount} columns, ${rowCount} rows`;
  }

  // 'text' and anything unrecognized: collapse whitespace, cap at 80 chars.
  return String(payload.body || '').replace(/\s+/g, ' ').trim().slice(0, 80);
}
377
+
378
/** Pad `text` to `width` with the requested alignment (left/right/center). */
function padAligned(text, width, align = 'left') {
  const value = String(text ?? '');
  switch (align) {
    case 'right':
      return value.padStart(width, ' ');
    case 'center': {
      const slack = Math.max(0, width - value.length);
      const leftPad = Math.floor(slack / 2);
      return `${' '.repeat(leftPad)}${value}${' '.repeat(slack - leftPad)}`;
    }
    default:
      return value.padEnd(width, ' ');
  }
}

/** Render rows of objects as a plain-ASCII table with ` | ` separators. */
function renderAsciiTable(columns = [], rows = []) {
  const specs = columns.map((column) => ({
    key: column.key,
    label: column.label,
    align: column.align || 'left',
  }));

  // Each column is as wide as its widest cell or its label.
  const widths = specs.map((spec) => {
    let width = String(spec.label || '').length;
    for (const row of rows) {
      const cell = row && typeof row === 'object' ? row[spec.key] : '';
      width = Math.max(width, String(cell ?? '').length);
    }
    return width;
  });

  const headerLine = specs.map((spec, i) => padAligned(spec.label, widths[i], 'left')).join(' | ');
  const dividerLine = widths.map((width) => '-'.repeat(width)).join('-+-');
  const bodyLines = rows.map((row) =>
    specs.map((spec, i) => padAligned(row?.[spec.key] ?? '', widths[i], spec.align)).join(' | '));
  return [headerLine, dividerLine, ...bodyLines].join('\n');
}
410
+
411
/**
 * Render each series as a label line plus one row per point: x (padded),
 * y, and a '#' bar scaled to the series max (24 chars at full scale).
 */
function renderTimeseriesSeries(series = []) {
  const lines = [];
  for (const entry of series) {
    const label = String(entry?.label || entry?.key || 'series');
    const points = Array.isArray(entry?.points) ? entry.points : [];
    const finiteYs = points
      .map((point) => Number(point?.y))
      .filter((value) => Number.isFinite(value));
    const maxValue = finiteYs.length > 0 ? Math.max(...finiteYs, 0) : 0;

    lines.push(`${label}:`);
    for (const point of points) {
      const rawY = Number(point?.y);
      const safeY = Number.isFinite(rawY) ? rawY : 0;
      const barWidth = safeY > 0 && maxValue > 0
        ? Math.max(1, Math.round((safeY / maxValue) * 24))
        : 0;
      lines.push(`${String(point?.x ?? '').padEnd(12)} | ${String(point?.y ?? '').padStart(6)} ${'#'.repeat(barWidth)}`);
    }
  }
  return lines;
}
427
+
428
/**
 * Render a full report for human terminals: header lines (title, type,
 * timestamps, optional surface/scope) followed by a type-specific body —
 * metric line, timeseries chart, ASCII table, or toned text.
 */
function renderReportHuman(report) {
  const lines = [
    `Report: ${report.title || report.record_id}`,
    `Type: ${report.declaration_type}`,
    `Generated: ${report.generated_at || report.updated_at || ''}`,
  ];
  if (report.surface) lines.push(`Surface: ${report.surface}`);
  if (report.scope_id) {
    const levelSuffix = report.scope_level ? ` (${report.scope_level})` : '';
    lines.push(`Scope: ${report.scope_id}${levelSuffix}`);
  }
  lines.push('');

  const payload = report.payload ?? {};
  if (report.declaration_type === 'metric') {
    const unit = payload.unit ? ` ${payload.unit}` : '';
    lines.push(`${payload.label || 'Metric'}: ${payload.value ?? ''}${unit}`);
    if (payload.trend?.value != null) {
      const trendLabel = payload.trend.label ? ` (${payload.trend.label})` : '';
      lines.push(`Trend: ${payload.trend.direction || 'flat'} ${payload.trend.value}${trendLabel}`);
    }
  } else if (report.declaration_type === 'timeseries') {
    if (payload.x_label || payload.y_label) {
      lines.push(`Axes: ${payload.x_label || 'x'} / ${payload.y_label || 'y'}`);
    }
    lines.push(...renderTimeseriesSeries(payload.series));
  } else if (report.declaration_type === 'table') {
    lines.push(renderAsciiTable(
      Array.isArray(payload.columns) ? payload.columns : [],
      Array.isArray(payload.rows) ? payload.rows : [],
    ));
  } else {
    // 'text' and unknown types render as a toned text body.
    lines.push(`[${String(payload.tone || 'neutral').toUpperCase()}] ${payload.body || ''}`);
  }

  return lines.join('\n').trimEnd();
}
472
+
473
/** Coerce a value or array into a trimmed, deduped list of npub strings. */
function normalizeNpubList(values = []) {
  const list = Array.isArray(values) ? values : [values];
  const seen = new Set();
  for (const candidate of list) {
    const npub = String(candidate || '').trim();
    if (npub) seen.add(npub);
  }
  return [...seen];
}

/**
 * Compute the member list for a group key rotation: current members plus
 * --add-member, minus --remove-member, always re-adding the group owner and
 * the acting session so neither can be locked out.
 */
function resolveRotatedMemberNpubs(group, options, session) {
  const members = new Set([
    ...normalizeNpubList(group.member_npubs ?? jsonField(group.member_npubs_json)),
    ...normalizeNpubList(options.addMember),
  ]);
  for (const removed of normalizeNpubList(options.removeMember)) {
    members.delete(removed);
  }

  if (group.owner_npub) members.add(group.owner_npub);
  if (session.npub) members.add(session.npub);

  if (members.size === 0) {
    throw new Error('Group rotation requires at least one member.');
  }
  return [...members];
}
497
+
498
/**
 * Upload an encrypted audio file to storage and build the matching
 * audio-note record envelope plus the attachment stub that references it.
 * Returns { audioEnvelope, attachment, audioRecordId }.
 */
async function createAudioAttachmentBatch({
  db,
  config,
  session,
  client,
  groupKeys,
  filePath,
  title,
  targetRecordId,
  targetRecordFamilyHash,
  targetGroupIds,
  writeGroupId,
}) {
  const uploaded = await uploadEncryptedAudioToStorage(client, filePath, {
    ownerNpub: config.workspaceOwnerNpub,
    accessGroupIds: resolveStorageAccessGroupIds(db, targetGroupIds),
    contentType: detectMimeType(filePath, 'audio/webm'),
    fileName: defaultFileName(filePath, 'voice-note'),
  });
  // Use the imported randomUUID rather than the global `crypto` object: the
  // WebCrypto global is not available on all supported Node versions, so
  // `crypto.randomUUID()` would throw a ReferenceError there.
  const audioRecordId = randomUUID();
  const audioEnvelope = outboundAudioNote(config.appNpub, session, groupKeys, {
    recordId: audioRecordId,
    ownerNpub: config.workspaceOwnerNpub,
    targetRecordId,
    targetRecordFamilyHash,
    title,
    storageObjectId: uploaded.object_id,
    mimeType: uploaded.content_type,
    durationSeconds: null, // duration is filled in later by transcription tooling
    sizeBytes: uploaded.size_bytes,
    mediaEncryption: uploaded.media_encryption,
    transcriptStatus: 'pending',
    targetGroupIds,
    writeGroupNpub: writeGroupId,
  });
  // Attachment stub embedded on the target record, pointing at the note.
  const attachment = {
    kind: 'audio',
    audio_note_record_id: audioRecordId,
    title,
    duration_seconds: null,
  };
  return { audioEnvelope, attachment, audioRecordId };
}
541
+
542
/** Push record envelopes to the server, then quietly re-sync the local cache. */
async function syncRecordsAndRefresh(client, config, session, envelopes) {
  const result = await client.syncRecords(envelopes);
  // For re-sync after writes, resolve the real session for fallback decrypt;
  // a workspace-key session cannot decrypt everything on its own.
  const realSession = session.isWorkspaceKey ? getSession() : session;
  const wsSession = session.isWorkspaceKey ? session : null;
  await syncWorkspace({ client, config, session: realSession, wsSession, quiet: true });
  return result;
}

/** Attach resolved storage links for the given body field to a row. */
async function withResolvedLinks(client, row, bodyField) {
  if (!row) return null;
  const resolved = await resolveStorageLinks(client, row[bodyField] || '');
  return { ...row, resolved_storage_links: resolved };
}
558
+
559
/** First 21 words of a transcript, with an ellipsis when truncated. */
function audioTranscriptPreview(text) {
  const words = String(text || '').trim().split(/\s+/).filter(Boolean);
  const preview = words.slice(0, 21).join(' ');
  return words.length > 21 ? `${preview}...` : preview;
}

/** First two sentences of a transcript. */
function audioTranscriptSummary(text) {
  const source = String(text || '').trim();
  if (!source) return '';
  const sentences = source.match(/[^.!?]+[.!?]?/g) || [source];
  return sentences.slice(0, 2).join(' ').trim();
}

/** Map an audio MIME type onto a file extension (default webm). */
function audioSourceExtension(mimeType) {
  const mime = String(mimeType || '');
  if (mime.includes('ogg')) return 'ogg';
  if (mime.includes('wav')) return 'wav';
  if (mime.includes('mp4')) return 'm4a';
  return 'webm';
}

/** Strip unsafe characters from a file name and append an extension. */
function sanitizeStorageFileName(name, extension = 'bin') {
  const safeBase = String(name || '').replace(/[^a-zA-Z0-9._-]/g, '_');
  return `${safeBase || 'voice-note'}.${extension}`;
}
582
+
583
/**
 * Candidate YYYY-MM-DD folder dates for a note's storage object: its
 * updated_at date, plus any "D Mon YYYY" date embedded in the title.
 */
function candidateStorageDates(note) {
  const dates = new Set();

  const updatedDate = String(note?.updated_at || '').slice(0, 10);
  if (updatedDate) dates.add(updatedDate);

  const titleMatch = String(note?.title || '').match(/(\d{1,2})\s+([A-Za-z]{3})\s+(\d{4})/);
  if (titleMatch) {
    const monthByAbbrev = {
      Jan: '01', Feb: '02', Mar: '03', Apr: '04', May: '05', Jun: '06',
      Jul: '07', Aug: '08', Sep: '09', Oct: '10', Nov: '11', Dec: '12',
    };
    const month = monthByAbbrev[titleMatch[2]];
    if (month) {
      const day = String(titleMatch[1]).padStart(2, '0');
      dates.add(`${titleMatch[3]}-${month}-${day}`);
    }
  }

  return [...dates].filter(Boolean);
}
604
+
605
/**
 * Last-resort download path for an audio note's encrypted blob: guess the
 * S3 object key from the note's metadata (owner, date, title variants) and
 * fetch candidates directly via a `bun` helper subprocess. Disabled unless
 * FLIGHTDECK_CLI_ALLOW_STORAGE_KEY_GUESS / WINGMAN_YOKE_ALLOW_STORAGE_KEY_GUESS
 * is truthy. Returns a Buffer of the first key that downloads, else null.
 *
 * NOTE(review): the defaults below embed credentials and an endpoint
 * ('superbased' / 'superbased-secret') used when STORAGE_S3_* env vars are
 * unset — presumably dev-environment values; confirm these are not live
 * secrets before shipping.
 */
async function fallbackDownloadAudioNoteBuffer(note) {
  const allowStorageKeyGuess = process.env.FLIGHTDECK_CLI_ALLOW_STORAGE_KEY_GUESS
    || process.env.WINGMAN_YOKE_ALLOW_STORAGE_KEY_GUESS;
  // Opt-in gate: accept 1/true/yes (case-insensitive); anything else disables.
  if (!/^(1|true|yes)$/i.test(String(allowStorageKeyGuess || ''))) {
    return null;
  }
  const endpoint = process.env.STORAGE_S3_ENDPOINT_PUBLIC || 'https://storage.otherstuff.studio';
  const bucket = process.env.STORAGE_S3_BUCKET || 'superbased-storage';
  const accessKeyId = process.env.STORAGE_S3_ACCESS_KEY || 'superbased';
  const secretAccessKey = process.env.STORAGE_S3_SECRET_KEY || 'superbased-secret';
  const candidateDates = candidateStorageDates(note);
  if (!note?.storage_object_id || !note?.owner_npub || candidateDates.length === 0) return null;

  // Try several plausible file names for each candidate date.
  const ext = audioSourceExtension(note.mime_type);
  const fileCandidates = [
    sanitizeStorageFileName(note.title, ext),
    `voice-note.${ext}`,
    `${note.record_id}.${ext}`,
    `${note.storage_object_id}.${ext}`,
  ];
  const keyCandidates = candidateDates.flatMap((objectDate) =>
    fileCandidates.map((fileName) => `v4/${note.owner_npub}/${objectDate}/${note.storage_object_id}-${fileName}`)
  );
  // Everything the helper needs is passed via env as one JSON blob so no
  // secrets appear on the command line.
  const helperInput = JSON.stringify({
    accessKeyId,
    secretAccessKey,
    bucket,
    region: process.env.STORAGE_S3_REGION || 'us-east-1',
    endpoint,
    virtualHostedStyle: !/^(true|1|yes)$/i.test(String(process.env.STORAGE_S3_FORCE_PATH_STYLE || 'true')),
    keys: keyCandidates,
  });

  // Runs under `bun` because it uses Bun.S3Client for presigned URLs.
  const helperScript = `
    const input = JSON.parse(process.env.FLIGHTDECK_CLI_AUDIO_S3_INPUT || process.env.WINGMAN_YOKE_AUDIO_S3_INPUT || '{}');
    const client = new Bun.S3Client({
      accessKeyId: input.accessKeyId,
      secretAccessKey: input.secretAccessKey,
      bucket: input.bucket,
      region: input.region,
      endpoint: input.endpoint,
      virtualHostedStyle: Boolean(input.virtualHostedStyle),
    });
    let payload = null;
    for (const key of input.keys || []) {
      try {
        const url = client.presign(key, { expiresIn: 120 });
        const response = await fetch(url);
        if (!response.ok) continue;
        const bytes = Buffer.from(await response.arrayBuffer()).toString('base64');
        payload = { key, bytes };
        break;
      } catch {}
    }
    if (payload) console.log(JSON.stringify(payload));
  `;

  try {
    const raw = execFileSync('bun', ['-e', helperScript], {
      encoding: 'utf8',
      env: {
        ...process.env,
        FLIGHTDECK_CLI_AUDIO_S3_INPUT: helperInput,
        WINGMAN_YOKE_AUDIO_S3_INPUT: helperInput,
      },
      stdio: ['ignore', 'pipe', 'pipe'],
    }).trim();
    if (!raw) return null;
    const payload = JSON.parse(raw);
    return payload?.bytes ? Buffer.from(payload.bytes, 'base64') : null;
  } catch {
    // Best-effort fallback: missing bun, network failure, or bad JSON all
    // resolve to "not found" rather than an error.
    return null;
  }
}
679
+
680
/**
 * Fetch an audio note's encrypted blob and decrypt it with AES-256-GCM.
 * The key/IV come from the note's media_encryption metadata; the ciphertext
 * carries a trailing 16-byte GCM auth tag. Falls back to direct S3 key
 * guessing when the storage API cannot serve the content.
 *
 * @returns {Promise<Buffer>} the decrypted audio bytes
 * @throws when storage metadata is missing, the ciphertext is malformed,
 *         or GCM authentication fails.
 */
async function downloadAndDecryptAudioNote(client, note) {
  if (!note?.storage_object_id) throw new Error('Audio note has no storage object.');
  if (!note?.media_encryption?.key_b64 || !note?.media_encryption?.iv_b64) {
    throw new Error('Audio note is missing media_encryption metadata.');
  }

  // Touch the object metadata first so access errors surface early.
  await client.getStorageObject(note.storage_object_id);

  let cipherBuffer = null;
  try {
    cipherBuffer = Buffer.from(await client.getStorageContent(note.storage_object_id));
  } catch (error) {
    const fallback = await fallbackDownloadAudioNoteBuffer(note);
    if (!fallback) throw error;
    cipherBuffer = fallback;
  }

  // A valid GCM payload must contain at least the 16-byte auth tag; fail
  // with a clear message instead of a cryptic decipher error.
  if (cipherBuffer.length <= 16) {
    throw new Error('Audio note ciphertext is too short to contain a GCM auth tag.');
  }

  const key = Buffer.from(note.media_encryption.key_b64, 'base64');
  const iv = Buffer.from(note.media_encryption.iv_b64, 'base64');
  // subarray instead of the deprecated Buffer.slice; both are zero-copy views.
  const authTag = cipherBuffer.subarray(cipherBuffer.length - 16);
  const encrypted = cipherBuffer.subarray(0, cipherBuffer.length - 16);
  const decipher = createDecipheriv('aes-256-gcm', key, iv);
  decipher.setAuthTag(authTag);
  return Buffer.concat([decipher.update(encrypted), decipher.final()]);
}
705
+
706
/**
 * Transcribe an audio buffer locally: write it to a temp dir, convert to
 * 16 kHz mono WAV with ffmpeg, then run whisper-cli against the given model.
 * Binary paths default to Homebrew locations and can be overridden via the
 * FFMPEG_BIN and WHISPER_CLI env vars. Returns the trimmed transcript text.
 * The temp dir is always removed, even on failure.
 */
function transcribeAudioBuffer(audioBuffer, audioNoteId, mimeType = 'audio/webm;codecs=opus', modelPath) {
  const ffmpegPath = process.env.FFMPEG_BIN || '/opt/homebrew/bin/ffmpeg';
  const whisperCliPath = process.env.WHISPER_CLI || '/opt/homebrew/bin/whisper-cli';
  // Only ogg/webm containers are distinguished here; ffmpeg probes the
  // actual content regardless of extension.
  const sourceExt = String(mimeType || '').includes('ogg') ? 'ogg' : 'webm';
  const tempDir = mkdtempSync(join(tmpdir(), 'flightdeck-cli-audio-'));
  const sourcePath = join(tempDir, `${audioNoteId}.${sourceExt}`);
  const wavPath = join(tempDir, `${audioNoteId}.wav`);

  try {
    writeFileSync(sourcePath, audioBuffer);
    // whisper expects 16 kHz mono signed 16-bit PCM.
    execFileSync(ffmpegPath, ['-y', '-i', sourcePath, '-ar', '16000', '-ac', '1', '-c:a', 'pcm_s16le', wavPath], {
      stdio: ['ignore', 'ignore', 'pipe'],
    });
    // 5-minute cap so a hung transcription cannot stall the CLI forever.
    return execFileSync(whisperCliPath, ['-m', modelPath, '-f', wavPath, '--no-timestamps', '-l', 'auto'], {
      encoding: 'utf8',
      timeout: 300000,
    }).trim();
  } finally {
    rmSync(tempDir, { recursive: true, force: true });
  }
}
727
+
728
// Root commander program. The global --json flag switches command output
// (via printResult) to machine-readable JSON.
const program = new Command();
program
  .name('flightdeck-cli')
  .description('FlightDeck CLI for Coworker v4 / SuperBased')
  .option('--json', 'print machine-readable JSON');

// `init` — exchange a SuperBased connection token for a saved local config.
program
  .command('init')
  .requiredOption('--token <token>', 'SuperBased connection token')
  .action((options) => {
    const config = initConfigFromToken(options.token);
    console.log(`Saved config for ${config.workspaceOwnerNpub} at ${config.directHttpsUrl}`);
  });
741
+
742
// `status` — show the active config, last sync time, and cached row counts.
program
  .command('status')
  .action(() => {
    const config = requireConfig();
    const db = openDb();
    // Count rows for every cached record family. Table names come from this
    // fixed allowlist, so the interpolation below is safe.
    const countTables = [
      'channels', 'messages', 'tasks', 'comments', 'documents',
      'schedules', 'flows', 'approvals', 'app_schemas',
    ];
    const counts = Object.fromEntries(countTables.map((table) => [
      table,
      getRow(db, `SELECT COUNT(*) AS count FROM ${table}`)?.count ?? 0,
    ]));
    const output = {
      config: {
        directHttpsUrl: config.directHttpsUrl,
        workspaceOwnerNpub: config.workspaceOwnerNpub,
        appNpub: config.appNpub,
        serviceNpub: config.serviceNpub,
      },
      lastSyncAt: getMeta(db, 'sync:last_at'),
      counts,
    };
    // Use the shared helper instead of duplicating the --json branch inline.
    printResult(output);
  });
771
+
772
// Shared handler for `sync` and its alias `getLatest`: ensure a registered
// workspace key, pull the latest records, and report per-family sync counts.
async function runSyncCommand() {
  const { config, realSession, wsSession, client } = await ensureWorkspaceKey(getClientAndState());
  const counts = await syncWorkspace({ client, config, session: realSession, wsSession });
  if (program.opts().json) console.log(JSON.stringify({ synced: counts }, null, 2));
  else console.log(counts);
}

// `getLatest` is an alias for `sync`.
program.command('sync').action(runSyncCommand);
program.command('getLatest').action(runSyncCommand);
781
+
782
// `schemas` command group.
const schemas = program.command('schemas');

// `schemas list` — summarize every downloaded app schema manifest, newest
// first, optionally filtered to one app namespace.
schemas.command('list')
  .description('list downloaded app schema manifests')
  .option('--app <npub>', 'filter by app namespace npub')
  .action((options) => {
    const db = openDb();
    const rows = options.app
      ? getRows(db, `SELECT * FROM app_schemas WHERE app_npub = ? ORDER BY updated_at DESC`, [options.app])
      : getRows(db, `SELECT * FROM app_schemas ORDER BY app_name COLLATE NOCASE, updated_at DESC`);
    const manifests = rows.map((row) => {
      const recordFamilies = jsonField(row.record_families_json, []);
      // Named schemaList to avoid shadowing the `schemas` command group above.
      const schemaList = jsonField(row.schemas_json, []);
      return {
        app_npub: row.app_npub,
        app_name: row.app_name,
        schema_hash: row.schema_hash,
        schema_version: row.schema_version,
        family_count: recordFamilies.length,
        schema_count: schemaList.length,
        record_families: recordFamilies.map((family) => family.record_family_hash).filter(Boolean),
        updated_at: row.updated_at,
      };
    });
    printResult(manifests);
  });
806
+
807
// Show one downloaded manifest: a specific --hash, or the most recent one.
schemas.command('get')
  .description('show a downloaded app schema manifest')
  .argument('<appNpub>')
  .option('--hash <schemaHash>', 'specific schema hash')
  .action((appNpub, options) => {
    const db = openDb();
    let row;
    if (options.hash) {
      row = getRow(db, `SELECT * FROM app_schemas WHERE app_npub = ? AND schema_hash = ?`, [appNpub, options.hash]);
    } else {
      row = getRow(db, `SELECT * FROM app_schemas WHERE app_npub = ? ORDER BY updated_at DESC LIMIT 1`, [appNpub]);
    }
    if (!row) throw new Error(`No downloaded schema manifest for ${appNpub}. Run \`flightdeck-cli sync\` first.`);
    printResult({
      app_npub: row.app_npub,
      app_name: row.app_name,
      schema_hash: row.schema_hash,
      schema_version: row.schema_version,
      record_families: jsonField(row.record_families_json, []),
      schemas: jsonField(row.schemas_json, []),
      updated_at: row.updated_at,
      synced_at: row.synced_at,
    });
  });
828
+
829
const groups = program.command('groups');

// Rotate a group to a fresh key epoch, optionally adjusting membership/name.
groups.command('rotate')
  .argument('<groupRef>')
  .option('--add-member <npub...>', 'include additional members in the rotated epoch')
  .option('--remove-member <npub...>', 'exclude members from the rotated epoch')
  .option('--name <name>', 'optionally rename the group while rotating')
  .action(async (groupRef, options) => {
    const { client, db, config, session } = await refreshClientAndState();
    const groupRow = findGroupByRef(db, groupRef);
    if (!groupRow) throw new Error(`Group not found: ${groupRef}`);
    const group = parseRawRow(groupRow) || groupRow;
    const memberNpubs = resolveRotatedMemberNpubs(group, options, session);
    // A rotation mints a brand-new group identity and re-wraps its key
    // for every member of the new epoch.
    const groupIdentity = createGroupIdentity();
    const rotatePayload = {
      group_npub: groupIdentity.npub,
      member_keys: buildWrappedMemberKeys(groupIdentity, memberNpubs, session.npub, session.secret),
    };
    // Only send a rename when --name was explicitly provided.
    if (options.name !== undefined) rotatePayload.name = options.name;
    const response = await client.rotateGroup(group.group_id, rotatePayload);
    // Quiet refresh so the rotated epoch is visible in the local mirror.
    await syncWorkspace({ client, config, session, quiet: true });
    printResult({ ...response, member_npubs: memberNpubs });
  });
853
+
854
const tasks = program.command('tasks');
// Create a task record and publish it as an encrypted envelope.
// Flow linkage, shares/groups, and scope may all be inherited when not
// given explicitly; the fallback chains below are order-sensitive.
tasks.command('create')
  .requiredOption('--title <title>')
  .option('--description <description>')
  .option('--state <state>', 'task state', 'new')
  .option('--priority <priority>', 'task priority', 'sand')
  .option('--assign <npub>', 'set assigned_to_npub')
  .option('--tags <tags>')
  .option('--scheduled-for <date>')
  .option('--parent <taskId>')
  .option('--group <groupRef>')
  .option('--scope <scopeId>')
  .option('--predecessor <taskIds...>', 'predecessor task IDs (blocks until all DONE)')
  .option('--flow-id <flowId>', 'flow definition this task belongs to')
  .option('--flow-run-id <flowRunId>', 'flow run instance ID')
  .option('--flow-step <step>', 'step number in the flow')
  .action(async (options) => {
    const { client, db, config, session, groupKeys } = await refreshClientAndState();
    const recordId = crypto.randomUUID();
    // Structured references extracted from the (possibly empty) description.
    const descRefs = parseReferencesFromDescription(options.description ?? '');
    // Auto-resolve flow linkage from title unless explicit --flow-id was given
    const hasExplicitFlow = !!(options.flowId || options.flowRunId || options.flowStep);
    let flowPatch = {};
    if (!hasExplicitFlow) {
      // Match against all non-deleted flows in the local mirror.
      const flows = getRows(db, `SELECT record_id, title FROM flows WHERE record_state != 'deleted'`);
      const linkage = resolveFlowLinkage({
        title: options.title,
        description: options.description ?? '',
        references: descRefs,
        flows,
      });
      flowPatch = {
        flow_id: linkage.flow_id,
        flow_run_id: linkage.flow_run_id,
        flow_step: linkage.flow_step,
        references: linkage.references,
      };
    }
    // Pick a record to inherit shares/groups/scope from (flow, parent task,
    // or scope ref — precedence decided by the helper).
    const inherited = resolveCreateInheritedRecord(db, {
      flowId: options.flowId ?? flowPatch.flow_id ?? null,
      parentTaskId: options.parent ?? null,
      scopeRef: options.scope ?? null,
    });
    const { groupIds, shares } = resolveRecordSharesAndGroups({
      db,
      explicitGroupRef: options.group,
      inherited,
    });
    // Explicit --scope wins; otherwise inherit the scope link, if any.
    const scopePatch = options.scope ? resolveScopeLinkPatch(db, options.scope) : scopePatchFromRecord(inherited);
    // First resolved group doubles as the board group (null when none).
    const boardGroupId = groupIds[0] ?? null;
    const envelope = outboundTask(config.appNpub, session, groupKeys, {
      record_id: recordId,
      owner_npub: config.workspaceOwnerNpub,
      title: options.title,
      description: options.description ?? '',
      state: options.state ?? 'new',
      priority: options.priority ?? 'sand',
      assigned_to_npub: resolveAssigneeValue(options, null),
      parent_task_id: options.parent ?? null,
      board_group_id: boardGroupId,
      scheduled_for: options.scheduledFor ?? null,
      tags: options.tags ?? '',
      predecessor_task_ids: options.predecessor ?? null,
      // Explicit option -> auto-resolved linkage -> null.
      flow_id: options.flowId ?? flowPatch.flow_id ?? null,
      flow_run_id: options.flowRunId ?? flowPatch.flow_run_id ?? null,
      flow_step: maybeParseInt(options.flowStep) ?? flowPatch.flow_step ?? null,
      references: flowPatch.references ?? descRefs,
      ...scopePatch,
      shares,
      group_ids: groupIds,
      version: 0,
      signature_npub: session.npub,
    });
    printResult(await syncRecordsAndRefresh(client, config, session, [envelope]));
  });
929
+
930
// List non-deleted tasks, optionally filtered by --state.
tasks.command('list')
  .option('--state <state>')
  .action((options) => {
    const db = openDb();
    let rows;
    if (options.state) {
      rows = getRows(db, `SELECT * FROM tasks WHERE state = ? AND record_state != 'deleted' ORDER BY updated_at DESC`, [options.state]);
    } else {
      rows = getRows(db, `SELECT * FROM tasks WHERE record_state != 'deleted' ORDER BY updated_at DESC`);
    }
    const output = rows.map((row) => ({
      record_id: row.record_id,
      title: row.title,
      state: row.state,
      priority: row.priority,
      assigned_to_npub: row.assigned_to_npub ?? null,
      scheduled_for: row.scheduled_for,
      flow_id: row.flow_id ?? null,
      flow_run_id: row.flow_run_id ?? null,
      flow_step: row.flow_step ?? null,
      references: jsonField(row.references_json),
      group_ids: jsonField(row.group_ids_json),
      updated_at: row.updated_at,
    }));
    if (program.opts().json) {
      console.log(JSON.stringify(output, null, 2));
      return;
    }
    for (const item of output) {
      const flowTag = item.flow_id ? ` [flow:${item.flow_id.slice(0, 8)}]` : '';
      console.log(`${item.record_id} | ${item.state} | ${item.title}${flowTag} | ${item.assigned_to_npub || 'unassigned'}`);
    }
  });
957
+
958
// Show a single task with storage links resolved in its description.
tasks.command('show')
  .argument('<taskId>')
  .action(async (taskId) => {
    const { client, db } = getClientAndState();
    const taskRow = getRow(db, `SELECT * FROM tasks WHERE record_id = ?`, [taskId]);
    if (!taskRow) throw new Error(`Task not found: ${taskId}`);
    const resolved = await withResolvedLinks(client, taskRow, 'description');
    if (program.opts().json) {
      console.log(JSON.stringify(resolved, null, 2));
    } else {
      console.log(resolved);
    }
  });
968
+
969
// Update an existing task. Each field falls back to the stored value when
// its option is absent; flow linkage is auto-resolved only when the task
// has none and no explicit flow option was given.
tasks.command('update')
  .argument('<taskId>')
  .option('--title <title>')
  .option('--description <description>')
  .option('--state <state>')
  .option('--priority <priority>')
  .option('--assign <npub>', 'set assigned_to_npub')
  .option('--clear-assignee', 'clear assigned_to_npub')
  .option('--tags <tags>')
  .option('--scheduled-for <date>')
  .option('--scope <scopeId>')
  .option('--clear-scope')
  .option('--predecessor <taskIds...>', 'predecessor task IDs')
  .option('--flow-id <flowId>', 'flow definition this task belongs to')
  .option('--flow-run-id <flowRunId>', 'flow run instance ID')
  .option('--flow-step <step>', 'step number in the flow')
  .action(async (taskId, options) => {
    const { client, db, config, session, groupKeys } = await refreshClientAndState();
    const row = getRow(db, `SELECT * FROM tasks WHERE record_id = ?`, [taskId]);
    if (!row) throw new Error(`Task not found: ${taskId}`);
    // The full prior record lives in raw_json; patches are built against it.
    const task = JSON.parse(row.raw_json);
    const nextTitle = options.title ?? task.title;
    const nextDesc = options.description ?? task.description;
    // References are re-extracted from the effective (new or old) description.
    const descRefs = parseReferencesFromDescription(nextDesc);
    const hasExplicitFlow = !!(options.flowId || options.flowRunId || options.flowStep);
    let flowPatch = {};
    if (!hasExplicitFlow && !task.flow_id) {
      const flows = getRows(db, `SELECT record_id, title FROM flows WHERE record_state != 'deleted'`);
      const linkage = resolveFlowLinkage({
        title: nextTitle,
        description: nextDesc,
        references: descRefs,
        flows,
      });
      flowPatch = {
        flow_id: linkage.flow_id,
        flow_run_id: linkage.flow_run_id,
        flow_step: linkage.flow_step,
        references: linkage.references,
      };
    }
    const envelope = outboundTask(config.appNpub, session, groupKeys, task, {
      title: nextTitle,
      description: nextDesc,
      state: options.state ?? task.state,
      priority: options.priority ?? task.priority,
      // Helper arbitrates between --assign, --clear-assignee, and the
      // current value.
      assigned_to_npub: resolveAssigneeValue(options, task.assigned_to_npub ?? null),
      tags: options.tags ?? task.tags,
      scheduled_for: options.scheduledFor ?? task.scheduled_for,
      predecessor_task_ids: options.predecessor ?? task.predecessor_task_ids ?? null,
      // Precedence: explicit option -> auto-resolved linkage -> stored value.
      flow_id: options.flowId ?? flowPatch.flow_id ?? task.flow_id ?? null,
      flow_run_id: options.flowRunId ?? flowPatch.flow_run_id ?? task.flow_run_id ?? null,
      flow_step: options.flowStep != null ? maybeParseInt(options.flowStep) : (flowPatch.flow_step ?? task.flow_step ?? null),
      references: flowPatch.references ?? descRefs,
      // Handles --scope and --clear-scope (helper reads both from options).
      ...resolveScopeLinkPatch(db, options.scope, options),
    });
    printResult(await syncRecordsAndRefresh(client, config, session, [envelope]));
  });
1027
+
1028
// Add a comment (optionally threaded) to a task.
tasks.command('comment')
  .argument('<taskId>')
  .requiredOption('--body <body>')
  .option('--parent <commentId>', 'parent comment id for thread reply')
  .action(async (taskId, options) => {
    const { client, db, config, session, groupKeys } = await refreshClientAndState();
    const taskRow = getRow(db, `SELECT * FROM tasks WHERE record_id = ?`, [taskId]);
    if (!taskRow) throw new Error(`Task not found: ${taskId}`);
    const task = parseRawRow(taskRow);
    const commentEnvelope = outboundComment(config.appNpub, session, groupKeys, task, {
      recordId: crypto.randomUUID(),
      body: options.body,
      parentCommentId: options.parent ?? null,
    });
    const result = await syncRecordsAndRefresh(client, config, session, [commentEnvelope]);
    printResult(result);
  });
1044
+
1045
// Reply in-thread to an existing task comment; the target task is looked
// up from the parent comment's target_record_id.
tasks.command('reply')
  .argument('<commentId>')
  .requiredOption('--body <body>')
  .action(async (commentId, options) => {
    const { client, db, config, session, groupKeys } = await refreshClientAndState();
    const parentComment = findCommentRow(db, commentId);
    if (!parentComment) throw new Error(`Comment not found: ${commentId}`);
    const taskRow = getRow(db, `SELECT * FROM tasks WHERE record_id = ?`, [parentComment.target_record_id]);
    if (!taskRow) throw new Error(`Target task not found: ${parentComment.target_record_id}`);
    const targetTask = parseRawRow(taskRow);
    const replyEnvelope = outboundComment(config.appNpub, session, groupKeys, targetTask, {
      recordId: crypto.randomUUID(),
      body: options.body,
      parentCommentId: commentId,
    });
    printResult(await syncRecordsAndRefresh(client, config, session, [replyEnvelope]));
  });
1062
+
1063
// Attach a voice note to a task as a comment. Two envelopes are published
// together: the encrypted audio note and the comment that references it.
tasks.command('voice')
  .argument('<taskId>')
  .requiredOption('--file <path>')
  .option('--body <body>', 'optional text comment body', '')
  .option('--title <title>', 'audio note title')
  .option('--parent <commentId>', 'parent comment id for thread reply')
  .action(async (taskId, options) => {
    const { client, db, config, session, groupKeys } = await refreshClientAndState();
    const row = getRow(db, `SELECT * FROM tasks WHERE record_id = ?`, [taskId]);
    if (!row) throw new Error(`Task not found: ${taskId}`);
    const task = parseRawRow(row);
    // The comment id is minted up-front so the audio note can point at it.
    const commentId = crypto.randomUUID();
    const title = options.title || `Task Voice: ${new Date().toISOString()}`;
    const { audioEnvelope, attachment } = await createAudioAttachmentBatch({
      db,
      config,
      session,
      client,
      groupKeys,
      filePath: options.file,
      title,
      targetRecordId: commentId,
      targetRecordFamilyHash: recordFamilyHash(config.appNpub, 'comment'),
      targetGroupIds: task.group_ids || [],
      // Prefer the task's board group for writes; fall back to its first group.
      writeGroupId: task.board_group_id || task.group_ids?.[0] || null,
    });
    const commentEnvelope = outboundComment(config.appNpub, session, groupKeys, task, {
      recordId: commentId,
      body: options.body || '',
      parentCommentId: options.parent ?? null,
      attachments: [attachment],
    });
    // Audio envelope goes first so the comment's attachment resolves.
    printResult(await syncRecordsAndRefresh(client, config, session, [audioEnvelope, commentEnvelope]));
  });
1097
+
1098
// Upload an image to storage and post it as a task comment via a
// markdown storage link.
tasks.command('comment-image')
  .argument('<taskId>')
  .requiredOption('--file <path>')
  .option('--body <body>', 'optional text before image', '')
  .option('--parent <commentId>', 'parent comment id for thread reply')
  .action(async (taskId, options) => {
    const { client, db, config, session, groupKeys } = await refreshClientAndState();
    const taskRow = getRow(db, `SELECT * FROM tasks WHERE record_id = ?`, [taskId]);
    if (!taskRow) throw new Error(`Task not found: ${taskId}`);
    const task = parseRawRow(taskRow);
    const uploaded = await uploadFileToStorage(client, options.file, {
      ownerNpub: config.workspaceOwnerNpub,
      accessGroupIds: resolveStorageAccessGroupIds(db, task.group_ids || []),
      contentType: detectMimeType(options.file, 'image/png'),
      fileName: defaultFileName(options.file, 'task-comment-image'),
    });
    const markdown = createStorageMarkdown(uploaded.object_id, uploaded.file_name);
    let body = markdown;
    if (options.body) body = `${options.body}\n\n${markdown}`;
    const envelope = outboundComment(config.appNpub, session, groupKeys, task, {
      recordId: crypto.randomUUID(),
      body,
      parentCommentId: options.parent ?? null,
    });
    printResult(await syncRecordsAndRefresh(client, config, session, [envelope]));
  });
1123
+
1124
const chat = program.command('chat');

/**
 * Create a chat channel. The channel is attached to an explicit --group,
 * or to the workspace's primary group when --group is omitted; the caller
 * is always included (deduplicated) in the participant list.
 */
chat.command('create')
  .requiredOption('--title <title>')
  .option('--group <groupRef>')
  .option('--participant <npub...>', 'participant npubs')
  .action(async (options) => {
    const { client, db, config, session, groupKeys } = await refreshClientAndState();
    const primaryGroup = options.group
      ? findGroupByRef(db, options.group)
      : requirePrimaryGroup(db);
    if (!primaryGroup) {
      // Bug fix: the old message always interpolated options.group, which
      // printed "Group not found: undefined" on the no---group path.
      throw new Error(options.group
        ? `Group not found: ${options.group}`
        : 'No primary group available; pass --group <groupRef>');
    }
    const groupId = primaryGroup.group_id;
    // Dedupe and drop empty entries; the sender is always a participant.
    const participantNpubs = [...new Set([session.npub, ...(options.participant || [])].filter(Boolean))];
    const envelope = outboundChannel(config.appNpub, session, groupKeys, {
      recordId: crypto.randomUUID(),
      ownerNpub: config.workspaceOwnerNpub,
      title: options.title,
      groupIds: [groupId],
      participantNpubs,
      version: 1,
      previousVersion: 0,
      writeGroupNpub: groupId,
    });
    printResult(await syncRecordsAndRefresh(client, config, session, [envelope]));
  });
1149
+
1150
// List non-deleted channels from the local mirror.
chat.command('channels')
  .action(() => {
    const db = openDb();
    const channelRows = getRows(db, `SELECT * FROM channels WHERE record_state != 'deleted' ORDER BY updated_at DESC`);
    if (program.opts().json) {
      console.log(JSON.stringify(channelRows, null, 2));
      return;
    }
    for (const channel of channelRows) {
      console.log(`${channel.record_id} | ${channel.title}`);
    }
  });
1157
+
1158
// List channel messages: top-level posts by default, or one thread with
// --thread (root plus its replies). Storage links in bodies are resolved.
chat.command('messages')
  .argument('<channelId>')
  .option('--thread <messageId>')
  .action(async (channelId, options) => {
    const { client, db } = getClientAndState();
    let rows;
    if (options.thread) {
      rows = getRows(db, `SELECT * FROM messages WHERE channel_id = ? AND (record_id = ? OR parent_message_id = ?) ORDER BY updated_at ASC`, [channelId, options.thread, options.thread]);
    } else {
      rows = getRows(db, `SELECT * FROM messages WHERE channel_id = ? AND parent_message_id IS NULL ORDER BY updated_at ASC`, [channelId]);
    }
    const enriched = [];
    for (const row of rows) {
      enriched.push(await withResolvedLinks(client, row, 'body'));
    }
    if (program.opts().json) {
      console.log(JSON.stringify(enriched, null, 2));
      return;
    }
    for (const message of enriched) {
      console.log(`${message.record_id} | ${message.sender_npub} | ${message.body}`);
    }
  });
1171
+
1172
// Emit a compact "current context" payload for chat runtimes.
chat.command('context')
  .option('--channel <channelId>', 'explicit channel context')
  .option('--thread <messageId>', 'explicit thread root message id')
  .option('--message <messageId>', 'explicit anchor message id')
  .option('--limit <count>', 'recent message count', '6')
  .option('--format <format>', 'runtime output format', 'json')
  .action((options) => {
    const db = openDb();
    const payload = buildChatContextPayload(db, {
      channelId: options.channel,
      threadId: options.thread,
      messageId: options.message,
      limit: options.limit,
    });
    printRuntimeResult(payload, options.format);
  });
1188
+
1189
// Emit a paged message-history payload; --cursor continues a prior page.
chat.command('history')
  .option('--channel <channelId>', 'explicit channel context')
  .option('--thread <messageId>', 'explicit thread root message id')
  .option('--message <messageId>', 'explicit anchor message id')
  .option('--limit <count>', 'message count', '20')
  .option('--cursor <cursor>', 'history cursor from a previous response')
  .option('--format <format>', 'runtime output format', 'json')
  .action((options) => {
    const db = openDb();
    const payload = buildChatHistoryPayload(db, {
      channelId: options.channel,
      threadId: options.thread,
      messageId: options.message,
      limit: options.limit,
      cursor: options.cursor,
    });
    printRuntimeResult(payload, options.format);
  });
1207
+
1208
// Search the local workspace mirror for chat content.
chat.command('search')
  .requiredOption('--query <query>', 'search query for the local workspace mirror')
  .option('--channel <channelId>', 'explicit channel context')
  .option('--thread <messageId>', 'explicit thread root message id')
  .option('--message <messageId>', 'explicit anchor message id')
  .option('--reference <reference...>', 'structured reference hints: message:, thread:, channel:, or mention:...')
  .option('--deep', 'include bounded thread-level deep retrieval hits')
  .option('--limit <count>', 'result count', '10')
  .option('--format <format>', 'runtime output format', 'json')
  .action((options) => {
    const db = openDb();
    const payload = buildChatSearchPayload(db, {
      channelId: options.channel,
      threadId: options.thread,
      messageId: options.message,
      query: options.query,
      references: options.reference,
      // Commander sets boolean flags to true; coerce anything else to false.
      deep: options.deep === true,
      limit: options.limit,
    });
    printRuntimeResult(payload, options.format);
  });
1230
+
1231
// Emit threads related to the current context.
chat.command('related')
  .option('--channel <channelId>', 'explicit channel context')
  .option('--thread <messageId>', 'explicit thread root message id')
  .option('--message <messageId>', 'explicit anchor message id')
  .option('--deep', 'include bounded supporting message snippets')
  .option('--limit <count>', 'related thread count', '5')
  .option('--format <format>', 'runtime output format', 'json')
  .action((options) => {
    const db = openDb();
    const payload = buildChatRelatedPayload(db, {
      channelId: options.channel,
      threadId: options.thread,
      messageId: options.message,
      deep: options.deep === true,
      limit: options.limit,
    });
    printRuntimeResult(payload, options.format);
  });
1249
+
1250
// Post a message to a channel, threaded under --thread when given.
chat.command('send')
  .argument('<channelId>')
  .requiredOption('--body <body>')
  .option('--thread <messageId>')
  .action(async (channelId, options) => {
    const { client, db, config, session, groupKeys } = await refreshClientAndState();
    const channelRow = getRow(db, `SELECT * FROM channels WHERE record_id = ?`, [channelId]);
    if (!channelRow) throw new Error(`Channel not found: ${channelId}`);
    const channel = parseRawRow(channelRow);
    const messageEnvelope = outboundChatMessage(config.appNpub, session, groupKeys, channel, {
      recordId: crypto.randomUUID(),
      body: options.body,
      parentMessageId: options.thread ?? null,
    });
    printResult(await syncRecordsAndRefresh(client, config, session, [messageEnvelope]));
  });
1266
+
1267
// Reply into the "current" chat context. Target resolution (channel,
// thread, anchor message) is delegated to sendChatReplyCurrent, which
// calls back into sendReply below to actually publish the message.
chat.command('reply-current')
  .requiredOption('--body <body>')
  .option('--channel <channelId>', 'explicit channel context')
  .option('--thread <messageId>', 'explicit thread root message id')
  .option('--message <messageId>', 'explicit anchor message id')
  .option('--skip-refresh', 'skip post-send workspace refresh and only publish the record')
  .option('--format <format>', 'runtime output format', 'json')
  .action(async (options) => {
    const { client, db, config, session, groupKeys } = await ensureWorkspaceKey(getClientAndState());
    const output = await sendChatReplyCurrent(db, {
      channelId: options.channel,
      threadId: options.thread,
      messageId: options.message,
      body: options.body,
      // Invoked by the runtime with the resolved target; returns the new
      // message id so the runtime can report it.
      async sendReply({ channel_id: channelId, thread_id: threadId, body }) {
        const channelRow = getRow(db, `SELECT * FROM channels WHERE record_id = ?`, [channelId]);
        if (!channelRow) throw new Error(`Channel not found locally: ${channelId}`);
        const channel = parseRawRow(channelRow);
        const messageId = crypto.randomUUID();
        const envelope = outboundChatMessage(config.appNpub, session, groupKeys, channel, {
          recordId: messageId,
          body,
          parentMessageId: threadId,
        });
        if (options.skipRefresh) {
          // Publish only; the local mirror is left stale by request.
          await client.syncRecords([envelope]);
        } else {
          await syncRecordsAndRefresh(client, config, session, [envelope]);
        }
        return { message_id: messageId };
      },
    });
    printRuntimeResult(output, options.format);
  });
1301
+
1302
// Reply inside an explicit thread of an explicit channel.
chat.command('reply')
  .argument('<channelId>')
  .requiredOption('--thread <messageId>')
  .requiredOption('--body <body>')
  .action(async (channelId, options) => {
    const { client, db, config, session, groupKeys } = await refreshClientAndState();
    const channelRow = getRow(db, `SELECT * FROM channels WHERE record_id = ?`, [channelId]);
    if (!channelRow) throw new Error(`Channel not found: ${channelId}`);
    const channel = parseRawRow(channelRow);
    const replyEnvelope = outboundChatMessage(config.appNpub, session, groupKeys, channel, {
      recordId: crypto.randomUUID(),
      body: options.body,
      parentMessageId: options.thread,
    });
    printResult(await syncRecordsAndRefresh(client, config, session, [replyEnvelope]));
  });
1318
+
1319
// Upload an image and post it to a channel as a markdown storage link.
chat.command('image')
  .argument('<channelId>')
  .requiredOption('--file <path>')
  .option('--body <body>', 'optional text before image', '')
  .option('--thread <messageId>')
  .action(async (channelId, options) => {
    const { client, db, config, session, groupKeys } = await refreshClientAndState();
    const channelRow = getRow(db, `SELECT * FROM channels WHERE record_id = ?`, [channelId]);
    if (!channelRow) throw new Error(`Channel not found: ${channelId}`);
    const channel = parseRawRow(channelRow);
    const uploaded = await uploadFileToStorage(client, options.file, {
      ownerNpub: config.workspaceOwnerNpub,
      accessGroupIds: resolveStorageAccessGroupIds(db, channel.group_ids || []),
      contentType: detectMimeType(options.file, 'image/png'),
      fileName: defaultFileName(options.file, 'chat-image'),
    });
    const markdown = createStorageMarkdown(uploaded.object_id, uploaded.file_name);
    let body = markdown;
    if (options.body) body = `${options.body}\n\n${markdown}`;
    const envelope = outboundChatMessage(config.appNpub, session, groupKeys, channel, {
      recordId: crypto.randomUUID(),
      body,
      parentMessageId: options.thread ?? null,
    });
    printResult(await syncRecordsAndRefresh(client, config, session, [envelope]));
  });
1344
+
1345
// Send a voice note to a channel. Two envelopes are published together:
// the encrypted audio note and the chat message that references it.
chat.command('voice')
  .argument('<channelId>')
  .requiredOption('--file <path>')
  .option('--body <body>', 'optional text message body', '')
  .option('--thread <messageId>')
  .option('--title <title>')
  .action(async (channelId, options) => {
    const { client, db, config, session, groupKeys } = await refreshClientAndState();
    const channelRow = getRow(db, `SELECT * FROM channels WHERE record_id = ?`, [channelId]);
    if (!channelRow) throw new Error(`Channel not found: ${channelId}`);
    const channel = parseRawRow(channelRow);
    // The message id is minted up-front so the audio note can point at it.
    const messageId = crypto.randomUUID();
    const title = options.title || `Chat Voice: ${new Date().toISOString()}`;
    const { audioEnvelope, attachment } = await createAudioAttachmentBatch({
      db,
      config,
      session,
      client,
      groupKeys,
      filePath: options.file,
      title,
      targetRecordId: messageId,
      targetRecordFamilyHash: recordFamilyHash(config.appNpub, 'chat_message'),
      targetGroupIds: channel.group_ids || [],
      // Channels have no board group; write via the first channel group.
      writeGroupId: channel.group_ids?.[0] || null,
    });
    const messageEnvelope = outboundChatMessage(config.appNpub, session, groupKeys, channel, {
      recordId: messageId,
      body: options.body || '',
      parentMessageId: options.thread ?? null,
      attachments: [attachment],
    });
    // Audio envelope goes first so the message's attachment resolves.
    printResult(await syncRecordsAndRefresh(client, config, session, [audioEnvelope, messageEnvelope]));
  });
1379
+
1380
const directories = program.command('directories');

// Create a directory, inheriting shares/groups from its parent when one
// is given.
directories.command('create')
  .requiredOption('--title <title>')
  .option('--parent-directory <directoryId>')
  .option('--group <groupRef>')
  .action(async (options) => {
    const { client, db, config, session, groupKeys } = await refreshClientAndState();
    let parentRow = null;
    if (options.parentDirectory) {
      parentRow = findDirectoryRow(db, options.parentDirectory);
      if (!parentRow) throw new Error(`Directory not found: ${options.parentDirectory}`);
    }
    const { groupIds, shares } = resolveRecordSharesAndGroups({
      db,
      explicitGroupRef: options.group ?? null,
      inherited: parentRow ? (parseRawRow(parentRow) || parentRow) : null,
    });
    const envelope = outboundDirectory(config.appNpub, session, groupKeys, {
      record_id: crypto.randomUUID(),
      owner_npub: config.workspaceOwnerNpub,
      title: options.title,
      parent_directory_id: options.parentDirectory ?? null,
      shares,
      group_ids: groupIds,
      version: 0,
    });
    printResult(await syncRecordsAndRefresh(client, config, session, [envelope]));
  });
1405
+
1406
// List non-deleted directories as a compact summary.
directories.command('list')
  .action(() => {
    const db = openDb();
    const rows = getRows(db, `SELECT * FROM directories WHERE record_state != 'deleted' ORDER BY updated_at DESC`);
    const summaries = rows.map((row) => ({
      record_id: row.record_id,
      title: row.title,
      parent_directory_id: row.parent_directory_id,
      group_ids: jsonField(row.group_ids_json),
      updated_at: row.updated_at,
    }));
    printResult(summaries);
  });
1419
+
1420
// Show a single directory record (full stored payload when available).
directories.command('show')
  .argument('<directoryId>')
  .action((directoryId) => {
    const db = openDb();
    const directoryRow = findDirectoryRow(db, directoryId);
    if (!directoryRow) throw new Error(`Directory not found: ${directoryId}`);
    const directory = parseRawRow(directoryRow) || directoryRow;
    printResult(directory);
  });
1428
+
1429
// Patch an existing directory record.
directories.command('update')
  .argument('<directoryId>')
  .option('--title <title>')
  .option('--parent-directory <directoryId>')
  .option('--clear-parent')
  .option('--group <groupRef>')
  .action(async (directoryId, options) => {
    const { client, db, config, session, groupKeys } = await refreshClientAndState();
    const directoryRow = findDirectoryRow(db, directoryId);
    if (!directoryRow) throw new Error(`Directory not found: ${directoryId}`);
    const directory = parseRawRow(directoryRow) || directoryRow;
    const patch = {};
    if (options.title !== undefined) {
      patch.title = options.title;
    }
    if (options.parentDirectory !== undefined) {
      patch.parent_directory_id = options.parentDirectory;
    }
    // --clear-parent wins over --parent-directory when both are supplied.
    if (options.clearParent) {
      patch.parent_directory_id = null;
    }
    if (options.group !== undefined) {
      const resolved = resolveRecordSharesAndGroups({ db, explicitGroupRef: options.group });
      patch.group_ids = resolved.groupIds;
      patch.shares = resolved.shares;
    }
    const envelope = outboundDirectory(config.appNpub, session, groupKeys, directory, patch);
    printResult(await syncRecordsAndRefresh(client, config, session, [envelope]));
  });
1452
+
1453
const docs = program.command('docs');

// Create a document; content comes from --content-file (preferred) or
// inline --content, and shares/groups/scope may be inherited.
docs.command('create')
  .requiredOption('--title <title>')
  .option('--content <content>', '', '')
  .option('--content-file <path>')
  .option('--group <groupRef>')
  .option('--parent-directory <directoryId>')
  .option('--scope <scopeId>')
  .action(async (options) => {
    const { client, db, config, session, groupKeys } = await refreshClientAndState();
    const inherited = resolveCreateInheritedRecord(db, {
      parentDirectoryId: options.parentDirectory ?? null,
      scopeRef: options.scope ?? null,
    });
    const { groupIds, shares } = resolveRecordSharesAndGroups({
      db,
      explicitGroupRef: options.group,
      inherited,
    });
    // --content-file takes precedence over inline --content.
    let content = options.content;
    if (options.contentFile) {
      content = readFileSync(options.contentFile, 'utf8');
    }
    let scopePatch;
    if (options.scope) {
      scopePatch = resolveScopeLinkPatch(db, options.scope);
    } else {
      scopePatch = scopePatchFromRecord(inherited);
    }
    const envelope = outboundDocument(config.appNpub, session, groupKeys, {
      record_id: crypto.randomUUID(),
      owner_npub: config.workspaceOwnerNpub,
      title: options.title,
      content: content ?? '',
      parent_directory_id: options.parentDirectory ?? null,
      ...scopePatch,
      shares,
      group_ids: groupIds,
      version: 0,
      signature_npub: session.npub,
    });
    printResult(await syncRecordsAndRefresh(client, config, session, [envelope]));
  });
1488
+
1489
// List non-deleted documents, newest first. Honors the global --json flag.
docs.command('list')
  .action(() => {
    const db = openDb();
    const query = `SELECT * FROM documents WHERE record_state != 'deleted' ORDER BY updated_at DESC`;
    const rows = getRows(db, query);
    if (program.opts().json) {
      console.log(JSON.stringify(rows, null, 2));
    } else {
      for (const row of rows) {
        console.log(`${row.record_id} | ${row.title}`);
      }
    }
  });
1496
+
1497
// Show a single document by id, resolving embedded storage links in `content`.
docs.command('show')
  .argument('<docId>')
  .action(async (docId) => {
    const { client, db } = getClientAndState();
    const row = getRow(db, `SELECT * FROM documents WHERE record_id = ?`, [docId]);
    if (!row) throw new Error(`Document not found: ${docId}`);
    const enriched = await withResolvedLinks(client, row, 'content');
    const asJson = program.opts().json;
    console.log(asJson ? JSON.stringify(enriched, null, 2) : enriched);
  });
1507
+
1508
// Add a comment to a document, optionally as a thread reply and/or anchored
// to a specific line of the document.
docs.command('comment')
  .argument('<docId>')
  .requiredOption('--body <body>')
  .option('--parent <commentId>', 'parent comment id for thread reply')
  // Wrap parseInt so commander's (value, previous) calling convention cannot
  // leak the previously parsed value in as the radix (e.g. `--line 8 --line 9`
  // would otherwise evaluate Number.parseInt('9', 8)).
  .option('--line <line>', 'anchor line number', (value) => Number.parseInt(value, 10))
  .action(async (docId, options) => {
    const { client, db, config, session, groupKeys } = await refreshClientAndState();
    const row = getRow(db, `SELECT * FROM documents WHERE record_id = ?`, [docId]);
    if (!row) throw new Error(`Document not found: ${docId}`);
    const doc = parseRawRow(row);
    const envelope = outboundComment(config.appNpub, session, groupKeys, doc, {
      recordId: crypto.randomUUID(),
      body: options.body,
      parentCommentId: options.parent ?? null,
      // NOTE(review): text comments default the anchor to line 1 while the
      // image/voice variants default to null — confirm this asymmetry is intended.
      anchorLineNumber: Number.isFinite(options.line) ? options.line : 1,
    });
    printResult(await syncRecordsAndRefresh(client, config, session, [envelope]));
  });
1526
+
1527
// Reply to an existing comment: the reply is attached to the same document
// as its parent comment.
docs.command('reply')
  .argument('<commentId>')
  .requiredOption('--body <body>')
  .action(async (commentId, options) => {
    const { client, db, config, session, groupKeys } = await refreshClientAndState();
    const parentComment = findCommentRow(db, commentId);
    if (!parentComment) throw new Error(`Comment not found: ${commentId}`);
    const targetId = parentComment.target_record_id;
    const docRow = getRow(db, `SELECT * FROM documents WHERE record_id = ?`, [targetId]);
    if (!docRow) throw new Error(`Target document not found: ${targetId}`);
    const envelope = outboundComment(config.appNpub, session, groupKeys, parseRawRow(docRow), {
      recordId: crypto.randomUUID(),
      body: options.body,
      parentCommentId: commentId,
    });
    printResult(await syncRecordsAndRefresh(client, config, session, [envelope]));
  });
1544
+
1545
// Upload an image to storage and post it as a (possibly threaded) comment
// whose body embeds a storage markdown reference to the uploaded object.
docs.command('comment-image')
  .argument('<docId>')
  .requiredOption('--file <path>')
  .option('--body <body>', 'optional text before image', '')
  .option('--parent <commentId>')
  // Wrap parseInt with an explicit radix: commander passes (value, previous),
  // so a bare Number.parseInt would receive the previous value as the radix.
  .option('--line <line>', 'anchor line number', (value) => Number.parseInt(value, 10))
  .action(async (docId, options) => {
    const { client, db, config, session, groupKeys } = await refreshClientAndState();
    const row = getRow(db, `SELECT * FROM documents WHERE record_id = ?`, [docId]);
    if (!row) throw new Error(`Document not found: ${docId}`);
    const doc = parseRawRow(row);
    // Upload first; the comment references the resulting storage object id.
    const uploaded = await uploadFileToStorage(client, options.file, {
      ownerNpub: config.workspaceOwnerNpub,
      accessGroupIds: resolveStorageAccessGroupIds(db, doc.group_ids || []),
      contentType: detectMimeType(options.file, 'image/png'),
      fileName: defaultFileName(options.file, 'doc-comment-image'),
    });
    const markdown = createStorageMarkdown(uploaded.object_id, uploaded.file_name);
    const body = options.body ? `${options.body}\n\n${markdown}` : markdown;
    const envelope = outboundComment(config.appNpub, session, groupKeys, doc, {
      recordId: crypto.randomUUID(),
      body,
      parentCommentId: options.parent ?? null,
      anchorLineNumber: Number.isFinite(options.line) ? options.line : null,
    });
    printResult(await syncRecordsAndRefresh(client, config, session, [envelope]));
  });
1572
+
1573
// Post a voice comment: uploads the audio as an audio-note record and attaches
// it to a new comment. Both envelopes are synced together so the comment never
// references a missing audio note.
docs.command('voice')
  .argument('<docId>')
  .requiredOption('--file <path>')
  .option('--body <body>', 'optional text comment body', '')
  .option('--title <title>')
  .option('--parent <commentId>')
  // Wrap parseInt with an explicit radix: commander passes (value, previous),
  // so a bare Number.parseInt would receive the previous value as the radix.
  .option('--line <line>', 'anchor line number', (value) => Number.parseInt(value, 10))
  .action(async (docId, options) => {
    const { client, db, config, session, groupKeys } = await refreshClientAndState();
    const row = getRow(db, `SELECT * FROM documents WHERE record_id = ?`, [docId]);
    if (!row) throw new Error(`Document not found: ${docId}`);
    const doc = parseRawRow(row);
    // The comment id is minted up front so the audio note can target it.
    const commentId = crypto.randomUUID();
    const title = options.title || `Comment Voice: ${new Date().toISOString()}`;
    const { audioEnvelope, attachment } = await createAudioAttachmentBatch({
      db,
      config,
      session,
      client,
      groupKeys,
      filePath: options.file,
      title,
      targetRecordId: commentId,
      targetRecordFamilyHash: recordFamilyHash(config.appNpub, 'comment'),
      targetGroupIds: doc.group_ids || [],
      writeGroupId: doc.group_ids?.[0] || null,
    });
    const commentEnvelope = outboundComment(config.appNpub, session, groupKeys, doc, {
      recordId: commentId,
      body: options.body || '',
      parentCommentId: options.parent ?? null,
      anchorLineNumber: Number.isFinite(options.line) ? options.line : null,
      attachments: [attachment],
    });
    printResult(await syncRecordsAndRefresh(client, config, session, [audioEnvelope, commentEnvelope]));
  });
1609
+
1610
// Update a document's title, content, and/or scope link.
docs.command('update')
  .argument('<docId>')
  .option('--title <title>')
  .option('--content <content>')
  .option('--content-file <path>')
  .option('--scope <scopeId>')
  .option('--clear-scope')
  .action(async (docId, options) => {
    const { client, db, config, session, groupKeys } = await refreshClientAndState();
    const row = getRow(db, `SELECT * FROM documents WHERE record_id = ?`, [docId]);
    if (!row) throw new Error(`Document not found: ${docId}`);
    const doc = parseRawRow(row);
    // Precedence: --content-file, then --content, then keep the existing content.
    let nextContent = doc.content;
    if (options.contentFile) {
      nextContent = readFileSync(options.contentFile, 'utf8');
    } else if (typeof options.content === 'string') {
      nextContent = options.content;
    }
    const patch = {
      title: options.title ?? doc.title,
      content: nextContent,
      ...resolveScopeLinkPatch(db, options.scope, options),
    };
    printResult(await syncRecordsAndRefresh(client, config, session, [
      outboundDocument(config.appNpub, session, groupKeys, doc, patch),
    ]));
  });
1632
+
1633
// `audio` command group: list/show/transcribe workspace audio notes.
const audio = program.command('audio');
audio.command('list')
  .action(() => {
    const db = openDb();
    const rows = getRows(db, `SELECT * FROM audio_notes WHERE record_state != 'deleted' ORDER BY updated_at DESC`);
    // Project each row down to a compact listing shape.
    const output = rows.map(({ record_id, target_record_id, title, transcript_status, updated_at }) => ({
      record_id,
      target_record_id,
      title,
      transcript_status,
      updated_at,
    }));
    printResult(output);
  });
1647
+
1648
// Show one audio note; prefer the decoded payload, fall back to the raw row.
audio.command('show')
  .argument('<audioNoteId>')
  .action((audioNoteId) => {
    const db = openDb();
    const row = getRow(db, `SELECT * FROM audio_notes WHERE record_id = ?`, [audioNoteId]);
    if (!row) throw new Error(`Audio note not found: ${audioNoteId}`);
    const decoded = parseRawRow(row);
    printResult(decoded || row);
  });
1656
+
1657
// Manually set the transcript (and optionally summary/preview/status) of an
// audio note by emitting a bumped-version envelope.
audio.command('update-transcript')
  .argument('<audioNoteId>')
  .requiredOption('--transcript <text>')
  .option('--summary <text>')
  .option('--preview <text>')
  .option('--status <status>', 'pending|processing|done|failed', 'done')
  .action(async (audioNoteId, options) => {
    const { client, db, config, session, groupKeys } = await refreshClientAndState();
    const row = getRow(db, `SELECT * FROM audio_notes WHERE record_id = ?`, [audioNoteId]);
    if (!row) throw new Error(`Audio note not found: ${audioNoteId}`);
    const note = parseRawRow(row);
    const currentVersion = note.version ?? 1;
    // Default preview: the first 12 whitespace-separated words of the transcript.
    const fallbackPreview = String(options.transcript).trim().split(/\s+/).slice(0, 12).join(' ');
    const envelope = outboundAudioNote(config.appNpub, session, groupKeys, {
      recordId: note.record_id,
      ownerNpub: note.owner_npub,
      targetRecordId: note.target_record_id,
      targetRecordFamilyHash: note.target_record_family_hash,
      title: note.title,
      storageObjectId: note.storage_object_id,
      mimeType: note.mime_type,
      durationSeconds: note.duration_seconds,
      sizeBytes: note.size_bytes ?? 0,
      mediaEncryption: note.media_encryption ?? null,
      waveformPreview: note.waveform_preview ?? [],
      transcriptStatus: options.status || 'done',
      transcriptPreview: options.preview || fallbackPreview,
      transcript: options.transcript,
      summary: options.summary ?? note.summary ?? null,
      targetGroupIds: note.group_ids || [],
      version: currentVersion + 1,
      previousVersion: currentVersion,
      writeGroupNpub: note.group_ids?.[0] || null,
    });
    printResult(await syncRecordsAndRefresh(client, config, session, [envelope]));
  });
1691
+
1692
// Transcribe one audio note (by id) or every pending/untranscribed note.
// Downloads and decrypts each note's media, runs a local whisper model over
// it, then publishes an updated envelope carrying the transcript.
audio.command('transcribe')
  .argument('[audioNoteId]')
  .option('--pending', 'transcribe all pending audio notes when no id is provided')
  .option('--force', 're-transcribe even if transcript already exists')
  // The previous default was a developer-machine absolute path published with
  // the package; allow overriding via WHISPER_MODEL and keep the old path only
  // as a last-resort fallback for backward compatibility.
  .option(
    '--model <path>',
    'path to a local whisper model (or set WHISPER_MODEL)',
    process.env.WHISPER_MODEL || '/Users/mini/code/Sov Eng/ideapipe/whisper-models/ggml-medium.bin',
  )
  .action(async (audioNoteId, options) => {
    const { client, db, config, session, groupKeys } = await refreshClientAndState();
    // With an explicit id we transcribe exactly that note; otherwise select
    // every note that still needs a transcript (or all of them under --force).
    const rows = audioNoteId
      ? [getRow(db, `SELECT * FROM audio_notes WHERE record_id = ?`, [audioNoteId])].filter(Boolean)
      : getRows(
          db,
          `SELECT * FROM audio_notes WHERE record_state != 'deleted' AND (? = 1 OR transcript_status = 'pending' OR transcript IS NULL OR transcript = '') ORDER BY updated_at DESC`,
          [options.force ? 1 : 0],
        );

    if (rows.length === 0) {
      printResult([]);
      return;
    }

    const output = [];
    for (const row of rows) {
      const note = parseRawRow(row);
      if (!note) continue;
      // Guard against re-doing finished work unless --force is given.
      if (!options.force && note.transcript_status === 'done' && note.transcript) {
        output.push({
          record_id: note.record_id,
          skipped: true,
          reason: 'already transcribed',
        });
        continue;
      }

      const audioBuffer = await downloadAndDecryptAudioNote(client, note);
      const transcript = transcribeAudioBuffer(audioBuffer, note.record_id, note.mime_type, options.model);
      if (!transcript) {
        output.push({
          record_id: note.record_id,
          skipped: true,
          reason: 'empty transcript',
        });
        continue;
      }

      const preview = audioTranscriptPreview(transcript);
      const summary = audioTranscriptSummary(transcript);
      // Re-emit the full note payload with the transcript fields filled in and
      // the version bumped.
      const envelope = outboundAudioNote(config.appNpub, session, groupKeys, {
        recordId: note.record_id,
        ownerNpub: note.owner_npub,
        targetRecordId: note.target_record_id,
        targetRecordFamilyHash: note.target_record_family_hash,
        title: note.title,
        storageObjectId: note.storage_object_id,
        mimeType: note.mime_type,
        durationSeconds: note.duration_seconds,
        sizeBytes: note.size_bytes ?? 0,
        mediaEncryption: note.media_encryption ?? null,
        waveformPreview: note.waveform_preview ?? [],
        transcriptStatus: 'done',
        transcriptPreview: preview,
        transcript,
        summary,
        targetGroupIds: note.group_ids || [],
        version: (note.version ?? 1) + 1,
        previousVersion: note.version ?? 1,
        writeGroupNpub: note.group_ids?.[0] || null,
      });

      // Sync each note individually so one failure does not lose the rest.
      await client.syncRecords([envelope]);
      output.push({
        record_id: note.record_id,
        transcript,
        transcript_preview: preview,
        summary,
      });
    }

    // Pull the updated records back into the local db.
    await syncWorkspace({ client, config, session, quiet: true });
    printResult(output);
  });
1772
+
1773
// `reports` command group: declarative report records (metric/timeseries/table/text).
const reports = program.command('reports');
reports.command('list')
  .option('--type <type>', 'metric|timeseries|table|text')
  .option('--scope <scopeId>')
  .option('--surface <surface>')
  .action((options) => {
    const db = openDb();
    const conditions = [`record_state != 'deleted'`];
    const params = [];
    if (options.type) {
      conditions.push(`declaration_type = ?`);
      params.push(normalizeReportType(options.type));
    }
    if (options.scope) {
      // A report matches if the scope id appears at any lineage level.
      conditions.push(`(scope_id = ? OR scope_l1_id = ? OR scope_l2_id = ? OR scope_l3_id = ? OR scope_l4_id = ? OR scope_l5_id = ?)`);
      params.push(...Array(6).fill(options.scope));
    }
    if (options.surface) {
      conditions.push(`surface = ?`);
      params.push(options.surface);
    }
    const sql = `SELECT * FROM reports WHERE ${conditions.join(' AND ')} ORDER BY COALESCE(generated_at, updated_at) DESC`;
    const output = getRows(db, sql, params).map((row) => reportFromRow(row));
    if (program.opts().json) {
      console.log(JSON.stringify(output, null, 2));
    } else {
      for (const report of output) {
        console.log(`${report.record_id} | ${report.declaration_type} | ${report.title} | ${report.generated_at || report.updated_at || ''} | ${reportSummary(report)}`);
      }
    }
  });
1803
+
1804
// Show one report: raw JSON under --json, otherwise a human-readable render.
reports.command('show')
  .argument('<reportId>')
  .action((reportId) => {
    const db = openDb();
    const row = findReportRow(db, reportId);
    if (!row) throw new Error(`Report not found: ${reportId}`);
    const report = reportFromRow(row);
    const output = program.opts().json
      ? JSON.stringify(report, null, 2)
      : renderReportHuman(report);
    console.log(output);
  });
1814
+
1815
// Create a report record. The declaration payload comes from --payload-file,
// inline --payload JSON, or the resolver's default.
reports.command('create')
  .requiredOption('--title <title>')
  .requiredOption('--type <type>', 'metric|timeseries|table|text')
  .option('--payload-file <path>', 'JSON file containing the declaration payload')
  .option('--payload <json>', 'inline JSON payload')
  .option('--surface <surface>', 'render target surface', 'flightdeck')
  .option('--group <groupRef>')
  .option('--scope <scopeId>')
  .option('--generated-at <iso>')
  .action(async (options) => {
    const { client, db, config, session, groupKeys } = await refreshClientAndState();
    const primaryGroup = options.group
      ? findGroupByRef(db, options.group)
      : requirePrimaryGroup(db);
    if (!primaryGroup) {
      // Distinguish an unknown --group from a workspace with no primary group;
      // previously the no-group path reported "Group not found: undefined".
      throw new Error(options.group
        ? `Group not found: ${options.group}`
        : 'No primary group configured for this workspace');
    }
    const envelope = outboundReport(config.appNpub, session, groupKeys, {
      record_id: crypto.randomUUID(),
      owner_npub: config.workspaceOwnerNpub,
      title: options.title,
      declaration_type: normalizeReportType(options.type),
      payload: resolveReportPayloadInput(options),
      surface: options.surface ?? 'flightdeck',
      generated_at: options.generatedAt ?? new Date().toISOString(),
      ...resolveScopeLinkPatch(db, options.scope),
      group_ids: [primaryGroup.group_id],
      version: 0,
      record_state: 'active',
    });
    printResult(await syncRecordsAndRefresh(client, config, session, [envelope]));
  });
1845
+
1846
// Patch-update a report: only explicitly supplied options are included in the
// outgoing patch; everything else keeps its stored value.
reports.command('update')
  .argument('<reportId>')
  .option('--title <title>')
  .option('--type <type>', 'metric|timeseries|table|text')
  .option('--payload-file <path>', 'JSON file containing the declaration payload')
  .option('--payload <json>', 'inline JSON payload')
  .option('--surface <surface>')
  .option('--group <groupRef>')
  .option('--scope <scopeId>')
  .option('--clear-scope')
  .option('--generated-at <iso>')
  .action(async (reportId, options) => {
    const { client, db, config, session, groupKeys } = await refreshClientAndState();
    const row = findReportRow(db, reportId);
    if (!row) throw new Error(`Report not found: ${reportId}`);
    const existing = reportFromRow(row);
    const has = (key) => options[key] !== undefined;
    const patch = {};
    if (has('title')) patch.title = options.title;
    if (has('type')) patch.declaration_type = normalizeReportType(options.type, existing.declaration_type);
    if (has('surface')) patch.surface = options.surface;
    if (has('generatedAt')) patch.generated_at = options.generatedAt;
    if (options.payloadFile || has('payload')) {
      patch.payload = resolveReportPayloadInput(options, existing.payload);
    }
    if (has('group')) {
      const group = findGroupByRef(db, options.group);
      if (!group) throw new Error(`Group not found: ${options.group}`);
      patch.group_ids = [group.group_id];
    }
    if (has('scope') || options.clearScope) {
      Object.assign(patch, resolveScopeLinkPatch(db, options.scope, options));
    }
    const envelope = outboundReport(config.appNpub, session, groupKeys, existing, patch);
    printResult(await syncRecordsAndRefresh(client, config, session, [envelope]));
  });
1881
+
1882
// Soft-delete a report by publishing a new version with record_state 'deleted'.
reports.command('delete')
  .argument('<reportId>')
  .action(async (reportId) => {
    const { client, db, config, session, groupKeys } = await refreshClientAndState();
    const row = findReportRow(db, reportId);
    if (!row) throw new Error(`Report not found: ${reportId}`);
    const tombstone = outboundReport(config.appNpub, session, groupKeys, reportFromRow(row), {
      record_state: 'deleted',
    });
    printResult(await syncRecordsAndRefresh(client, config, session, [tombstone]));
  });
1894
+
1895
// `scopes` command group: hierarchical l1-l5 scope records.
const scopes = program.command('scopes');
scopes.command('create')
  .requiredOption('--title <title>')
  .option('--description <description>')
  .option('--parent <scopeId>')
  .option('--group <groupRef>')
  .action(async (options) => {
    const { client, db, config, session, groupKeys } = await refreshClientAndState();
    const sharing = resolveRecordSharesAndGroups({ db, explicitGroupRef: options.group ?? null });
    const scopeId = crypto.randomUUID();
    // Level and l1..l5 lineage ids are derived from the optional parent scope.
    const hierarchy = resolveScopeHierarchy(db, scopeId, { parentId: options.parent ?? null });
    const envelope = outboundScope(config.appNpub, session, groupKeys, {
      record_id: scopeId,
      owner_npub: config.workspaceOwnerNpub,
      title: options.title,
      description: options.description ?? '',
      ...hierarchy,
      shares: sharing.shares,
      group_ids: sharing.groupIds,
      version: 0,
    });
    printResult(await syncRecordsAndRefresh(client, config, session, [envelope]));
  });
1918
+
1919
// List non-deleted scopes with their full lineage columns.
scopes.command('list')
  .action(() => {
    const db = openDb();
    const rows = getRows(db, `SELECT * FROM scopes WHERE record_state != 'deleted' ORDER BY updated_at DESC`);
    printResult(rows.map((row) => ({
      record_id: row.record_id,
      level: row.level,
      title: row.title,
      parent_id: row.parent_id,
      l1_id: row.l1_id,
      l2_id: row.l2_id,
      l3_id: row.l3_id,
      l4_id: row.l4_id,
      l5_id: row.l5_id,
      group_ids: jsonField(row.group_ids_json),
      updated_at: row.updated_at,
    })));
  });
1938
+
1939
// Show one scope; prefer the decoded payload, fall back to the raw row.
scopes.command('show')
  .argument('<scopeId>')
  .action((scopeId) => {
    const db = openDb();
    const row = findScopeRow(db, scopeId);
    if (!row) throw new Error(`Scope not found: ${scopeId}`);
    const decoded = parseRawRow(row);
    printResult(decoded || row);
  });
1947
+
1948
// Patch-update a scope: title, description, sharing group, and/or parent.
// Reparenting (or --clear-parent) recomputes the l1-l5 hierarchy fields.
scopes.command('update')
  .argument('<scopeId>')
  .option('--title <title>')
  .option('--description <description>')
  .option('--parent <scopeId>')
  .option('--clear-parent')
  .option('--group <groupRef>')
  .action(async (scopeId, options) => {
    const { client, db, config, session, groupKeys } = await refreshClientAndState();
    const row = findScopeRow(db, scopeId);
    if (!row) throw new Error(`Scope not found: ${scopeId}`);
    const scope = parseRawRow(row) || row;
    const patch = {};
    if (options.title !== undefined) patch.title = options.title;
    if (options.description !== undefined) patch.description = options.description;
    if (options.group !== undefined) {
      const sharing = resolveRecordSharesAndGroups({ db, explicitGroupRef: options.group });
      patch.group_ids = sharing.groupIds;
      patch.shares = sharing.shares;
    }
    if (options.clearParent || options.parent !== undefined) {
      // --clear-parent detaches the scope; otherwise reparent (defaulting to
      // the current parent when --parent is present but empty).
      const nextParentId = options.clearParent ? null : (options.parent ?? scope.parent_id ?? null);
      Object.assign(patch, resolveScopeHierarchy(db, scopeId, { parentId: nextParentId }));
    }
    printResult(await syncRecordsAndRefresh(client, config, session, [
      outboundScope(config.appNpub, session, groupKeys, scope, patch),
    ]));
  });
1975
+
1976
// `migrate` command group: one-off data migrations run against the workspace.
const migrate = program.command('migrate');
// Recomputes scope lineage (level + l1..l5 ids) for every scope and then
// re-tags all scoped record families (tasks, documents, directories,
// channels, reports) whose denormalized scope_l* columns have drifted.
// Order matters throughout: parent scopes must be processed before children
// so child lineage is derived from the already-corrected parent.
migrate.command('scope-lineage')
  .description('Migrate legacy scope fields (product/project/deliverable) to generic l1-l5 lineage')
  .option('--dry-run', 'Show what would be migrated without making changes')
  .action(async (options) => {
    const { client, db, config, session, groupKeys } = await refreshClientAndState();
    const dryRun = options.dryRun === true;

    // Migrate scope records (parents first, ordered by depth)
    const scopeRows = getRows(db, `SELECT * FROM scopes WHERE record_state != 'deleted' ORDER BY level ASC, updated_at ASC`);
    const scopeMap = new Map();
    const migratedScopes = [];

    // Build scope map for lineage lookups
    for (const row of scopeRows) {
      const scope = parseRawRow(row) || row;
      scopeMap.set(scope.record_id, scope);
    }

    // Process scopes by depth (l1 first, then l2, etc.)
    // Levels are normalized first so legacy names sort into the l1..l5 order;
    // unknown levels sort last (index -1 mapped to 99 below).
    const depthOrder = ['l1', 'l2', 'l3', 'l4', 'l5'];
    const normalizedScopes = scopeRows.map((row) => {
      const scope = parseRawRow(row) || row;
      const level = normalizeScopeLevel(scope.level) ?? scope.level;
      return { ...scope, level };
    });
    normalizedScopes.sort((a, b) => {
      // `db2` avoids shadowing the sqlite handle `db` in the outer scope.
      const da = depthOrder.indexOf(a.level);
      const db2 = depthOrder.indexOf(b.level);
      return (da === -1 ? 99 : da) - (db2 === -1 ? 99 : db2);
    });

    for (const scope of normalizedScopes) {
      // Re-derive lineage from parent
      // The parent lookup hits scopeMap, which is updated as we go, so a
      // child always sees its parent's corrected lineage (parents sort first).
      const parent = scope.parent_id ? scopeMap.get(scope.parent_id) : null;
      const lineage = computeScopeLineage(scope.record_id, scope.level, parent);

      // Check if anything changed
      const changed = scope.level !== lineage.level
        || scope.l1_id !== lineage.l1_id
        || scope.l2_id !== lineage.l2_id
        || scope.l3_id !== lineage.l3_id
        || scope.l4_id !== lineage.l4_id
        || scope.l5_id !== lineage.l5_id;

      if (!changed) {
        // Update the map so children can use canonical lineage
        scopeMap.set(scope.record_id, { ...scope, ...lineage });
        continue;
      }

      const updated = { ...scope, ...lineage };
      scopeMap.set(scope.record_id, updated);
      migratedScopes.push(updated);
    }

    if (!dryRun && migratedScopes.length > 0) {
      const envelopes = migratedScopes.map((scope) =>
        outboundScope(config.appNpub, session, groupKeys, scope)
      );
      // Sync in batches of 20 to keep request sizes bounded.
      for (let i = 0; i < envelopes.length; i += 20) {
        const batch = envelopes.slice(i, i + 20);
        await syncRecordsAndRefresh(client, config, session, batch);
      }
    }

    // Migrate scoped records (tasks, documents, directories, reports)
    // These three tables share the (appNpub, session, groupKeys, record)
    // outbound signature, so they can be handled generically.
    const scopedTables = [
      { table: 'tasks', outbound: outboundTask },
      { table: 'documents', outbound: outboundDocument },
      { table: 'directories', outbound: outboundDirectory },
    ];
    const migratedRecords = { scopes: migratedScopes.length };

    for (const { table, outbound } of scopedTables) {
      const rows = getRows(db, `SELECT * FROM ${table} WHERE record_state != 'deleted' AND scope_id IS NOT NULL`);
      const toMigrate = [];

      for (const row of rows) {
        const record = parseRawRow(row) || row;
        // Records pointing at a scope we did not load (deleted/missing) are skipped.
        const scope = scopeMap.get(record.scope_id);
        if (!scope) continue;

        const tags = buildScopeTags(scope);
        const changed = record.scope_l1_id !== tags.scope_l1_id
          || record.scope_l2_id !== tags.scope_l2_id
          || record.scope_l3_id !== tags.scope_l3_id
          || record.scope_l4_id !== tags.scope_l4_id
          || record.scope_l5_id !== tags.scope_l5_id;

        if (changed) {
          toMigrate.push({ ...record, ...tags });
        }
      }

      if (!dryRun && toMigrate.length > 0) {
        const envelopes = toMigrate.map((record) => outbound(config.appNpub, session, groupKeys, record));
        for (let i = 0; i < envelopes.length; i += 20) {
          const batch = envelopes.slice(i, i + 20);
          await syncRecordsAndRefresh(client, config, session, batch);
        }
      }

      migratedRecords[table] = toMigrate.length;
    }

    // Channels use a destructured outbound signature
    const channelRows = getRows(db, `SELECT * FROM channels WHERE record_state != 'deleted'`);
    const channelsToMigrate = [];
    for (const row of channelRows) {
      const record = parseRawRow(row) || row;
      if (!record.scope_id) continue;
      const scope = scopeMap.get(record.scope_id);
      if (!scope) continue;
      const tags = buildScopeTags(scope);
      const changed = record.scope_l1_id !== tags.scope_l1_id
        || record.scope_l2_id !== tags.scope_l2_id
        || record.scope_l3_id !== tags.scope_l3_id
        || record.scope_l4_id !== tags.scope_l4_id
        || record.scope_l5_id !== tags.scope_l5_id;
      if (changed) {
        channelsToMigrate.push({ ...record, ...tags });
      }
    }
    if (!dryRun && channelsToMigrate.length > 0) {
      const envelopes = channelsToMigrate.map((record) => outboundChannel(config.appNpub, session, groupKeys, {
        recordId: record.record_id,
        ownerNpub: record.owner_npub,
        title: record.title,
        groupIds: record.group_ids || [],
        participantNpubs: record.participant_npubs || [],
        scopeId: record.scope_id,
        scopeL1Id: record.scope_l1_id,
        scopeL2Id: record.scope_l2_id,
        scopeL3Id: record.scope_l3_id,
        scopeL4Id: record.scope_l4_id,
        scopeL5Id: record.scope_l5_id,
        version: record.version ?? 1,
        previousVersion: (record.version ?? 1) - 1,
      }));
      for (let i = 0; i < envelopes.length; i += 20) {
        const batch = envelopes.slice(i, i + 20);
        await syncRecordsAndRefresh(client, config, session, batch);
      }
    }
    migratedRecords.channels = channelsToMigrate.length;

    // Reports use a different outbound signature
    // NOTE(review): reports additionally carry scope_level copied from the
    // scope record — confirm outboundReport consumes it.
    const reportRows = getRows(db, `SELECT * FROM reports WHERE record_state != 'deleted' AND scope_id IS NOT NULL`);
    const reportsToMigrate = [];
    for (const row of reportRows) {
      const record = parseRawRow(row) || row;
      const scope = scopeMap.get(record.scope_id);
      if (!scope) continue;
      const tags = buildScopeTags(scope);
      const changed = record.scope_l1_id !== tags.scope_l1_id
        || record.scope_l2_id !== tags.scope_l2_id
        || record.scope_l3_id !== tags.scope_l3_id
        || record.scope_l4_id !== tags.scope_l4_id
        || record.scope_l5_id !== tags.scope_l5_id;
      if (changed) {
        reportsToMigrate.push({ ...record, ...tags, scope_level: scope.level });
      }
    }
    if (!dryRun && reportsToMigrate.length > 0) {
      const envelopes = reportsToMigrate.map((record) => outboundReport(config.appNpub, session, groupKeys, record));
      for (let i = 0; i < envelopes.length; i += 20) {
        const batch = envelopes.slice(i, i + 20);
        await syncRecordsAndRefresh(client, config, session, batch);
      }
    }
    migratedRecords.reports = reportsToMigrate.length;

    // Summary: per-family counts of records whose lineage was rewritten.
    printResult({ dryRun, migrated: migratedRecords });
  });
2151
+
2152
// Re-encrypts every record family's owner payload so it is readable by the
// workspace session key instead of (only) the operator's real identity key.
// For each record signed by the real identity: decrypt with the real session,
// then re-publish a bumped version whose owner payload and group payloads are
// encrypted by the workspace session. Partial failures are counted, not fatal.
migrate.command('owner-payloads')
  .description('Re-encrypt owner payloads from real identity to workspace session key')
  .option('--dry-run', 'Show what would be migrated without making changes')
  .option('--batch-size <size>', 'Records per sync batch', '20')
  .action(async (options) => {
    // `session` is the active signing session; realSession/wsSession expose the
    // real-identity and workspace keys separately for this migration.
    const { client, config, session, realSession, wsSession, groupKeys } = await refreshClientAndState();
    const dryRun = options.dryRun === true;
    // Number('') is 0 (falsy), so a missing/garbage --batch-size falls back to 20.
    const batchSize = Number(options.batchSize) || 20;

    if (!wsSession) {
      throw new Error('No workspace session key available. Run sync first to bootstrap the workspace key.');
    }

    console.log(`Migrating owner payloads for workspace ${config.workspaceOwnerNpub}`);
    console.log(`  Real identity: ${realSession.npub}`);
    console.log(`  Workspace key: ${wsSession.npub}`);
    if (dryRun) console.log('  Mode: DRY RUN (no changes will be written)');

    // Every record family that carries an owner payload.
    const families = [
      'channel', 'chat_message', 'task', 'document', 'directory',
      'comment', 'reaction', 'audio_note', 'scope', 'schedule', 'report',
    ];

    const migrated = {};
    let totalMigrated = 0;
    let totalSkipped = 0;
    let totalErrors = 0;

    for (const collection of families) {
      const hash = recordFamilyHash(config.appNpub, collection);
      // Records are fetched from the server, not the local db, so the
      // migration sees the authoritative ciphertext.
      const result = await client.fetchRecords(hash);
      const toMigrate = [];

      for (const record of result.records ?? []) {
        // Only records signed by the real identity need re-encryption;
        // anything already signed by the workspace key is skipped.
        if (record.signature_npub !== realSession.npub) {
          totalSkipped++;
          continue;
        }
        // owner_payload may be an object ({ciphertext}) or a bare string.
        const ownerCiphertext = record.owner_payload?.ciphertext ?? record.owner_payload;
        if (!ownerCiphertext) {
          totalSkipped++;
          continue;
        }
        let payload;
        try {
          payload = decryptRecordPayload(record, realSession, groupKeys);
        } catch {
          // Undecryptable records are counted as skipped, not errors — the
          // real session may simply lack access to some group payloads.
          totalSkipped++;
          continue;
        }
        toMigrate.push({ record, payload });
      }

      migrated[collection] = toMigrate.length;
      totalMigrated += toMigrate.length;
      if (dryRun || toMigrate.length === 0) continue;

      for (let i = 0; i < toMigrate.length; i += batchSize) {
        const batch = toMigrate.slice(i, i + batchSize);
        const envelopes = [];

        for (const { record, payload } of batch) {
          try {
            const plaintext = JSON.stringify(payload);

            // Re-encrypt group payloads with workspace key as the signer
            const reEncryptedGroupPayloads = (record.group_payloads || []).map((gp) => {
              // Key entries may be indexed by group_id or group_npub.
              const keyEntry = groupKeys.get(gp.group_id) || groupKeys.get(gp.group_npub);
              // Without the group secret the old ciphertext is kept as-is.
              if (!keyEntry?.secret) return gp;
              return {
                group_id: gp.group_id,
                group_npub: gp.group_npub,
                group_epoch: keyEntry.keyVersion || gp.group_epoch,
                // Inner envelope records who encrypted so readers pick the
                // right counterparty key for decryption.
                ciphertext: JSON.stringify({
                  encrypted_by_npub: session.npub,
                  ciphertext: encryptForNpub(session.secret, keyEntry.groupNpub, plaintext),
                }),
                write: gp.write ?? true,
              };
            });

            // New version signed by the workspace session; version bump keeps
            // the server's optimistic-concurrency chain intact.
            envelopes.push({
              record_id: record.record_id,
              owner_npub: record.owner_npub,
              record_family_hash: hash,
              version: (record.version ?? 1) + 1,
              previous_version: record.version ?? 1,
              signature_npub: session.npub,
              write_group_id: record.write_group_id || undefined,
              write_group_npub: record.write_group_npub || undefined,
              owner_payload: encryptOwnerPayload(record.owner_npub, plaintext, session),
              group_payloads: reEncryptedGroupPayloads,
            });
          } catch (err) {
            // A single bad record must not abort the whole migration.
            console.warn(`  Error re-encrypting ${collection} ${record.record_id}: ${err.message}`);
            totalErrors++;
          }
        }

        if (envelopes.length > 0) {
          try {
            await client.syncRecords(envelopes);
            console.log(`  Synced ${envelopes.length} ${collection} records (batch ${Math.floor(i / batchSize) + 1})`);
          } catch (err) {
            // The whole batch is counted as failed; later batches still run.
            console.error(`  Batch sync failed for ${collection}: ${err.message}`);
            totalErrors += envelopes.length;
          }
        }
      }
    }

    printResult({
      dryRun,
      migrated,
      totals: { migrated: totalMigrated, skipped: totalSkipped, errors: totalErrors },
    });
  });
2269
+
2270
// ── Storage ───────────────────────────────────────────────────
const storage = program.command('storage');
storage.command('upload')
  .argument('<filePath>')
  .option('--group <groupRef...>')
  .option('--owner <npub>')
  .option('--markdown', 'print markdown image reference if image')
  .action(async (filePath, options) => {
    const { client, db, config } = await refreshClientAndState();
    // Access defaults to the workspace's primary group when no --group refs are given.
    let accessGroupIds = [];
    if (options.group?.length) {
      accessGroupIds = resolveStorageAccessGroupIds(db, options.group);
    } else {
      const primaryGroup = getPrimaryGroup(db);
      if (primaryGroup?.group_id) accessGroupIds = [primaryGroup.group_id];
    }
    const uploaded = await uploadFileToStorage(client, filePath, {
      ownerNpub: options.owner || config.workspaceOwnerNpub,
      accessGroupIds,
      contentType: detectMimeType(filePath),
      fileName: defaultFileName(filePath, 'upload'),
    });
    const { object_id, file_name, content_type, size_bytes, sha256_hex } = uploaded;
    const output = {
      object_id,
      file_name,
      content_type,
      size_bytes,
      sha256_hex,
      storage_markdown: createStorageMarkdown(object_id, file_name),
    };
    // --markdown prints just the reference string unless global --json was requested.
    const markdownOnly = options.markdown && !program.opts().json;
    printResult(markdownOnly ? output.storage_markdown : output);
  });
2298
+
2299
// ── Schedules ─────────────────────────────────────────────────
const schedules = program.command('schedules');
// Create a recurring schedule record owned by the workspace owner.
// Group resolution: --assign wins over --board; with neither, the primary group is used.
schedules.command('create')
  .requiredOption('--title <title>')
  .option('--description <description>')
  .option('--start <HH:MM>', 'time_start')
  .option('--end <HH:MM>', 'time_end')
  .option('--days <days>', 'comma-separated days e.g. mon,tue,wed')
  .option('--timezone <tz>', 'IANA timezone', 'Australia/Perth')
  .option('--assign <groupRef>', 'assigned group')
  .option('--repeat <repeat>', 'daily|weekly|once', 'daily')
  .option('--board <groupRef>', 'group reference')
  .action(async (options) => {
    const { client, db, config, session, groupKeys } = await refreshClientAndState();
    // Fix: an unknown --assign ref used to fall through silently to the
    // --board/primary group, and the error message always cited --board.
    // Now each explicit ref fails loudly with the ref the user actually gave.
    let primaryGroup;
    if (options.assign) {
      primaryGroup = findGroupByRef(db, options.assign);
      if (!primaryGroup) throw new Error(`Group not found: ${options.assign}`);
    } else if (options.board) {
      primaryGroup = findGroupByRef(db, options.board);
      if (!primaryGroup) throw new Error(`Group not found: ${options.board}`);
    } else {
      primaryGroup = requirePrimaryGroup(db);
    }
    const groupId = primaryGroup.group_id;
    const shares = buildDefaultGroupShares(primaryGroup, primaryGroup.name || '');
    // Normalize day tokens: trim, lowercase, drop empties ("mon, Tue," -> ["mon","tue"]).
    const days = options.days
      ? options.days.split(',').map((d) => d.trim().toLowerCase()).filter(Boolean)
      : [];
    const envelope = outboundSchedule(config.appNpub, session, groupKeys, {
      record_id: crypto.randomUUID(),
      owner_npub: config.workspaceOwnerNpub,
      title: options.title,
      description: options.description ?? '',
      time_start: options.start ?? null,
      time_end: options.end ?? null,
      days,
      timezone: options.timezone,
      assigned_group_id: groupId,
      active: true,
      last_run: null,
      repeat: options.repeat,
      shares,
      group_ids: [groupId],
      board_group_id: groupId,
      version: 0,
    });
    printResult(await syncRecordsAndRefresh(client, config, session, [envelope]));
  });
2342
+
2343
// List non-deleted schedules, newest first.
schedules.command('list')
  .action(() => {
    const db = openDb();
    const rows = getRows(db, `SELECT * FROM schedules WHERE record_state != 'deleted' ORDER BY updated_at DESC`);
    // Flatten DB rows into the public list shape.
    const output = rows.map((r) => ({
      record_id: r.record_id,
      title: r.title,
      time_start: r.time_start,
      time_end: r.time_end,
      days: jsonField(r.days_json),
      timezone: r.timezone,
      assigned_group_id: r.assigned_group_id ?? null,
      active: Boolean(r.active),
      repeat: r.repeat,
      last_run: r.last_run,
      updated_at: r.updated_at,
    }));
    if (program.opts().json) {
      console.log(JSON.stringify(output, null, 2));
      return;
    }
    for (const row of output) {
      console.log(`${row.record_id} | ${row.active ? 'active' : 'inactive'} | ${row.title} | ${row.time_start ?? ''}-${row.time_end ?? ''} | ${(row.days || []).join(',')}`);
    }
  });
2363
+
2364
// Show one schedule by record id (raw payload when parseable, DB row otherwise).
schedules.command('show')
  .argument('<scheduleId>')
  .action((scheduleId) => {
    const db = openDb();
    const scheduleRow = getRow(db, `SELECT * FROM schedules WHERE record_id = ?`, [scheduleId]);
    if (!scheduleRow) throw new Error(`Schedule not found: ${scheduleId}`);
    const schedule = parseRawRow(scheduleRow) || scheduleRow;
    if (program.opts().json) {
      console.log(JSON.stringify(schedule, null, 2));
      return;
    }
    console.log(schedule);
  });
2374
+
2375
// Patch an existing schedule. Only flags the caller actually supplied are
// written; everything else on the record is left untouched.
schedules.command('update')
  .argument('<scheduleId>')
  .option('--title <title>')
  .option('--description <description>')
  .option('--start <HH:MM>')
  .option('--end <HH:MM>')
  .option('--days <days>', 'comma-separated days')
  .option('--timezone <tz>')
  .option('--assign <groupRef>')
  .option('--clear-assignee')
  .option('--active <bool>', 'true or false')
  .option('--repeat <repeat>')
  .option('--last-run <iso>')
  .action(async (scheduleId, options) => {
    // Sync first so the patch is applied on top of the latest remote state,
    // then re-open client/db to see the freshly synced rows.
    let { client, db, config, session, groupKeys } = getClientAndState();
    await syncWorkspace({ client, config, session, quiet: true });
    ({ client, db, config, session, groupKeys } = getClientAndState());
    const row = getRow(db, `SELECT * FROM schedules WHERE record_id = ?`, [scheduleId]);
    if (!row) throw new Error(`Schedule not found: ${scheduleId}`);
    const schedule = JSON.parse(row.raw_json);
    // Build a sparse patch keyed on which options were provided.
    const patch = {};
    if (options.title !== undefined) patch.title = options.title;
    if (options.description !== undefined) patch.description = options.description;
    if (options.start !== undefined) patch.time_start = options.start;
    if (options.end !== undefined) patch.time_end = options.end;
    // Day tokens are trimmed, lowercased, and empties dropped.
    if (options.days !== undefined) patch.days = options.days.split(',').map((d) => d.trim().toLowerCase()).filter(Boolean);
    if (options.timezone !== undefined) patch.timezone = options.timezone;
    if (options.assign !== undefined || options.clearAssignee) {
      // --clear-assignee takes precedence over --assign.
      // NOTE(review): passing both --assign <valid> and --clear-assignee
      // currently throws `Group not found` even for a valid ref — confirm
      // whether that combination should instead just clear.
      const assignedGroup = options.assign ? findGroupByRef(db, options.assign) : null;
      patch.assigned_group_id = options.clearAssignee
        ? null
        : assignedGroup?.group_id ?? null;
      if (options.assign && !patch.assigned_group_id) throw new Error(`Group not found: ${options.assign}`);
      if (patch.assigned_group_id) {
        // Re-point group membership and shares at the newly assigned group.
        patch.group_ids = [patch.assigned_group_id];
        patch.shares = buildDefaultGroupShares(assignedGroup, assignedGroup?.name || '');
      }
    }
    // --active is a string flag; anything other than the literal 'true' disables.
    if (options.active !== undefined) patch.active = options.active === 'true';
    if (options.repeat !== undefined) patch.repeat = options.repeat;
    if (options.lastRun !== undefined) patch.last_run = options.lastRun;
    const envelope = outboundSchedule(config.appNpub, session, groupKeys, schedule, patch);
    printResult(await syncRecordsAndRefresh(client, config, session, [envelope]));
  });
2419
+
2420
// Flip a schedule's active flag.
schedules.command('toggle')
  .argument('<scheduleId>')
  .action(async (scheduleId) => {
    // Refresh the local workspace first so we toggle the latest version.
    {
      const { client, config, session } = getClientAndState();
      await syncWorkspace({ client, config, session, quiet: true });
    }
    const { client, db, config, session, groupKeys } = getClientAndState();
    const row = getRow(db, `SELECT * FROM schedules WHERE record_id = ?`, [scheduleId]);
    if (!row) throw new Error(`Schedule not found: ${scheduleId}`);
    const schedule = JSON.parse(row.raw_json);
    const envelope = outboundSchedule(config.appNpub, session, groupKeys, schedule, {
      active: !schedule.active,
    });
    printResult(await syncRecordsAndRefresh(client, config, session, [envelope]));
  });
2434
+
2435
+ // ── Flows ─────────────────────────────────────────────────────
2436
+
2437
const flows = program.command('flows');
// List non-deleted flows, newest first.
// NOTE(review): the --scope option is declared but not used to filter — confirm intent.
flows.command('list')
  .option('--scope <scopeId>')
  .action(() => {
    const db = openDb();
    const rows = getRows(db, `SELECT * FROM flows WHERE record_state != 'deleted' ORDER BY updated_at DESC`);
    const output = rows.map((r) => ({
      record_id: r.record_id,
      title: r.title,
      description: r.description,
      next_flow_id: r.next_flow_id,
      scope_id: r.scope_id,
      updated_at: r.updated_at,
    }));
    if (program.opts().json) {
      console.log(JSON.stringify(output, null, 2));
      return;
    }
    for (const row of output) console.log(`${row.record_id} | ${row.title}`);
  });
2454
+
2455
// Fetch one flow by record id and materialize its steps.
flows.command('get')
  .argument('<flowId>')
  .action((flowId) => {
    const db = openDb();
    const flowRow = getRow(db, `SELECT * FROM flows WHERE record_id = ?`, [flowId]);
    if (!flowRow) throw new Error(`Flow not found: ${flowId}`);
    const flow = materializeFlowRow(flowRow);
    if (program.opts().json) {
      console.log(JSON.stringify(flow, null, 2));
      return;
    }
    console.log(flow);
  });
2465
+
2466
// Create a flow record. Steps (if given) are canonicalized before writing;
// the flow is shared with --group or the workspace's primary group.
flows.command('create')
  .requiredOption('--title <title>')
  .option('--description <description>')
  .option('--steps-json <stepsJson>', 'JSON array of step objects')
  .option('--next-flow-id <nextFlowId>')
  .option('--group <groupRef>')
  .option('--scope <scopeId>')
  .action(async (options) => {
    const { client, db, config, session, groupKeys } = await refreshClientAndState();
    const targetGroup = options.group
      ? findGroupByRef(db, options.group)
      : requirePrimaryGroup(db);
    if (!targetGroup) throw new Error(`Group not found: ${options.group}`);
    const groupId = targetGroup.group_id;
    const shares = buildDefaultGroupShares(targetGroup, targetGroup.name || '');
    const scopePatch = resolveScopeLinkPatch(db, options.scope);
    let steps = [];
    if (options.stepsJson) {
      steps = canonicalizeFlowSteps(parseJsonText(options.stepsJson, 'steps JSON'));
    }
    const envelope = outboundFlow(config.appNpub, session, groupKeys, {
      record_id: crypto.randomUUID(),
      owner_npub: config.workspaceOwnerNpub,
      title: options.title,
      description: options.description ?? '',
      steps,
      next_flow_id: options.nextFlowId ?? null,
      ...scopePatch,
      shares,
      group_ids: [groupId],
      record_state: 'active',
      version: 0,
    });
    printResult(await syncRecordsAndRefresh(client, config, session, [envelope]));
  });
2499
+
2500
// Patch a flow; only fields the caller supplied end up in the patch.
flows.command('update')
  .argument('<flowId>')
  .option('--title <title>')
  .option('--description <description>')
  .option('--steps-json <stepsJson>', 'JSON array of step objects')
  .option('--next-flow-id <nextFlowId>')
  .action(async (flowId, options) => {
    const { client, db, config, session, groupKeys } = await refreshClientAndState();
    const flowRow = getRow(db, `SELECT * FROM flows WHERE record_id = ?`, [flowId]);
    if (!flowRow) throw new Error(`Flow not found: ${flowId}`);
    const flow = materializeFlowRow(flowRow);

    const patch = {};
    if (options.title !== undefined) patch.title = options.title;
    if (options.description !== undefined) patch.description = options.description;
    if (options.stepsJson !== undefined) {
      patch.steps = canonicalizeFlowSteps(parseJsonText(options.stepsJson, 'steps JSON'));
    }
    // An empty string clears the linkage (stored as null).
    if (options.nextFlowId !== undefined) patch.next_flow_id = options.nextFlowId || null;

    const envelope = outboundFlow(config.appNpub, session, groupKeys, flow, patch);
    printResult(await syncRecordsAndRefresh(client, config, session, [envelope]));
  });
2521
+
2522
// Report (and with --apply, persist) the canonical form of flow steps.
// Without --apply this is a dry run that only prints per-flow summaries.
flows.command('canonicalize')
  .option('--flow <flowId>', 'limit migration to a single flow')
  .option('--apply', 'write canonicalized steps back to the workspace')
  .action(async (options) => {
    const db = openDb();
    const rows = options.flow
      ? [getRow(db, `SELECT * FROM flows WHERE record_id = ?`, [options.flow])].filter(Boolean)
      : getRows(db, `SELECT * FROM flows WHERE record_state != 'deleted' ORDER BY updated_at DESC`);
    if (options.flow && rows.length === 0) throw new Error(`Flow not found: ${options.flow}`);

    // Plan each flow: canonical steps, whether anything changed, and which
    // step fields canonicalization would drop.
    const plans = rows.map((row) => {
      const flow = materializeFlowRow(row);
      const canonicalSteps = canonicalizeFlowSteps(flow.steps);
      const changed = JSON.stringify(canonicalSteps) !== JSON.stringify(flow.steps || []);
      const removedFields = [...new Set((flow.steps || []).flatMap((step) => {
        const canonical = canonicalizeFlowStep(step);
        const beforeKeys = Object.keys(step || {});
        const afterKeys = new Set(Object.keys(canonical || {}));
        return beforeKeys.filter((key) => !afterKeys.has(key));
      }))].sort();
      return {
        flow,
        canonicalSteps,
        changed,
        summary: {
          record_id: flow.record_id,
          title: flow.title,
          changed,
          step_count: canonicalSteps.length,
          removed_fields: removedFields,
        },
      };
    });

    // Dry run, or nothing to write: just print the summaries.
    // (Merges the two previously duplicated early-return paths.)
    const changedPlans = plans.filter((plan) => plan.changed);
    if (!options.apply || changedPlans.length === 0) {
      printResult(plans.map((plan) => plan.summary));
      return;
    }

    const { client, config, session, groupKeys } = await refreshClientAndState();
    // Fix: outboundFlow is synchronous (its result is used directly elsewhere
    // in this file), so the previous `await Promise.all(envelopes)` was a
    // no-op wrapper — pass the envelopes straight through.
    const envelopes = changedPlans.map(({ flow, canonicalSteps }) =>
      outboundFlow(config.appNpub, session, groupKeys, flow, { steps: canonicalSteps }),
    );
    printResult(await syncRecordsAndRefresh(client, config, session, envelopes));
  });
2573
+
2574
// Soft-delete a flow: tombstone the record rather than removing the row.
flows.command('delete')
  .argument('<flowId>')
  .action(async (flowId) => {
    const { client, db, config, session, groupKeys } = await refreshClientAndState();
    const flowRow = getRow(db, `SELECT * FROM flows WHERE record_id = ?`, [flowId]);
    if (!flowRow) throw new Error(`Flow not found: ${flowId}`);
    const envelope = outboundFlow(config.appNpub, session, groupKeys, materializeFlowRow(flowRow), {
      record_state: 'deleted',
    });
    printResult(await syncRecordsAndRefresh(client, config, session, [envelope]));
  });
2586
+
2587
+ // ── Approvals ─────────────────────────────────────────────────
2588
+
2589
const approvals = program.command('approvals');
// List non-deleted approvals, newest first, optionally filtered by status.
// NOTE(review): the --scope option is declared but not used to filter — confirm intent.
approvals.command('list')
  .option('--status <status>', 'filter by status (pending, approved, rejected, needs_revision)')
  .option('--scope <scopeId>')
  .action((options) => {
    const db = openDb();
    let rows;
    if (options.status) {
      rows = getRows(db, `SELECT * FROM approvals WHERE status = ? AND record_state != 'deleted' ORDER BY updated_at DESC`, [options.status]);
    } else {
      rows = getRows(db, `SELECT * FROM approvals WHERE record_state != 'deleted' ORDER BY updated_at DESC`);
    }
    const output = rows.map((r) => ({
      record_id: r.record_id,
      title: r.title,
      status: r.status,
      flow_id: r.flow_id,
      flow_step: r.flow_step,
      approval_mode: r.approval_mode,
      confidence_score: r.confidence_score,
      updated_at: r.updated_at,
    }));
    if (program.opts().json) {
      console.log(JSON.stringify(output, null, 2));
      return;
    }
    for (const row of output) console.log(`${row.record_id} | ${row.status} | ${row.title}`);
  });
2611
+
2612
// Show one approval; backfills list fields from flattened DB columns when
// the raw payload lacks them.
approvals.command('get')
  .argument('<approvalId>')
  .action((approvalId) => {
    const db = openDb();
    const approvalRow = getRow(db, `SELECT * FROM approvals WHERE record_id = ?`, [approvalId]);
    if (!approvalRow) throw new Error(`Approval not found: ${approvalId}`);
    const approval = parseRawRow(approvalRow) || approvalRow;
    if (!approval.task_ids) approval.task_ids = jsonField(approvalRow.task_ids_json, []);
    if (!approval.artifact_refs) approval.artifact_refs = jsonField(approvalRow.artifact_refs_json, []);
    if (program.opts().json) {
      console.log(JSON.stringify(approval, null, 2));
      return;
    }
    console.log(approval);
  });
2624
+
2625
// Create an approval gate. Grouping/shares and scope are inherited from the
// linked flow, the gated tasks, or an explicit --scope ref unless overridden.
approvals.command('create')
  .requiredOption('--title <title>')
  .option('--flow-id <flowId>')
  .option('--flow-run-id <flowRunId>')
  .option('--flow-step <step>')
  .option('--task-ids <taskIds...>', 'task IDs this approval gates')
  .option('--approver-whitelist <npubs...>', 'npub whitelist allowed to approve')
  .option('--brief <brief>')
  .option('--confidence <score>', 'confidence score 0-1')
  .option('--approval-mode <mode>', 'manual or agent', 'manual')
  .option('--artifact-refs-json <json>', 'JSON array of artifact reference objects')
  .option('--group <groupRef>')
  .option('--scope <scopeId>')
  .action(async (options) => {
    const { client, db, config, session, groupKeys } = await refreshClientAndState();
    const recordId = crypto.randomUUID();
    // Find a related record (flow / first task / scope ref) to inherit
    // grouping and scope linkage from.
    const inherited = resolveCreateInheritedRecord(db, {
      flowId: options.flowId ?? null,
      taskIds: options.taskIds ?? [],
      scopeRef: options.scope ?? null,
    });
    // An explicit --group ref wins; otherwise groups/shares come from the
    // inherited record.
    const { groupIds, shares } = resolveRecordSharesAndGroups({
      db,
      explicitGroupRef: options.group,
      inherited,
    });
    // Likewise for scope: explicit --scope overrides inherited scope fields.
    const scopePatch = options.scope ? resolveScopeLinkPatch(db, options.scope) : scopePatchFromRecord(inherited);
    const artifactRefs = options.artifactRefsJson ? parseJsonText(options.artifactRefsJson, 'artifact refs JSON') : [];
    // New approvals always start 'pending' with all decision fields empty.
    const envelope = outboundApproval(config.appNpub, session, groupKeys, {
      record_id: recordId,
      owner_npub: config.workspaceOwnerNpub,
      title: options.title,
      flow_id: options.flowId ?? null,
      flow_run_id: options.flowRunId ?? null,
      flow_step: maybeParseInt(options.flowStep),
      task_ids: options.taskIds ?? [],
      status: 'pending',
      approval_mode: options.approvalMode ?? 'manual',
      approver_whitelist: options.approverWhitelist ?? [],
      brief: options.brief ?? '',
      // `!= null` deliberately catches both null and undefined here.
      confidence_score: options.confidence != null ? Number(options.confidence) : null,
      approved_by: null,
      approved_at: null,
      decision_note: null,
      agent_review_by: null,
      agent_review_note: null,
      artifact_refs: artifactRefs,
      revision_task_id: null,
      ...scopePatch,
      shares,
      group_ids: groupIds,
      record_state: 'active',
      version: 0,
    });
    printResult(await syncRecordsAndRefresh(client, config, session, [envelope]));
  });
2681
+
2682
// Mark an approval as approved by the current session identity.
approvals.command('approve')
  .argument('<approvalId>')
  .option('--note <note>', 'decision note')
  .action(async (approvalId, options) => {
    // Sync first so the decision lands on the latest record version.
    {
      const { client, config, session } = getClientAndState();
      await syncWorkspace({ client, config, session, quiet: true });
    }
    const { client, db, config, session, groupKeys } = getClientAndState();
    const row = getRow(db, `SELECT * FROM approvals WHERE record_id = ?`, [approvalId]);
    if (!row) throw new Error(`Approval not found: ${approvalId}`);
    const approval = JSON.parse(row.raw_json);
    if (!approval.group_ids) approval.group_ids = jsonField(row.group_ids_json, []);
    const decision = {
      status: 'approved',
      approved_by: session.npub,
      approved_at: new Date().toISOString(),
      decision_note: options.note ?? null,
    };
    const envelope = outboundApproval(config.appNpub, session, groupKeys, approval, decision);
    printResult(await syncRecordsAndRefresh(client, config, session, [envelope]));
  });
2701
+
2702
// Mark an approval as rejected by the current session identity.
approvals.command('reject')
  .argument('<approvalId>')
  .option('--note <note>', 'decision note')
  .action(async (approvalId, options) => {
    // Sync first so the decision lands on the latest record version.
    {
      const { client, config, session } = getClientAndState();
      await syncWorkspace({ client, config, session, quiet: true });
    }
    const { client, db, config, session, groupKeys } = getClientAndState();
    const row = getRow(db, `SELECT * FROM approvals WHERE record_id = ?`, [approvalId]);
    if (!row) throw new Error(`Approval not found: ${approvalId}`);
    const approval = JSON.parse(row.raw_json);
    if (!approval.group_ids) approval.group_ids = jsonField(row.group_ids_json, []);
    const decision = {
      status: 'rejected',
      approved_by: session.npub,
      approved_at: new Date().toISOString(),
      decision_note: options.note ?? null,
    };
    const envelope = outboundApproval(config.appNpub, session, groupKeys, approval, decision);
    printResult(await syncRecordsAndRefresh(client, config, session, [envelope]));
  });
2721
+
2722
// Request revisions: flips the approval to 'needs_revision' AND creates a
// companion revision task that inherits the approval's flow linkage,
// grouping, and scope. Both records are synced in one batch.
approvals.command('improve')
  .argument('<approvalId>')
  .option('--note <note>', 'revision note for the agent')
  .action(async (approvalId, options) => {
    // Sync first so the decision lands on the latest record version.
    let { client, db, config, session, groupKeys } = getClientAndState();
    await syncWorkspace({ client, config, session, quiet: true });
    ({ client, db, config, session, groupKeys } = getClientAndState());
    const row = getRow(db, `SELECT * FROM approvals WHERE record_id = ?`, [approvalId]);
    if (!row) throw new Error(`Approval not found: ${approvalId}`);
    const approval = JSON.parse(row.raw_json);
    // Backfill group/share fields from flattened columns when the raw
    // payload lacks them, so share inheritance below has data to work with.
    if (!approval.group_ids) approval.group_ids = jsonField(row.group_ids_json, []);
    if (!approval.shares) approval.shares = jsonField(row.shares_json, []);
    // Create a revision task for the agent to pick up
    const revisionTaskId = crypto.randomUUID();
    const { groupIds, shares } = resolveRecordSharesAndGroups({ db, inherited: approval });
    const boardGroupId = groupIds[0] ?? null;
    const taskEnvelope = outboundTask(config.appNpub, session, groupKeys, {
      record_id: revisionTaskId,
      owner_npub: config.workspaceOwnerNpub,
      title: `Revision: ${approval.title || 'approval'}`,
      description: options.note ?? 'Revision requested',
      state: 'ready',
      // NOTE(review): 'sand' appears to be a project-specific priority level
      // (cf. big-rocks/pebbles/sand) — confirm against the task schema.
      priority: 'sand',
      flow_id: approval.flow_id ?? null,
      flow_run_id: approval.flow_run_id ?? null,
      flow_step: approval.flow_step ?? null,
      board_group_id: boardGroupId,
      ...scopePatchFromRecord(approval),
      shares,
      group_ids: groupIds,
      version: 0,
    });
    // Record the decision and link the approval to its revision task.
    const approvalEnvelope = outboundApproval(config.appNpub, session, groupKeys, approval, {
      status: 'needs_revision',
      approved_by: session.npub,
      approved_at: new Date().toISOString(),
      decision_note: options.note ?? null,
      revision_task_id: revisionTaskId,
    });
    printResult(await syncRecordsAndRefresh(client, config, session, [taskEnvelope, approvalEnvelope]));
  });
2763
+
2764
// Run the CLI; any rejection surfaces as a one-line error and a non-zero exit.
program.parseAsync(process.argv).catch((error) => {
  const message = error instanceof Error ? error.message : String(error);
  console.error(message);
  process.exit(1);
});