@lifestreamdynamics/vault-cli 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +759 -0
- package/dist/client.d.ts +12 -0
- package/dist/client.js +79 -0
- package/dist/commands/admin.d.ts +2 -0
- package/dist/commands/admin.js +263 -0
- package/dist/commands/audit.d.ts +2 -0
- package/dist/commands/audit.js +119 -0
- package/dist/commands/auth.d.ts +2 -0
- package/dist/commands/auth.js +256 -0
- package/dist/commands/config.d.ts +2 -0
- package/dist/commands/config.js +130 -0
- package/dist/commands/connectors.d.ts +2 -0
- package/dist/commands/connectors.js +224 -0
- package/dist/commands/docs.d.ts +2 -0
- package/dist/commands/docs.js +194 -0
- package/dist/commands/hooks.d.ts +2 -0
- package/dist/commands/hooks.js +159 -0
- package/dist/commands/keys.d.ts +2 -0
- package/dist/commands/keys.js +165 -0
- package/dist/commands/publish.d.ts +2 -0
- package/dist/commands/publish.js +138 -0
- package/dist/commands/search.d.ts +2 -0
- package/dist/commands/search.js +61 -0
- package/dist/commands/shares.d.ts +2 -0
- package/dist/commands/shares.js +121 -0
- package/dist/commands/subscription.d.ts +2 -0
- package/dist/commands/subscription.js +166 -0
- package/dist/commands/sync.d.ts +2 -0
- package/dist/commands/sync.js +565 -0
- package/dist/commands/teams.d.ts +2 -0
- package/dist/commands/teams.js +322 -0
- package/dist/commands/user.d.ts +2 -0
- package/dist/commands/user.js +48 -0
- package/dist/commands/vaults.d.ts +2 -0
- package/dist/commands/vaults.js +157 -0
- package/dist/commands/versions.d.ts +2 -0
- package/dist/commands/versions.js +219 -0
- package/dist/commands/webhooks.d.ts +2 -0
- package/dist/commands/webhooks.js +181 -0
- package/dist/config.d.ts +24 -0
- package/dist/config.js +88 -0
- package/dist/index.d.ts +2 -0
- package/dist/index.js +63 -0
- package/dist/lib/credential-manager.d.ts +48 -0
- package/dist/lib/credential-manager.js +101 -0
- package/dist/lib/encrypted-config.d.ts +20 -0
- package/dist/lib/encrypted-config.js +102 -0
- package/dist/lib/keychain.d.ts +8 -0
- package/dist/lib/keychain.js +82 -0
- package/dist/lib/migration.d.ts +31 -0
- package/dist/lib/migration.js +92 -0
- package/dist/lib/profiles.d.ts +43 -0
- package/dist/lib/profiles.js +104 -0
- package/dist/sync/config.d.ts +32 -0
- package/dist/sync/config.js +100 -0
- package/dist/sync/conflict.d.ts +30 -0
- package/dist/sync/conflict.js +60 -0
- package/dist/sync/daemon-worker.d.ts +1 -0
- package/dist/sync/daemon-worker.js +128 -0
- package/dist/sync/daemon.d.ts +44 -0
- package/dist/sync/daemon.js +174 -0
- package/dist/sync/diff.d.ts +43 -0
- package/dist/sync/diff.js +166 -0
- package/dist/sync/engine.d.ts +41 -0
- package/dist/sync/engine.js +233 -0
- package/dist/sync/ignore.d.ts +16 -0
- package/dist/sync/ignore.js +72 -0
- package/dist/sync/remote-poller.d.ts +23 -0
- package/dist/sync/remote-poller.js +145 -0
- package/dist/sync/state.d.ts +32 -0
- package/dist/sync/state.js +98 -0
- package/dist/sync/types.d.ts +68 -0
- package/dist/sync/types.js +4 -0
- package/dist/sync/watcher.d.ts +23 -0
- package/dist/sync/watcher.js +207 -0
- package/dist/utils/flags.d.ts +18 -0
- package/dist/utils/flags.js +31 -0
- package/dist/utils/format.d.ts +2 -0
- package/dist/utils/format.js +22 -0
- package/dist/utils/output.d.ts +87 -0
- package/dist/utils/output.js +229 -0
- package/package.json +62 -0
package/dist/client.d.ts
ADDED
@@ -0,0 +1,12 @@
import { LifestreamVaultClient } from '@lifestreamdynamics/vault-sdk';
/**
 * Create an SDK client from CLI configuration.
 * Supports both API key and JWT (access + refresh token) authentication.
 * When using JWT tokens, auto-refresh is enabled and new tokens are persisted.
 */
export declare function getClient(): LifestreamVaultClient;
/**
 * Create an SDK client from async config resolution (secure credential manager).
 * This resolves credentials from keychain/encrypted storage.
 */
export declare function getClientAsync(): Promise<LifestreamVaultClient>;
package/dist/client.js
ADDED
@@ -0,0 +1,79 @@
import { LifestreamVaultClient } from '@lifestreamdynamics/vault-sdk';
import { loadConfig, loadConfigAsync, getCredentialManager } from './config.js';
import chalk from 'chalk';
/**
 * Create an SDK client from CLI configuration.
 * Supports both API key and JWT (access + refresh token) authentication.
 * When using JWT tokens, auto-refresh is enabled and new tokens are persisted.
 */
export function getClient() {
    const config = loadConfig();
    // JWT auth mode: use access + refresh tokens
    if (config.accessToken) {
        return new LifestreamVaultClient({
            baseUrl: config.apiUrl,
            accessToken: config.accessToken,
            refreshToken: config.refreshToken,
            onTokenRefresh: async (tokens) => {
                // Persist refreshed tokens to secure storage
                try {
                    const cm = getCredentialManager();
                    await cm.saveCredentials({
                        accessToken: tokens.accessToken,
                    });
                }
                catch {
                    // Best-effort persistence; don't break the request
                }
            },
        });
    }
    // API key auth mode
    if (config.apiKey) {
        return new LifestreamVaultClient({
            baseUrl: config.apiUrl,
            apiKey: config.apiKey,
        });
    }
    console.error(chalk.red('No credentials configured.'));
    console.error('Run: lsvault auth login --api-key <key>');
    console.error(' or: lsvault auth login --email <email>');
    console.error('Or set LSVAULT_API_KEY environment variable');
    process.exit(1);
}
/**
 * Create an SDK client from async config resolution (secure credential manager).
 * This resolves credentials from keychain/encrypted storage.
 */
export async function getClientAsync() {
    const config = await loadConfigAsync();
    if (config.accessToken) {
        return new LifestreamVaultClient({
            baseUrl: config.apiUrl,
            accessToken: config.accessToken,
            refreshToken: config.refreshToken,
            onTokenRefresh: async (tokens) => {
                try {
                    const cm = getCredentialManager();
                    await cm.saveCredentials({
                        accessToken: tokens.accessToken,
                    });
                }
                catch {
                    // Best-effort
                }
            },
        });
    }
    if (config.apiKey) {
        return new LifestreamVaultClient({
            baseUrl: config.apiUrl,
            apiKey: config.apiKey,
        });
    }
    console.error(chalk.red('No credentials configured.'));
    console.error('Run: lsvault auth login --api-key <key>');
    console.error(' or: lsvault auth login --email <email>');
    console.error('Or set LSVAULT_API_KEY environment variable');
    process.exit(1);
}
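Taken together, client.d.ts and client.js are the seam every command goes through: getClient() resolves credentials synchronously from the loaded config, while getClientAsync() resolves them through the keychain/encrypted credential manager, and both print the same guidance and exit when nothing is configured. A minimal sketch of a hypothetical command wired the same way as the shipped ones follows; the 'ping' command itself is illustrative and not part of this package, while getClientAsync, addGlobalFlags, resolveFlags, createOutput, handleError, and client.admin.getHealth() are all taken from files in this diff.

import { getClientAsync } from '../client.js';
import { addGlobalFlags, resolveFlags } from '../utils/flags.js';
import { createOutput, handleError } from '../utils/output.js';
// Illustrative sketch only: a command that reuses the async (keychain-backed) client path.
export function registerPingCommand(program) {
    addGlobalFlags(program.command('ping')
        .description('Check API reachability (example, not shipped with the package)'))
        .action(async (_opts) => {
        const flags = resolveFlags(_opts);
        const out = createOutput(flags);
        out.startSpinner('Contacting API...');
        try {
            // Credentials come from the credential manager (keychain/encrypted storage).
            const client = await getClientAsync();
            const health = await client.admin.getHealth();
            out.stopSpinner();
            out.record({ status: health.status, database: health.database, redis: health.redis });
        }
        catch (err) {
            handleError(out, err, 'Failed to reach API');
        }
    });
}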
package/dist/commands/admin.js
ADDED
@@ -0,0 +1,263 @@
import chalk from 'chalk';
import { getClient } from '../client.js';
import { addGlobalFlags, resolveFlags } from '../utils/flags.js';
import { createOutput, handleError } from '../utils/output.js';
import { formatBytes, formatUptime } from '../utils/format.js';
export function registerAdminCommands(program) {
    const admin = program.command('admin').description('System administration (requires admin role)');
    // ── System Stats ────────────────────────────────────────────────────
    const stats = admin.command('stats').description('View system-wide statistics and metrics');
    const statsAction = async (_opts) => {
        const flags = resolveFlags(_opts);
        const out = createOutput(flags);
        out.startSpinner('Fetching system stats...');
        try {
            const client = getClient();
            const data = await client.admin.getStats();
            out.stopSpinner();
            out.record({
                totalUsers: data.totalUsers,
                activeUsers: data.activeUsers,
                totalVaults: data.totalVaults,
                totalDocuments: data.totalDocuments,
                totalStorageBytes: flags.output === 'text' ? formatBytes(data.totalStorageBytes) : data.totalStorageBytes,
            });
        }
        catch (err) {
            handleError(out, err, 'Failed to fetch system stats');
        }
    };
    addGlobalFlags(stats.command('overview')
        .description('Show system-wide statistics'))
        .action(statsAction);
    addGlobalFlags(stats)
        .action(statsAction);
    addGlobalFlags(stats.command('timeseries')
        .description('Show timeseries data for a metric')
        .requiredOption('--metric <metric>', 'Metric: signups, documents, or storage')
        .requiredOption('--period <period>', 'Period: 7d, 30d, or 90d'))
        .action(async (_opts) => {
        const flags = resolveFlags(_opts);
        const out = createOutput(flags);
        out.startSpinner('Fetching timeseries data...');
        try {
            const client = getClient();
            const data = await client.admin.getTimeseries(String(_opts.metric), String(_opts.period));
            out.stopSpinner();
            if (flags.output === 'text') {
                out.status(`${chalk.dim('Metric:')} ${data.metric} ${chalk.dim('Period:')} ${data.period}`);
            }
            out.list(data.data.map((point) => ({
                date: point.date,
                value: point.value,
            })), {
                emptyMessage: 'No data points found.',
                columns: [
                    { key: 'date', header: 'Date' },
                    { key: 'value', header: 'Value' },
                ],
                textFn: (p) => {
                    const bar = '#'.repeat(Math.min(Number(p.value), 50));
                    return ` ${String(p.date)} ${String(p.value).padStart(6)} ${chalk.cyan(bar)}`;
                },
            });
        }
        catch (err) {
            handleError(out, err, 'Failed to fetch timeseries data');
        }
    });
    // ── User Management ─────────────────────────────────────────────────
    const users = admin.command('users').description('List, inspect, and update user accounts');
    addGlobalFlags(users.command('list')
        .description('List users')
        .option('--page <number>', 'Page number', parseInt)
        .option('--limit <number>', 'Results per page', parseInt)
        .option('--search <query>', 'Search by name or email')
        .option('--tier <tier>', 'Filter by tier (free, pro, business)'))
        .action(async (_opts) => {
        const flags = resolveFlags(_opts);
        const out = createOutput(flags);
        out.startSpinner('Fetching users...');
        try {
            const client = getClient();
            const result = await client.admin.listUsers({
                page: _opts.page,
                limit: _opts.limit,
                search: _opts.search,
                tier: _opts.tier,
            });
            out.stopSpinner();
            if (flags.output === 'text') {
                out.status(`${chalk.dim('Total:')} ${result.total} ${chalk.dim('Page:')} ${result.page} ${chalk.dim('Limit:')} ${result.limit}`);
            }
            out.list(result.users.map(u => ({
                email: u.email,
                id: u.id,
                name: u.name || '',
                role: u.role,
                subscriptionTier: u.subscriptionTier,
                isActive: u.isActive,
            })), {
                emptyMessage: 'No users found.',
                columns: [
                    { key: 'email', header: 'Email' },
                    { key: 'name', header: 'Name' },
                    { key: 'role', header: 'Role' },
                    { key: 'subscriptionTier', header: 'Tier' },
                    { key: 'isActive', header: 'Active' },
                ],
                textFn: (u) => {
                    const active = u.isActive ? chalk.green('active') : chalk.red('inactive');
                    const name = u.name || chalk.dim('no name');
                    return ` ${chalk.cyan(String(u.email))} ${chalk.dim(`(${String(u.id)})`)} -- ${name} -- ${chalk.magenta(String(u.role))} -- ${String(u.subscriptionTier)} -- ${active}`;
                },
            });
        }
        catch (err) {
            handleError(out, err, 'Failed to fetch users');
        }
    });
    addGlobalFlags(users.command('get')
        .description('Get user details')
        .argument('<userId>', 'User ID'))
        .action(async (userId, _opts) => {
        const flags = resolveFlags(_opts);
        const out = createOutput(flags);
        out.startSpinner('Fetching user...');
        try {
            const client = getClient();
            const user = await client.admin.getUser(userId);
            out.stopSpinner();
            out.record({
                email: user.email,
                id: user.id,
                name: user.name,
                role: user.role,
                isActive: user.isActive,
                subscriptionTier: user.subscriptionTier,
                vaultCount: user.vaultCount,
                documentCount: user.documentCount,
                storageBytes: flags.output === 'text' ? formatBytes(user.storageBytes) : user.storageBytes,
                createdAt: user.createdAt,
                updatedAt: user.updatedAt,
            });
        }
        catch (err) {
            handleError(out, err, 'Failed to fetch user');
        }
    });
    addGlobalFlags(users.command('update')
        .description('Update a user')
        .argument('<userId>', 'User ID')
        .option('--role <role>', 'Set role (user or admin)')
        .option('--active', 'Set user as active')
        .option('--inactive', 'Set user as inactive'))
        .action(async (userId, _opts) => {
        const flags = resolveFlags(_opts);
        const out = createOutput(flags);
        const params = {};
        if (_opts.role)
            params.role = _opts.role;
        if (_opts.active)
            params.isActive = true;
        if (_opts.inactive)
            params.isActive = false;
        if (Object.keys(params).length === 0) {
            out.error('No updates specified. Use --role, --active, or --inactive.');
            process.exitCode = 2;
            return;
        }
        out.startSpinner('Updating user...');
        try {
            const client = getClient();
            const updated = await client.admin.updateUser(userId, params);
            out.success(`User updated: ${chalk.cyan(updated.email)} -- ${chalk.magenta(updated.role)} -- ${updated.isActive ? chalk.green('active') : chalk.red('inactive')}`, {
                email: updated.email,
                role: updated.role,
                isActive: updated.isActive,
            });
        }
        catch (err) {
            handleError(out, err, 'Failed to update user');
        }
    });
    // ── Activity ────────────────────────────────────────────────────────
    addGlobalFlags(admin.command('activity')
        .description('Show recent system-wide activity events')
        .option('--limit <number>', 'Number of entries (default: 20)', parseInt))
        .action(async (_opts) => {
        const flags = resolveFlags(_opts);
        const out = createOutput(flags);
        out.startSpinner('Fetching activity...');
        try {
            const client = getClient();
            const activity = await client.admin.getActivity(_opts.limit);
            out.stopSpinner();
            out.list(activity.map(a => ({
                createdAt: a.createdAt,
                type: a.type,
                userId: a.userId,
                path: a.path || null,
            })), {
                emptyMessage: 'No recent activity.',
                columns: [
                    { key: 'createdAt', header: 'Time' },
                    { key: 'type', header: 'Type' },
                    { key: 'userId', header: 'User' },
                    { key: 'path', header: 'Path' },
                ],
                textFn: (a) => {
                    const pathStr = a.path || chalk.dim('n/a');
                    return ` ${String(a.createdAt)} ${chalk.magenta(String(a.type).padEnd(8))} ${chalk.dim(String(a.userId))} ${pathStr}`;
                },
            });
        }
        catch (err) {
            handleError(out, err, 'Failed to fetch activity');
        }
    });
    // ── Subscriptions ───────────────────────────────────────────────────
    addGlobalFlags(admin.command('subscriptions')
        .description('Show subscription tier distribution across all users'))
        .action(async (_opts) => {
        const flags = resolveFlags(_opts);
        const out = createOutput(flags);
        out.startSpinner('Fetching subscription summary...');
        try {
            const client = getClient();
            const summary = await client.admin.getSubscriptionSummary();
            out.stopSpinner();
            out.record({
                free: summary.free,
                pro: summary.pro,
                business: summary.business,
                total: summary.total,
            });
        }
        catch (err) {
            handleError(out, err, 'Failed to fetch subscription summary');
        }
    });
    // ── Health ──────────────────────────────────────────────────────────
    addGlobalFlags(admin.command('health')
        .description('Check database, Redis, and overall system health'))
        .action(async (_opts) => {
        const flags = resolveFlags(_opts);
        const out = createOutput(flags);
        out.startSpinner('Checking system health...');
        try {
            const client = getClient();
            const health = await client.admin.getHealth();
            out.stopSpinner();
            out.record({
                status: health.status,
                database: health.database,
                redis: health.redis,
                uptime: flags.output === 'text' ? formatUptime(health.uptime) : health.uptime,
            });
        }
        catch (err) {
            handleError(out, err, 'Failed to check system health');
        }
    });
}
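The registrar pattern above (and in the other command modules listed at the top of this diff) takes a program object whose command/description/option/action chain matches the commander API; the actual wiring lives in dist/index.js, which is included in this release but not excerpted here. A rough sketch of that wiring, under those assumptions:

import { Command } from 'commander';
import { registerAdminCommands } from './commands/admin.js';
import { registerAuditCommands } from './commands/audit.js';
// Sketch of how the registrars plug into a single CLI program; the real entry
// point (dist/index.js) also registers the other command groups in the file list.
const program = new Command();
program
    .name('lsvault')
    .description('Lifestream Dynamics Vault CLI');
registerAdminCommands(program);
registerAuditCommands(program);
await program.parseAsync(process.argv);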
package/dist/commands/audit.js
ADDED
@@ -0,0 +1,119 @@
import chalk from 'chalk';
import fs from 'node:fs';
import path from 'node:path';
import os from 'node:os';
import { AuditLogger } from '@lifestreamdynamics/vault-sdk';
import { addGlobalFlags, resolveFlags } from '../utils/flags.js';
import { createOutput, handleError } from '../utils/output.js';
const DEFAULT_LOG_PATH = path.join(os.homedir(), '.lsvault', 'audit.log');
export function registerAuditCommands(program) {
    const audit = program.command('audit').description('View and export local API request audit logs');
    addGlobalFlags(audit.command('log')
        .description('View recent audit log entries with optional filters')
        .option('--tail <n>', 'Show last N entries', parseInt)
        .option('--status <code>', 'Filter by HTTP status code', parseInt)
        .option('--since <date>', 'Show entries since date (ISO 8601)')
        .option('--until <date>', 'Show entries until date (ISO 8601)')
        .option('--log-path <path>', 'Path to audit log file')
        .addHelpText('after', `
EXAMPLES
  lsvault audit log --tail 20
  lsvault audit log --status 401 --since 2025-01-01
  lsvault audit log --since 2025-01-01 --until 2025-01-31`))
        .action(async (_opts) => {
        const flags = resolveFlags(_opts);
        const out = createOutput(flags);
        try {
            const logger = new AuditLogger({ logPath: String(_opts.logPath || DEFAULT_LOG_PATH) });
            const entries = logger.readEntries({
                tail: _opts.tail,
                status: _opts.status,
                since: _opts.since,
                until: _opts.until,
            });
            out.list(entries.map(e => ({
                timestamp: e.timestamp,
                method: e.method,
                path: e.path,
                status: e.status,
                durationMs: e.durationMs,
            })), {
                emptyMessage: 'No audit log entries found.',
                columns: [
                    { key: 'timestamp', header: 'Timestamp' },
                    { key: 'method', header: 'Method' },
                    { key: 'path', header: 'Path' },
                    { key: 'status', header: 'Status' },
                    { key: 'durationMs', header: 'Duration (ms)' },
                ],
                textFn: (e) => {
                    const statusColor = getStatusColor(Number(e.status));
                    return `${chalk.dim(String(e.timestamp))} ${chalk.bold(String(e.method).padEnd(7))} ${String(e.path)} ${statusColor(String(e.status))} ${chalk.dim(`${String(e.durationMs)}ms`)}`;
                },
            });
            if (flags.output === 'text' && entries.length > 0) {
                out.status(chalk.dim(`\n${entries.length} entries shown`));
            }
        }
        catch (err) {
            handleError(out, err, 'Failed to read audit log');
        }
    });
    addGlobalFlags(audit.command('export')
        .description('Export audit log entries to a CSV file or stdout')
        .option('--format <format>', 'Export format (csv)', 'csv')
        .option('--file <file>', 'Output file path')
        .option('--status <code>', 'Filter by HTTP status code', parseInt)
        .option('--since <date>', 'Show entries since date (ISO 8601)')
        .option('--until <date>', 'Show entries until date (ISO 8601)')
        .option('--log-path <path>', 'Path to audit log file'))
        .action(async (_opts) => {
        const flags = resolveFlags(_opts);
        const out = createOutput(flags);
        try {
            if (_opts.format !== 'csv') {
                out.error(`Unsupported format: ${String(_opts.format)}. Only 'csv' is supported.`);
                process.exitCode = 2;
                return;
            }
            const logger = new AuditLogger({ logPath: String(_opts.logPath || DEFAULT_LOG_PATH) });
            const entries = logger.readEntries({
                status: _opts.status,
                since: _opts.since,
                until: _opts.until,
            });
            if (entries.length === 0) {
                out.status('No audit log entries to export.');
                return;
            }
            const csv = logger.exportCsv(entries);
            if (_opts.file) {
                const outputPath = String(_opts.file);
                const outputDir = path.dirname(outputPath);
                if (!fs.existsSync(outputDir)) {
                    fs.mkdirSync(outputDir, { recursive: true });
                }
                fs.writeFileSync(outputPath, csv, 'utf-8');
                out.success(`Exported ${entries.length} entries to ${outputPath}`, {
                    entries: entries.length,
                    path: outputPath,
                });
            }
            else {
                out.raw(csv);
            }
        }
        catch (err) {
            handleError(out, err, 'Failed to export audit log');
        }
    });
}
function getStatusColor(status) {
    if (status >= 500)
        return chalk.red;
    if (status >= 400)
        return chalk.yellow;
    if (status >= 300)
        return chalk.cyan;
    return chalk.green;
}
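Both audit subcommands are thin wrappers over the SDK's AuditLogger, so the same local log can also be read programmatically. A small sketch using only the constructor, readEntries filters, and entry fields that appear in audit.js above; the tail and status values are arbitrary examples, and the exact filter semantics are assumed to match the CLI flags:

import path from 'node:path';
import os from 'node:os';
import { AuditLogger } from '@lifestreamdynamics/vault-sdk';
// Read the CLI's default audit log directly and print recent server errors.
const logger = new AuditLogger({ logPath: path.join(os.homedir(), '.lsvault', 'audit.log') });
const failures = logger.readEntries({ tail: 50, status: 500 });
for (const entry of failures) {
    console.log(`${entry.timestamp} ${entry.method} ${entry.path} -> ${entry.status} (${entry.durationMs}ms)`);
}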