@ghl-ai/aw 0.1.36-beta.99 → 0.1.37-beta.10
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/cli.mjs +23 -4
- package/commands/init.mjs +1 -1
- package/commands/nuke.mjs +2 -2
- package/commands/pull.mjs +49 -80
- package/commands/push.mjs +47 -12
- package/commands/telemetry.mjs +31 -0
- package/constants.mjs +4 -1
- package/fmt.mjs +14 -0
- package/git.mjs +21 -13
- package/hooks.mjs +14 -5
- package/mcp.mjs +6 -0
- package/package.json +3 -2
- package/telemetry.mjs +233 -0
package/cli.mjs
CHANGED
|
@@ -4,8 +4,9 @@ import { readFileSync } from 'node:fs';
|
|
|
4
4
|
import { join, dirname } from 'node:path';
|
|
5
5
|
import { fileURLToPath } from 'node:url';
|
|
6
6
|
import * as fmt from './fmt.mjs';
|
|
7
|
-
import { chalk } from './fmt.mjs';
|
|
7
|
+
import { chalk, CancelError } from './fmt.mjs';
|
|
8
8
|
import { checkForUpdate, notifyUpdate } from './update.mjs';
|
|
9
|
+
import { startSpan } from './telemetry.mjs';
|
|
9
10
|
|
|
10
11
|
const __dirname = dirname(fileURLToPath(import.meta.url));
|
|
11
12
|
const VERSION = JSON.parse(readFileSync(join(__dirname, 'package.json'), 'utf8')).version;
|
|
@@ -20,6 +21,7 @@ const COMMANDS = {
|
|
|
20
21
|
link: () => import('./commands/link-project.mjs').then(m => m.linkProjectCommand),
|
|
21
22
|
nuke: () => import('./commands/nuke.mjs').then(m => m.nukeCommand),
|
|
22
23
|
daemon: () => import('./commands/daemon.mjs').then(m => m.daemonCommand),
|
|
24
|
+
telemetry: () => import('./commands/telemetry.mjs').then(m => m.telemetryCommand),
|
|
23
25
|
};
|
|
24
26
|
|
|
25
27
|
function parseArgs(argv) {
|
|
@@ -101,6 +103,11 @@ function printHelp() {
|
|
|
101
103
|
cmd('aw daemon uninstall', 'Stop the background daemon'),
|
|
102
104
|
cmd('aw daemon status', 'Check if daemon is running'),
|
|
103
105
|
|
|
106
|
+
sec('Settings'),
|
|
107
|
+
cmd('aw telemetry status', 'Show telemetry status'),
|
|
108
|
+
cmd('aw telemetry disable', 'Opt out of anonymous analytics'),
|
|
109
|
+
cmd('aw telemetry enable', 'Re-enable analytics'),
|
|
110
|
+
|
|
104
111
|
sec('Examples'),
|
|
105
112
|
'',
|
|
106
113
|
` ${chalk.dim('# Pull content from registry using path')}`,
|
|
@@ -147,9 +154,21 @@ export async function run(argv) {
|
|
|
147
154
|
}
|
|
148
155
|
|
|
149
156
|
if (command && COMMANDS[command]) {
|
|
157
|
+
const span = await startSpan(command, args);
|
|
158
|
+
span.notice();
|
|
150
159
|
args._updateCheck = updateCheck;
|
|
151
|
-
|
|
152
|
-
|
|
160
|
+
try {
|
|
161
|
+
const handler = await COMMANDS[command]();
|
|
162
|
+
await handler(args);
|
|
163
|
+
await span.end({ status: 'completed' });
|
|
164
|
+
} catch (err) {
|
|
165
|
+
if (err instanceof CancelError) {
|
|
166
|
+
await span.end({ status: 'cancelled', error_type: 'CancelError' });
|
|
167
|
+
process.exit(err.exitCode ?? 1);
|
|
168
|
+
}
|
|
169
|
+
await span.end({ status: 'failed', error_type: err.constructor.name });
|
|
170
|
+
throw err;
|
|
171
|
+
}
|
|
153
172
|
notifyUpdate(await updateCheck);
|
|
154
173
|
return;
|
|
155
174
|
}
|
|
@@ -159,5 +178,5 @@ export async function run(argv) {
|
|
|
159
178
|
process.exit(0);
|
|
160
179
|
}
|
|
161
180
|
|
|
162
|
-
fmt.
|
|
181
|
+
fmt.cancelAndExit(`Unknown command: ${command}`);
|
|
163
182
|
}
|
package/commands/init.mjs
CHANGED
|
@@ -74,7 +74,7 @@ function installIdeTasks() {
|
|
|
74
74
|
{
|
|
75
75
|
label: 'aw: sync registry',
|
|
76
76
|
type: 'shell',
|
|
77
|
-
command: 'aw init --silent',
|
|
77
|
+
command: 'AW_TRIGGER=ide:task aw init --silent',
|
|
78
78
|
presentation: { reveal: 'silent', panel: 'shared', close: true },
|
|
79
79
|
runOptions: { runOn: 'folderOpen' },
|
|
80
80
|
problemMatcher: [],
|
package/commands/nuke.mjs
CHANGED
|
@@ -204,8 +204,8 @@ function removeIdeTasks() {
|
|
|
204
204
|
|
|
205
205
|
export async function nukeCommand(args) {
|
|
206
206
|
// Catch unhandled errors and surface them instead of letting clack show generic "Something went wrong"
|
|
207
|
-
process.on('uncaughtException', (e) => { fmt.
|
|
208
|
-
process.on('unhandledRejection', (e) => { fmt.
|
|
207
|
+
process.on('uncaughtException', (e) => { fmt.cancelAndExit(`Unexpected error: ${e.message}`); });
|
|
208
|
+
process.on('unhandledRejection', (e) => { fmt.cancelAndExit(`Unexpected error: ${e?.message ?? e}`); });
|
|
209
209
|
|
|
210
210
|
fmt.intro('aw nuke');
|
|
211
211
|
|
package/commands/pull.mjs
CHANGED
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
// commands/pull.mjs — Pull content from registry using persistent git clone
|
|
2
2
|
|
|
3
|
-
import {
|
|
4
|
-
import { join,
|
|
3
|
+
import { existsSync, lstatSync } from 'node:fs';
|
|
4
|
+
import { join, extname } from 'node:path';
|
|
5
5
|
import { homedir } from 'node:os';
|
|
6
6
|
import { exec as execCb } from 'node:child_process';
|
|
7
7
|
import { promisify } from 'node:util';
|
|
@@ -11,7 +11,7 @@ import * as config from '../config.mjs';
|
|
|
11
11
|
import * as fmt from '../fmt.mjs';
|
|
12
12
|
import { chalk } from '../fmt.mjs';
|
|
13
13
|
import { fetchAndMerge, addToSparseCheckout, removeFromSparseCheckout, syncWorktreeSparseCheckout, isValidClone, findNearestWorktree, rebaseOntoOriginMain } from '../git.mjs';
|
|
14
|
-
import { REGISTRY_DIR, REGISTRY_REPO, REGISTRY_URL
|
|
14
|
+
import { REGISTRY_DIR, REGISTRY_REPO, REGISTRY_URL } from '../constants.mjs';
|
|
15
15
|
import { linkWorkspace } from '../link.mjs';
|
|
16
16
|
import { generateCommands, copyInstructions } from '../integrate.mjs';
|
|
17
17
|
|
|
@@ -25,7 +25,7 @@ export async function pullCommand(args) {
|
|
|
25
25
|
const silent = args['--silent'] === true || args._silent === true;
|
|
26
26
|
|
|
27
27
|
const log = {
|
|
28
|
-
cancel: silent ? () => {
|
|
28
|
+
cancel: silent ? (msg) => { throw new fmt.CancelError(msg || 'silent cancel', { exitCode: 0 }); } : fmt.cancel,
|
|
29
29
|
logInfo: silent ? () => {} : fmt.logInfo,
|
|
30
30
|
logSuccess: silent ? () => {} : fmt.logSuccess,
|
|
31
31
|
logStep: silent ? () => {} : fmt.logStep,
|
|
@@ -74,14 +74,48 @@ export async function pullCommand(args) {
|
|
|
74
74
|
}
|
|
75
75
|
}
|
|
76
76
|
|
|
77
|
-
// Guard: if a rebase or merge is already in progress on awHome,
|
|
77
|
+
// Guard: if a rebase or merge is already in progress on awHome, try to continue it.
|
|
78
78
|
const awGitDir = join(AW_HOME, '.git');
|
|
79
|
-
|
|
80
|
-
|
|
81
|
-
|
|
82
|
-
|
|
83
|
-
|
|
84
|
-
|
|
79
|
+
const rebaseInProgress = existsSync(join(awGitDir, 'rebase-merge')) || existsSync(join(awGitDir, 'rebase-apply'));
|
|
80
|
+
const mergeInProgress = existsSync(join(awGitDir, 'MERGE_HEAD'));
|
|
81
|
+
if (rebaseInProgress || mergeInProgress) {
|
|
82
|
+
// Check for still-unresolved files (conflict markers present in index)
|
|
83
|
+
let unresolved = [];
|
|
84
|
+
try {
|
|
85
|
+
const { stdout } = await exec(`git -C "${AW_HOME}" diff --name-only --diff-filter=U`);
|
|
86
|
+
unresolved = stdout.trim().split('\n').filter(Boolean);
|
|
87
|
+
} catch { /* best effort */ }
|
|
88
|
+
|
|
89
|
+
if (unresolved.length > 0) {
|
|
90
|
+
// Still has conflicts — user needs to finish resolving
|
|
91
|
+
log.logWarn(`Rebase paused — resolve conflicts in your IDE, then run \`aw pull\` again.`);
|
|
92
|
+
if (!silent) fmt.outro(chalk.yellow('Pull skipped'));
|
|
93
|
+
return;
|
|
94
|
+
}
|
|
95
|
+
|
|
96
|
+
// All conflicts resolved (files are staged) — continue the rebase automatically
|
|
97
|
+
try {
|
|
98
|
+
await exec(`git -C "${AW_HOME}" rebase --continue`, { env: { ...process.env, GIT_EDITOR: 'true' } });
|
|
99
|
+
log.logStep('Rebase continued after conflict resolution.');
|
|
100
|
+
// Force-push if on a push branch so origin stays in sync
|
|
101
|
+
const { stdout: branchOut } = await exec(`git -C "${AW_HOME}" rev-parse --abbrev-ref HEAD`);
|
|
102
|
+
const resumedBranch = branchOut.trim();
|
|
103
|
+
if (['upload/', 'remove/', 'sync/'].some(p => resumedBranch.startsWith(p))) {
|
|
104
|
+
try { await exec(`git -C "${AW_HOME}" push --force-with-lease origin "${resumedBranch}"`); } catch { /* non-blocking */ }
|
|
105
|
+
}
|
|
106
|
+
} catch {
|
|
107
|
+
// Could happen if there are more conflicting commits in the rebase sequence,
|
|
108
|
+
// or if the resolved changes result in an empty commit (skip it).
|
|
109
|
+
try {
|
|
110
|
+
await exec(`git -C "${AW_HOME}" rebase --skip`);
|
|
111
|
+
log.logStep('Empty commit skipped during rebase continuation.');
|
|
112
|
+
} catch {
|
|
113
|
+
log.logWarn('Rebase continuation failed — check `~/.aw` and resolve manually.');
|
|
114
|
+
if (!silent) fmt.outro(chalk.yellow('Pull skipped'));
|
|
115
|
+
return;
|
|
116
|
+
}
|
|
117
|
+
}
|
|
118
|
+
// Fall through to re-link IDE dirs after successful rebase continuation
|
|
85
119
|
}
|
|
86
120
|
|
|
87
121
|
// Fetch + merge latest
|
|
@@ -89,7 +123,7 @@ export async function pullCommand(args) {
|
|
|
89
123
|
s.start('Fetching latest from registry...');
|
|
90
124
|
let fetchResult = { updated: false, conflicts: [] };
|
|
91
125
|
try {
|
|
92
|
-
fetchResult = await fetchAndMerge(AW_HOME);
|
|
126
|
+
fetchResult = await fetchAndMerge(AW_HOME, { silent });
|
|
93
127
|
s.stop(fetchResult.updated ? 'Registry updated' : 'Already up to date');
|
|
94
128
|
} catch (e) {
|
|
95
129
|
s.stop(chalk.yellow('Fetch failed'));
|
|
@@ -113,13 +147,13 @@ export async function pullCommand(args) {
|
|
|
113
147
|
|
|
114
148
|
if (fetchResult.conflicts.length > 0) {
|
|
115
149
|
if (!silent) {
|
|
116
|
-
// Interactive mode:
|
|
117
|
-
|
|
150
|
+
// Interactive mode: rebase is paused with conflict markers in the working tree.
|
|
151
|
+
// Leave it for the user to resolve in their IDE, then re-run `aw pull`.
|
|
118
152
|
log.logWarn(`Merge conflict in: ${fetchResult.conflicts.join(', ')}`);
|
|
119
153
|
log.logWarn('Merge aborted — your branch is unchanged. Resolve conflicts and run `aw pull` again.');
|
|
120
154
|
return;
|
|
121
155
|
}
|
|
122
|
-
// Silent mode:
|
|
156
|
+
// Silent mode: rebase was already aborted in fetchAndMerge; just report.
|
|
123
157
|
log.logWarn(`Conflicts in: ${fetchResult.conflicts.join(', ')}`);
|
|
124
158
|
}
|
|
125
159
|
|
|
@@ -169,9 +203,6 @@ export async function pullCommand(args) {
|
|
|
169
203
|
}
|
|
170
204
|
}
|
|
171
205
|
|
|
172
|
-
// Sync content/ → platform/docs/
|
|
173
|
-
syncDocs(AW_HOME, GLOBAL_AW_DIR);
|
|
174
|
-
|
|
175
206
|
// Re-link IDE dirs
|
|
176
207
|
if (!args._skipIntegrate) {
|
|
177
208
|
const projectRegistryDir = cwd !== HOME ? join(cwd, '.aw', REGISTRY_DIR) : null;
|
|
@@ -198,16 +229,6 @@ export async function pullCommand(args) {
|
|
|
198
229
|
}
|
|
199
230
|
}
|
|
200
231
|
|
|
201
|
-
/**
|
|
202
|
-
* Sync ~/.aw/content/ markdown files → ~/.aw_registry/platform/docs/
|
|
203
|
-
*/
|
|
204
|
-
function syncDocs(awHome, globalAwDir) {
|
|
205
|
-
const contentSrc = join(awHome, DOCS_SOURCE_DIR);
|
|
206
|
-
if (!existsSync(contentSrc)) return;
|
|
207
|
-
const docsDest = join(globalAwDir, 'platform', 'docs');
|
|
208
|
-
syncMarkdownTree(contentSrc, docsDest);
|
|
209
|
-
}
|
|
210
|
-
|
|
211
232
|
/**
|
|
212
233
|
* pullAsync — kept for backward compat; now delegates to pullCommand.
|
|
213
234
|
*/
|
|
@@ -216,58 +237,6 @@ export async function pullAsync(args) {
|
|
|
216
237
|
return { pattern: args._positional?.[0] || '', actions: [], conflictCount: 0 };
|
|
217
238
|
}
|
|
218
239
|
|
|
219
|
-
/**
|
|
220
|
-
* Collect all .md file paths (relative) in a directory tree.
|
|
221
|
-
*/
|
|
222
|
-
function collectMarkdownPaths(dir, base) {
|
|
223
|
-
const paths = new Set();
|
|
224
|
-
if (!existsSync(dir)) return paths;
|
|
225
|
-
for (const entry of readdirSync(dir, { withFileTypes: true })) {
|
|
226
|
-
if (entry.name.startsWith('.')) continue;
|
|
227
|
-
const full = join(dir, entry.name);
|
|
228
|
-
if (entry.isDirectory()) {
|
|
229
|
-
for (const p of collectMarkdownPaths(full, base)) paths.add(p);
|
|
230
|
-
} else if (entry.name.endsWith('.md')) {
|
|
231
|
-
paths.add(relative(base, full));
|
|
232
|
-
}
|
|
233
|
-
}
|
|
234
|
-
return paths;
|
|
235
|
-
}
|
|
236
|
-
|
|
237
|
-
/**
|
|
238
|
-
* Sync .md files from src to dest: copy new/changed, delete removed, prune empty dirs.
|
|
239
|
-
*/
|
|
240
|
-
function syncMarkdownTree(src, dest) {
|
|
241
|
-
mkdirSync(dest, { recursive: true });
|
|
242
|
-
|
|
243
|
-
const remotePaths = collectMarkdownPaths(src, src);
|
|
244
|
-
const localPaths = collectMarkdownPaths(dest, dest);
|
|
245
|
-
|
|
246
|
-
for (const rel of remotePaths) {
|
|
247
|
-
const srcPath = join(src, rel);
|
|
248
|
-
const destPath = join(dest, rel);
|
|
249
|
-
mkdirSync(join(dest, rel, '..'), { recursive: true });
|
|
250
|
-
copyFileSync(srcPath, destPath);
|
|
251
|
-
}
|
|
252
|
-
|
|
253
|
-
for (const rel of localPaths) {
|
|
254
|
-
if (!remotePaths.has(rel)) {
|
|
255
|
-
const destPath = join(dest, rel);
|
|
256
|
-
try { unlinkSync(destPath); } catch { /* best effort */ }
|
|
257
|
-
}
|
|
258
|
-
}
|
|
259
|
-
|
|
260
|
-
function pruneEmpty(dir) {
|
|
261
|
-
if (!existsSync(dir)) return;
|
|
262
|
-
for (const entry of readdirSync(dir, { withFileTypes: true })) {
|
|
263
|
-
if (entry.isDirectory()) pruneEmpty(join(dir, entry.name));
|
|
264
|
-
}
|
|
265
|
-
try {
|
|
266
|
-
if (readdirSync(dir).length === 0 && dir !== dest) rmdirSync(dir);
|
|
267
|
-
} catch { /* best effort */ }
|
|
268
|
-
}
|
|
269
|
-
pruneEmpty(dest);
|
|
270
|
-
}
|
|
271
240
|
|
|
272
241
|
function registerMcp(namespace) {
|
|
273
242
|
const mcpUrl = process.env.GHL_MCP_URL;
|
package/commands/push.mjs
CHANGED
|
@@ -1,7 +1,8 @@
|
|
|
1
1
|
// commands/push.mjs — Push local agents/skills to registry via PR using persistent git clone
|
|
2
2
|
|
|
3
3
|
import { existsSync, statSync, readFileSync, appendFileSync } from 'node:fs';
|
|
4
|
-
import { join } from 'node:path';
|
|
4
|
+
import { join, dirname } from 'node:path';
|
|
5
|
+
import { fileURLToPath } from 'node:url';
|
|
5
6
|
import { exec as execCb, execFile as execFileCb } from 'node:child_process';
|
|
6
7
|
import { promisify } from 'node:util';
|
|
7
8
|
import { homedir } from 'node:os';
|
|
@@ -25,6 +26,9 @@ import {
|
|
|
25
26
|
logAheadOfMain,
|
|
26
27
|
} from '../git.mjs';
|
|
27
28
|
|
|
29
|
+
const __dirname = dirname(fileURLToPath(import.meta.url));
|
|
30
|
+
const VERSION = JSON.parse(readFileSync(join(__dirname, '..', 'package.json'), 'utf8')).version;
|
|
31
|
+
|
|
28
32
|
const PUSHABLE_TYPES = ['agents', 'skills', 'commands', 'evals'];
|
|
29
33
|
|
|
30
34
|
// ── PR content generation ────────────────────────────────────────────
|
|
@@ -187,11 +191,14 @@ function generateCommitMsg(files) {
|
|
|
187
191
|
const deletedParts = Object.entries(groupBy(deleted, 'type')).map(([t, items]) => `${items.length} ${singular(t, items.length)} removed`);
|
|
188
192
|
const countParts = [...addedParts, ...deletedParts];
|
|
189
193
|
|
|
194
|
+
const version = VERSION;
|
|
195
|
+
const trailer = `\n\nGenerated-By: aw/${version}`;
|
|
196
|
+
|
|
190
197
|
if (files.length === 1) {
|
|
191
198
|
const f = files[0];
|
|
192
|
-
return `registry: ${f.deleted ? 'remove' : 'add'} ${f.type}/${f.slug} ${f.deleted ? 'from' : 'to'} ${f.namespace}`;
|
|
199
|
+
return `registry: ${f.deleted ? 'remove' : 'add'} ${f.type}/${f.slug} ${f.deleted ? 'from' : 'to'} ${f.namespace}${trailer}`;
|
|
193
200
|
}
|
|
194
|
-
return `registry: sync ${files.length} files (${countParts.join(', ')})`;
|
|
201
|
+
return `registry: sync ${files.length} files (${countParts.join(', ')})${trailer}`;
|
|
195
202
|
}
|
|
196
203
|
|
|
197
204
|
// ── Batch file collection from folder ────────────────────────────────
|
|
@@ -319,7 +326,7 @@ async function createOrUpdatePR(awHome, branch, prTitle, prBody) {
|
|
|
319
326
|
// - Always creates a new branch from current state, commits, pushes, stays there.
|
|
320
327
|
// - Every aw push = one new branch + one new PR. No force-push, no reuse.
|
|
321
328
|
// Global flow (worktreeFlow=false): same but returns to main after push.
|
|
322
|
-
async function doPush(files, awHome, dryRun, worktreeFlow = false, preStaged = false) {
|
|
329
|
+
async function doPush(files, awHome, dryRun, worktreeFlow = false, preStaged = false, extraPaths = []) {
|
|
323
330
|
const added = files.filter(f => !f.deleted);
|
|
324
331
|
const deleted = files.filter(f => f.deleted);
|
|
325
332
|
|
|
@@ -369,6 +376,10 @@ async function doPush(files, awHome, dryRun, worktreeFlow = false, preStaged = f
|
|
|
369
376
|
if (newNamespaces.length > 0 && existsSync(codeownersPath)) {
|
|
370
377
|
pathsToStage.push('CODEOWNERS');
|
|
371
378
|
}
|
|
379
|
+
// Also stage any extra paths (content/, CODEOWNERS manual edits) passed from the caller
|
|
380
|
+
for (const p of extraPaths) {
|
|
381
|
+
if (!pathsToStage.includes(p)) pathsToStage.push(p);
|
|
382
|
+
}
|
|
372
383
|
|
|
373
384
|
const commitMsg = generateCommitMsg(files);
|
|
374
385
|
const prTitle = generatePrTitle(files, awHome);
|
|
@@ -449,10 +460,31 @@ export async function pushCommand(args) {
|
|
|
449
460
|
|
|
450
461
|
// No args = staged files first (git commit behaviour), else auto-detect all changes
|
|
451
462
|
if (!input) {
|
|
463
|
+
// Extra paths outside .aw_registry/ that aw also manages: content/ and CODEOWNERS.
|
|
464
|
+
// Detect staged variants for staged-mode and unstaged variants for auto-mode.
|
|
465
|
+
const getExtraStagedPaths = async () => {
|
|
466
|
+
try {
|
|
467
|
+
const { stdout } = await exec(`git -C "${awHome}" diff --cached --name-only -- content/ CODEOWNERS`);
|
|
468
|
+
return stdout.trim().split('\n').filter(Boolean);
|
|
469
|
+
} catch { return []; }
|
|
470
|
+
};
|
|
471
|
+
const getExtraChangedPaths = async () => {
|
|
472
|
+
try {
|
|
473
|
+
const { stdout } = await exec(`git -C "${awHome}" status --porcelain -- content/ CODEOWNERS`);
|
|
474
|
+
// git status --porcelain prefix is XY (2 chars) + optional space + path.
|
|
475
|
+
// Staged-only files: `M path` (2-char prefix); unstaged files: ` M path` (3-char prefix).
|
|
476
|
+
// slice(2).trimStart() handles both cases correctly.
|
|
477
|
+
return stdout.trim().split('\n').filter(Boolean)
|
|
478
|
+
.map(l => l.slice(2).trimStart())
|
|
479
|
+
.filter(Boolean);
|
|
480
|
+
} catch { return []; }
|
|
481
|
+
};
|
|
482
|
+
|
|
452
483
|
// ── Staged mode: use whatever is in the index ──────────────────────
|
|
453
484
|
const staged = getStagedFiles(awHome, REGISTRY_DIR);
|
|
485
|
+
const extraStaged = await getExtraStagedPaths();
|
|
454
486
|
|
|
455
|
-
if (staged.length > 0) {
|
|
487
|
+
if (staged.length > 0 || extraStaged.length > 0) {
|
|
456
488
|
const files = staged.map(f => {
|
|
457
489
|
const meta = parseRegistryPath(f.registryPath);
|
|
458
490
|
const parts = f.registryPath.split('/');
|
|
@@ -465,26 +497,28 @@ export async function pushCommand(args) {
|
|
|
465
497
|
deleted: f.deleted,
|
|
466
498
|
};
|
|
467
499
|
});
|
|
468
|
-
|
|
469
|
-
|
|
500
|
+
const totalCount = files.length + extraStaged.length;
|
|
501
|
+
fmt.logInfo(`${chalk.dim('mode:')} staged (${totalCount} file${totalCount > 1 ? 's' : ''})`);
|
|
502
|
+
await doPush(files, awHome, dryRun, worktreeFlow, true, extraStaged);
|
|
470
503
|
return;
|
|
471
504
|
}
|
|
472
505
|
|
|
473
506
|
// ── Auto mode: stage all changes in .aw_registry/ ─────────────────
|
|
474
507
|
const changes = detectChanges(awHome, REGISTRY_DIR);
|
|
508
|
+
const extraChanged = await getExtraChangedPaths();
|
|
475
509
|
const allEntries = [
|
|
476
510
|
...changes.modified.map(e => ({ ...e, deleted: false })),
|
|
477
511
|
...changes.untracked.map(e => ({ ...e, deleted: false })),
|
|
478
512
|
...changes.deleted.map(e => ({ ...e, deleted: true })),
|
|
479
513
|
];
|
|
480
514
|
|
|
481
|
-
if (allEntries.length === 0 && commitsAheadOfMain(awHome) > 0) {
|
|
515
|
+
if (allEntries.length === 0 && extraChanged.length === 0 && commitsAheadOfMain(awHome) > 0) {
|
|
482
516
|
fmt.logInfo(`${chalk.dim('mode:')} auto (no new changes — branching current state)`);
|
|
483
517
|
await doPush([], awHome, dryRun, worktreeFlow, false);
|
|
484
518
|
return;
|
|
485
519
|
}
|
|
486
520
|
|
|
487
|
-
if (allEntries.length === 0) {
|
|
521
|
+
if (allEntries.length === 0 && extraChanged.length === 0) {
|
|
488
522
|
fmt.cancel('Nothing to push — no staged or modified files.\n\n Stage files in your IDE or use `aw status` to see changes.');
|
|
489
523
|
return;
|
|
490
524
|
}
|
|
@@ -503,7 +537,7 @@ export async function pushCommand(args) {
|
|
|
503
537
|
};
|
|
504
538
|
});
|
|
505
539
|
|
|
506
|
-
if (files.length === 0) {
|
|
540
|
+
if (files.length === 0 && extraChanged.length === 0) {
|
|
507
541
|
if (commitsAheadOfMain(awHome) > 0) {
|
|
508
542
|
fmt.logInfo(`${chalk.dim('mode:')} auto (no new changes — branching current state)`);
|
|
509
543
|
await doPush([], awHome, dryRun, worktreeFlow, false);
|
|
@@ -513,8 +547,9 @@ export async function pushCommand(args) {
|
|
|
513
547
|
return;
|
|
514
548
|
}
|
|
515
549
|
|
|
516
|
-
|
|
517
|
-
|
|
550
|
+
const totalCount = files.length + extraChanged.length;
|
|
551
|
+
fmt.logInfo(`${chalk.dim('mode:')} auto (${totalCount} file${totalCount > 1 ? 's' : ''} — stage specific files to push a subset)`);
|
|
552
|
+
await doPush(files, awHome, dryRun, worktreeFlow, false, extraChanged);
|
|
518
553
|
return;
|
|
519
554
|
}
|
|
520
555
|
|
|
@@ -0,0 +1,31 @@
|
|
|
1
|
+
// commands/telemetry.mjs — `aw telemetry [enable|disable|status]`
|
|
2
|
+
|
|
3
|
+
import { enableTelemetry, disableTelemetry, getStatus } from '../telemetry.mjs';
|
|
4
|
+
import * as fmt from '../fmt.mjs';
|
|
5
|
+
import { chalk } from '../fmt.mjs';
|
|
6
|
+
|
|
7
|
+
export async function telemetryCommand(args) {
|
|
8
|
+
const sub = args._positional?.[0];
|
|
9
|
+
|
|
10
|
+
if (sub === 'disable') {
|
|
11
|
+
disableTelemetry();
|
|
12
|
+
fmt.logSuccess('Telemetry disabled. No anonymous usage data will be sent.');
|
|
13
|
+
return;
|
|
14
|
+
}
|
|
15
|
+
|
|
16
|
+
if (sub === 'enable') {
|
|
17
|
+
enableTelemetry();
|
|
18
|
+
fmt.logSuccess('Telemetry enabled. Anonymous usage stats help improve aw.');
|
|
19
|
+
return;
|
|
20
|
+
}
|
|
21
|
+
|
|
22
|
+
// status (default)
|
|
23
|
+
const status = getStatus();
|
|
24
|
+
fmt.intro('aw telemetry');
|
|
25
|
+
fmt.logStep(`Status: ${status.enabled ? chalk.green('enabled') : chalk.red('disabled')}`);
|
|
26
|
+
fmt.logStep(`Machine ID: ${chalk.dim(status.machine_id)}`);
|
|
27
|
+
fmt.logStep(`Config: ${chalk.dim(status.config_path)}`);
|
|
28
|
+
fmt.logMessage('');
|
|
29
|
+
fmt.logMessage(` ${chalk.dim('aw telemetry disable')} — opt out of anonymous analytics`);
|
|
30
|
+
fmt.logMessage(` ${chalk.dim('aw telemetry enable')} — re-enable analytics`);
|
|
31
|
+
}
|
package/constants.mjs
CHANGED
|
@@ -4,7 +4,7 @@ import { homedir } from 'node:os';
|
|
|
4
4
|
import { join } from 'node:path';
|
|
5
5
|
|
|
6
6
|
/** Base branch for PRs and sync checkout */
|
|
7
|
-
export const REGISTRY_BASE_BRANCH = process.env.AW_REGISTRY_BASE_BRANCH || '
|
|
7
|
+
export const REGISTRY_BASE_BRANCH = process.env.AW_REGISTRY_BASE_BRANCH || 'main';
|
|
8
8
|
|
|
9
9
|
/** Default registry repository */
|
|
10
10
|
export const REGISTRY_REPO = 'GoHighLevel/platform-docs';
|
|
@@ -24,3 +24,6 @@ export const DOCS_SOURCE_DIR = 'content';
|
|
|
24
24
|
|
|
25
25
|
/** Persistent git clone root — ~/.aw/ */
|
|
26
26
|
export const AW_HOME = join(homedir(), '.aw');
|
|
27
|
+
|
|
28
|
+
/** Telemetry endpoint — override with AW_TELEMETRY_URL env var */
|
|
29
|
+
export const TELEMETRY_URL = process.env.AW_TELEMETRY_URL || 'https://services.leadconnectorhq.com/v1/events';
|
package/fmt.mjs
CHANGED
|
@@ -72,7 +72,21 @@ export const isCancel = p.isCancel;
|
|
|
72
72
|
|
|
73
73
|
export const spinner = () => p.spinner();
|
|
74
74
|
|
|
75
|
+
export class CancelError extends Error {
|
|
76
|
+
constructor(message, { exitCode = 1 } = {}) {
|
|
77
|
+
super(message);
|
|
78
|
+
this.name = 'CancelError';
|
|
79
|
+
this.exitCode = exitCode;
|
|
80
|
+
}
|
|
81
|
+
}
|
|
82
|
+
|
|
75
83
|
export function cancel(msg) {
|
|
84
|
+
p.cancel(msg);
|
|
85
|
+
throw new CancelError(msg);
|
|
86
|
+
}
|
|
87
|
+
|
|
88
|
+
/** Hard exit — for use in process exception handlers where throwing is unsafe */
|
|
89
|
+
export function cancelAndExit(msg) {
|
|
76
90
|
p.cancel(msg);
|
|
77
91
|
process.exit(1);
|
|
78
92
|
}
|
package/git.mjs
CHANGED
|
@@ -293,22 +293,30 @@ export async function fetchAndMerge(awHome, { silent = true } = {}) {
|
|
|
293
293
|
let updated = false;
|
|
294
294
|
const conflicts = [];
|
|
295
295
|
|
|
296
|
-
// ── 3
|
|
297
|
-
|
|
298
|
-
|
|
299
|
-
|
|
300
|
-
|
|
301
|
-
|
|
302
|
-
|
|
303
|
-
|
|
304
|
-
|
|
305
|
-
//
|
|
306
|
-
// For push branches: stacks our local commits on top of latest remote.
|
|
307
|
-
// For base branch: only reached if local commits exist (unusual).
|
|
308
|
-
// Never uses --no-edit merge — that disables sparse checkout on blob:none.
|
|
296
|
+
// ── 3 + 4. Rebase onto remote REGISTRY_BASE_BRANCH ──────────────────────
|
|
297
|
+
// Handles both cases in one path:
|
|
298
|
+
// • No local commits (base branch, clean tree) → fast-forward via rebase
|
|
299
|
+
// • Local commits (push branch) → rebases on top
|
|
300
|
+
// • Uncommitted local changes → rebase refuses to run,
|
|
301
|
+
// sync is skipped this run, changes are preserved
|
|
302
|
+
//
|
|
303
|
+
// We avoid `merge --ff-only` and `merge --no-edit` entirely: both trigger a
|
|
304
|
+
// git 2.46+ bug on blob:none + no-cone sparse-checkout repos that silently
|
|
305
|
+
// drops bare-name patterns (e.g. "content", "CODEOWNERS") when HEAD advances.
|
|
309
306
|
try {
|
|
310
307
|
await exec(`git -C "${awHome}" rebase origin/${REGISTRY_BASE_BRANCH}`);
|
|
311
308
|
updated = true;
|
|
309
|
+
// Push branch rebase rewrites commit SHAs — force-push so origin/upload/...
|
|
310
|
+
// stays in sync with the rebased local branch. Without this, VS Code and
|
|
311
|
+
// plain `git pull` show a false divergence ("2↑ 1↓") that can't be resolved
|
|
312
|
+
// without specifying a reconcile strategy.
|
|
313
|
+
// --force-with-lease is safer than --force: it refuses to overwrite if
|
|
314
|
+
// someone else pushed to the remote tracking branch since our last fetch.
|
|
315
|
+
if (isPushBranch) {
|
|
316
|
+
try {
|
|
317
|
+
await exec(`git -C "${awHome}" push --force-with-lease origin "${currentBranch}"`);
|
|
318
|
+
} catch { /* non-blocking — divergence will be resolved on next aw push */ }
|
|
319
|
+
}
|
|
312
320
|
} catch {
|
|
313
321
|
try {
|
|
314
322
|
const { stdout } = await exec(`git -C "${awHome}" diff --name-only --diff-filter=U`);
|
package/hooks.mjs
CHANGED
|
@@ -51,15 +51,21 @@ exit 0
|
|
|
51
51
|
|
|
52
52
|
const POST_MERGE = makeDispatcher('post-merge', `\
|
|
53
53
|
if command -v aw >/dev/null 2>&1; then
|
|
54
|
-
aw
|
|
54
|
+
# Unset ALL git env vars so aw's "git -C ~/.aw" runs against the correct repo,
|
|
55
|
+
# not the project repo that triggered this hook. Using git's own list of
|
|
56
|
+
# local env vars is more robust than hardcoding specific names.
|
|
57
|
+
unset $(git rev-parse --local-env-vars 2>/dev/null)
|
|
58
|
+
AW_TRIGGER=hook:post-merge aw pull --silent >/dev/null 2>&1 &
|
|
55
59
|
fi`);
|
|
56
60
|
|
|
57
61
|
const POST_CHECKOUT = makeDispatcher('post-checkout', `\
|
|
62
|
+
# Unset ALL git env vars so aw's "git -C ~/.aw" runs against the correct repo.
|
|
63
|
+
unset $(git rev-parse --local-env-vars 2>/dev/null)
|
|
58
64
|
if [ -d "$HOME/.aw" ] && [ ! -d ".aw" ] && [ -d ".git" ] && command -v aw >/dev/null 2>&1; then
|
|
59
|
-
aw link >/dev/null 2>&1 &
|
|
65
|
+
AW_TRIGGER=hook:post-checkout aw link >/dev/null 2>&1 &
|
|
60
66
|
fi
|
|
61
67
|
if command -v aw >/dev/null 2>&1; then
|
|
62
|
-
aw
|
|
68
|
+
AW_TRIGGER=hook:post-checkout aw pull --silent >/dev/null 2>&1 &
|
|
63
69
|
fi`);
|
|
64
70
|
|
|
65
71
|
// post-commit: written separately — needs different guard logic than other hooks.
|
|
@@ -68,13 +74,16 @@ fi`);
|
|
|
68
74
|
const POST_COMMIT = `#!/bin/sh
|
|
69
75
|
# aw: global post-commit dispatcher (installed by aw init)
|
|
70
76
|
|
|
77
|
+
# Unset ALL git env vars so aw's "git -C ~/.aw" runs against the correct repo.
|
|
78
|
+
unset $(git rev-parse --local-env-vars 2>/dev/null)
|
|
79
|
+
|
|
71
80
|
# Skip temp sparse checkouts
|
|
72
81
|
case "$(pwd)" in /tmp/aw-*|/var/folders/*/aw-*) exit 0 ;; esac
|
|
73
82
|
|
|
74
83
|
# Committing inside .aw/ worktree itself → refresh from parent project dir
|
|
75
84
|
case "$(pwd)" in */.aw)
|
|
76
85
|
if command -v aw >/dev/null 2>&1; then
|
|
77
|
-
(cd "$(dirname "$(pwd)")" && aw link >/dev/null 2>&1) &
|
|
86
|
+
(cd "$(dirname "$(pwd)")" && AW_TRIGGER=hook:post-commit aw link >/dev/null 2>&1) &
|
|
78
87
|
fi
|
|
79
88
|
exit 0
|
|
80
89
|
;;
|
|
@@ -82,7 +91,7 @@ esac
|
|
|
82
91
|
|
|
83
92
|
# Committing in a project that has a .aw/ worktree → refresh symlinks
|
|
84
93
|
if [ -f ".aw/.git" ] && command -v aw >/dev/null 2>&1; then
|
|
85
|
-
aw link >/dev/null 2>&1 &
|
|
94
|
+
AW_TRIGGER=hook:post-commit aw link >/dev/null 2>&1 &
|
|
86
95
|
fi
|
|
87
96
|
|
|
88
97
|
# Chain to previous hooksPath
|
package/mcp.mjs
CHANGED
|
@@ -297,10 +297,16 @@ export async function setupMcp(cwd, namespace, { silent = false } = {}) {
|
|
|
297
297
|
}
|
|
298
298
|
|
|
299
299
|
// ── Codex: ~/.codex/config.toml (TOML format) ──
|
|
300
|
+
// Also merge into the ECC source file (~/.aw-ecc/.codex/config.toml) so
|
|
301
|
+
// that when installAwEcc re-copies it on subsequent inits the ghl-ai entry
|
|
302
|
+
// survives — without this, each re-init overwrites ~/.codex/config.toml
|
|
303
|
+
// from the ECC source which doesn't have the ghl-ai block.
|
|
300
304
|
const codexTomlPath = join(HOME, '.codex', 'config.toml');
|
|
301
305
|
if (mergeTomlMcpServer(codexTomlPath, 'ghl-ai', ghlAiServerLocal)) {
|
|
302
306
|
updatedFiles.push(codexTomlPath);
|
|
303
307
|
}
|
|
308
|
+
const eccCodexTomlPath = join(HOME, '.aw-ecc', '.codex', 'config.toml');
|
|
309
|
+
mergeTomlMcpServer(eccCodexTomlPath, 'ghl-ai', ghlAiServerLocal);
|
|
304
310
|
|
|
305
311
|
// Deduplicate
|
|
306
312
|
const unique = [...new Set(updatedFiles)];
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@ghl-ai/aw",
|
|
3
|
-
"version": "0.1.
|
|
3
|
+
"version": "0.1.37-beta.10",
|
|
4
4
|
"description": "Agentic Workspace CLI — pull, push & manage agents, skills and commands from the registry",
|
|
5
5
|
"type": "module",
|
|
6
6
|
"bin": "bin.js",
|
|
@@ -23,7 +23,8 @@
|
|
|
23
23
|
"apply.mjs",
|
|
24
24
|
"update.mjs",
|
|
25
25
|
"hooks.mjs",
|
|
26
|
-
"ecc.mjs"
|
|
26
|
+
"ecc.mjs",
|
|
27
|
+
"telemetry.mjs"
|
|
27
28
|
],
|
|
28
29
|
"engines": {
|
|
29
30
|
"node": ">=18.0.0"
|
package/telemetry.mjs
ADDED
|
@@ -0,0 +1,233 @@
|
|
|
1
|
+
// telemetry.mjs — Anonymous usage analytics. Zero new dependencies.
|
|
2
|
+
//
|
|
3
|
+
// Span-based: each command sends command_started + command_completed/command_failed,
|
|
4
|
+
// linked by a run_id. Orphaned started events = stuck/killed commands.
|
|
5
|
+
//
|
|
6
|
+
// Opt out: AW_TELEMETRY_DISABLED=1, DO_NOT_TRACK=1, or `aw telemetry disable`.
|
|
7
|
+
|
|
8
|
+
import { createHash, randomUUID } from 'node:crypto';
|
|
9
|
+
import { hostname, userInfo, platform, arch, release } from 'node:os';
|
|
10
|
+
import { readFileSync, writeFileSync, mkdirSync, existsSync, readdirSync } from 'node:fs';
|
|
11
|
+
import { join, dirname } from 'node:path';
|
|
12
|
+
import { fileURLToPath } from 'node:url';
|
|
13
|
+
import { execSync, execFileSync } from 'node:child_process';
|
|
14
|
+
import { TELEMETRY_URL, AW_HOME } from './constants.mjs';
|
|
15
|
+
|
|
16
|
+
const __dirname = dirname(fileURLToPath(import.meta.url));
|
|
17
|
+
const VERSION = JSON.parse(readFileSync(join(__dirname, 'package.json'), 'utf8')).version;
|
|
18
|
+
|
|
19
|
+
const CONFIG_PATH = join(AW_HOME, '.telemetry');
|
|
20
|
+
|
|
21
|
+
// ── Config ──────────────────────────────────────────────────────────
|
|
22
|
+
|
|
23
|
+
// Derive a stable, pseudonymous machine identifier from hostname + username.
// The same machine and user always hash to the same id; the raw values never
// leave the box — only the SHA-256 hex digest is reported.
export function generateMachineId() {
  const fingerprint = [hostname(), userInfo().username].join(':');
  const digest = createHash('sha256');
  digest.update(fingerprint);
  return digest.digest('hex');
}
|
|
27
|
+
|
|
28
|
+
/**
 * Load the telemetry config from disk, creating it on first run.
 *
 * Always returns an object with at least { machine_id, enabled, noticed }.
 * A config written by an older version (or hand-edited) may parse fine but
 * lack machine_id — backfill and persist it so events never carry an
 * undefined id. A corrupt or non-object file is silently recreated with
 * defaults (telemetry must never break the CLI).
 */
export function loadConfig() {
  try {
    if (existsSync(CONFIG_PATH)) {
      const config = JSON.parse(readFileSync(CONFIG_PATH, 'utf8'));
      // Guard against a file containing e.g. a bare string, number, or array.
      if (config && typeof config === 'object' && !Array.isArray(config)) {
        if (!config.machine_id) {
          config.machine_id = generateMachineId();
          saveConfig(config);
        }
        return config;
      }
    }
  } catch { /* corrupt file — recreate */ }

  const config = {
    machine_id: generateMachineId(),
    enabled: true,
    noticed: false,
  };
  saveConfig(config);
  return config;
}
|
|
43
|
+
|
|
44
|
+
// Persist the telemetry config as pretty-printed JSON, creating the parent
// directory if needed. Failures are deliberately ignored: telemetry must
// never take the CLI down.
export function saveConfig(config) {
  try {
    const parent = dirname(CONFIG_PATH);
    mkdirSync(parent, { recursive: true });
    const body = `${JSON.stringify(config, null, 2)}\n`;
    writeFileSync(CONFIG_PATH, body);
  } catch {
    // best effort — don't break CLI
  }
}
|
|
50
|
+
|
|
51
|
+
// ── Opt-out detection ───────────────────────────────────────────────
|
|
52
|
+
|
|
53
|
+
// True when telemetry must not be sent: either the cross-tool DO_NOT_TRACK
// convention, our own AW_TELEMETRY_DISABLED escape hatch, or a persisted
// opt-out in the config (set via `aw telemetry disable`).
export function isDisabled(config) {
  const envOptOut =
    process.env.DO_NOT_TRACK === '1' ||
    process.env.AW_TELEMETRY_DISABLED === '1';
  if (envOptOut) return true;
  return config?.enabled === false;
}
|
|
59
|
+
|
|
60
|
+
// ── Environment collection ──────────────────────────────────────────
|
|
61
|
+
|
|
62
|
+
/**
 * Short commit hash of the registry checkout at AW_HOME, or null when the
 * directory is missing, not a git repo, git is unavailable, or the call
 * times out.
 *
 * Uses execFileSync with an argv array instead of a shell string so an
 * AW_HOME containing quotes, spaces, or shell metacharacters can neither
 * break the command nor inject into it.
 */
function getRegistryHead() {
  try {
    return execFileSync('git', ['-C', AW_HOME, 'rev-parse', '--short', 'HEAD'], {
      encoding: 'utf8', stdio: ['pipe', 'pipe', 'pipe'], timeout: 2000,
    }).trim();
  } catch { return null; }
}
|
|
69
|
+
|
|
70
|
+
// Best-effort detection of IDE/agent tooling configured for the current
// project: reports which well-known config directories exist in cwd.
// Filesystem errors on any individual check are ignored.
function detectIDEs() {
  const markers = {
    '.cursor': 'cursor',
    '.vscode': 'vscode',
    '.claude': 'claude',
    '.codex': 'codex',
  };
  const found = [];
  const cwd = process.cwd();
  for (const dir of Object.keys(markers)) {
    try {
      if (existsSync(join(cwd, dir))) found.push(markers[dir]);
    } catch {
      // unreadable path — skip
    }
  }
  return found;
}
|
|
86
|
+
|
|
87
|
+
// Registry namespace from the sync config under AW_HOME, or null when the
// config is missing, unreadable, or has no namespace field.
function getNamespace() {
  try {
    const cfgPath = join(AW_HOME, '.aw_registry', '.sync-config.json');
    if (!existsSync(cfgPath)) return null;
    const parsed = JSON.parse(readFileSync(cfgPath, 'utf8'));
    return parsed.namespace || null;
  } catch {
    return null;
  }
}
|
|
97
|
+
|
|
98
|
+
// Pseudonymous 8-hex-char project identifier: truncated SHA-256 of the
// absolute cwd path. The path itself is never transmitted.
function projectHash() {
  const digest = createHash('sha256').update(process.cwd()).digest('hex');
  return digest.substring(0, 8);
}
|
|
101
|
+
|
|
102
|
+
/**
 * Read a single global git config value (e.g. 'user.name').
 * Returns null when git is missing, the key is unset, the value is empty,
 * or the call times out.
 *
 * Uses execFileSync with an argv array rather than interpolating `key`
 * into a shell string, so an unexpected key value cannot inject shell
 * syntax.
 */
function getGitConfig(key) {
  try {
    return execFileSync('git', ['config', '--global', key], {
      encoding: 'utf8', stdio: ['pipe', 'pipe', 'pipe'], timeout: 1000,
    }).trim() || null;
  } catch { return null; }
}
|
|
109
|
+
|
|
110
|
+
// Snapshot of environment/context attached to every telemetry event.
//
// NOTE(review): github_user / github_email come straight from global git
// config, so despite the "anonymous" framing in the file header these
// events can carry a real name and email address — confirm this is the
// intended privacy posture.
export function collectEnv(config) {
  return {
    machine_id: config.machine_id,   // sha256(hostname:username) — see generateMachineId()
    aw_version: VERSION,             // this package's version, read from package.json
    node_version: process.version,
    os: platform(),
    arch: arch(),
    // Heuristic CI detection via env vars set by common CI systems.
    is_ci: !!(process.env.CI || process.env.CONTINUOUS_INTEGRATION || process.env.BUILD_NUMBER),
    namespace: getNamespace(),       // registry namespace from .sync-config.json, or null
    ides_detected: detectIDEs(),     // IDE config dirs present in cwd
    project_hash: projectHash(),     // truncated sha256 of cwd — path itself not sent
    trigger: process.env.AW_TRIGGER || 'interactive',
    registry_head: getRegistryHead(),// short HEAD of the AW_HOME checkout, or null
    github_user: getGitConfig('user.name'),
    github_email: getGitConfig('user.email'),
  };
}
|
|
127
|
+
|
|
128
|
+
// ── Network ─────────────────────────────────────────────────────────
|
|
129
|
+
|
|
130
|
+
/**
 * Fire a single telemetry POST with a 3s timeout. Best-effort by design:
 * any failure (network, DNS, timeout, bad URL, unserializable payload) is
 * swallowed so telemetry can never surface an error to the user.
 */
export async function send(payload) {
  try {
    const request = {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify(payload),
      signal: AbortSignal.timeout(3000),
    };
    await fetch(TELEMETRY_URL, request);
  } catch {
    // Swallow ALL errors — telemetry must never break the CLI
  }
}
|
|
142
|
+
|
|
143
|
+
// ── Span API ────────────────────────────────────────────────────────
|
|
144
|
+
|
|
145
|
+
/**
 * Begin a telemetry span for one CLI command.
 *
 * Sends a `command_started` event immediately (awaited, so it lands before
 * the command does any work) and returns a handle with:
 *   - runId:    UUID linking the started and completed/failed events
 *   - notice(): one-time first-run disclosure (TTY only, skipped on --silent)
 *   - end():    emits command_completed / command_cancelled / command_failed
 * When telemetry is disabled, notice() and end() are no-ops and nothing is sent.
 *
 * @param {string} command  command name, e.g. 'pull'
 * @param {object} [args]   parsed argv map: flag keys start with '-',
 *                          positionals live under '_positional'
 */
export async function startSpan(command, args) {
  const config = loadConfig();
  const disabled = isDisabled(config);
  const runId = randomUUID();
  const startTime = Date.now();

  // Split parsed args into flags and positionals for the payload.
  // NOTE(review): flag VALUES are sent verbatim (`--key=value`) — if any
  // command ever accepts secrets via flags this would leak them; confirm.
  const flags = [];
  const positional = [];
  if (args) {
    for (const [key, val] of Object.entries(args)) {
      if (key === '_positional') {
        positional.push(...(val || []));
      } else if (key.startsWith('-')) {
        // startsWith('-') already matches '--'-prefixed keys too
        if (val === true) flags.push(key);
        else flags.push(`${key}=${val}`);
      }
    }
  }

  const env = disabled ? null : collectEnv(config);

  // Await command_started so it always lands before the command runs
  if (!disabled) {
    await send({
      event: 'command_started',
      run_id: runId,
      timestamp: new Date().toISOString(),
      env,
      command: { name: command, args: positional, flags },
    });
  }

  return {
    runId,

    /** Show one-time first-run notice (only in interactive/TTY mode) */
    notice() {
      if (disabled || config.noticed) return;
      if (args?.['--silent'] || !process.stderr.isTTY) return;
      console.error('\u2139 Telemetry is on \u2014 anonymous usage stats help improve aw. Opt out: aw telemetry disable');
      config.noticed = true;
      saveConfig(config);
    },

    /**
     * End the span with an outcome.
     * @param {object} [o]
     * @param {string} [o.status]     'completed' | 'cancelled' | anything else => failed
     * @param {string} [o.error_type] error class/name for failed runs
     * @param {object} [o.data]       command-specific extras
     */
    async end({ status = 'completed', error_type = null, data = {} } = {}) {
      if (disabled) return;
      const duration_ms = Date.now() - startTime;
      // Re-read registry_head so completed event reflects post-command state
      const endEnv = { ...env, registry_head: getRegistryHead() };
      await send({
        event: status === 'completed' ? 'command_completed'
          : status === 'cancelled' ? 'command_cancelled'
          : 'command_failed',
        run_id: runId,
        timestamp: new Date().toISOString(),
        env: endEnv,
        command: { name: command, args: positional, flags },
        outcome: { status, duration_ms, error_type, data },
      });
    },
  };
}
|
|
209
|
+
|
|
210
|
+
// ── Telemetry management (for `aw telemetry` command) ───────────────
|
|
211
|
+
|
|
212
|
+
// Turn telemetry on (backing `aw telemetry enable`); persists and returns
// the updated config.
export function enableTelemetry() {
  const config = { ...loadConfig(), enabled: true };
  saveConfig(config);
  return config;
}
|
|
218
|
+
|
|
219
|
+
// Turn telemetry off (backing `aw telemetry disable`); persists and returns
// the updated config.
export function disableTelemetry() {
  const config = { ...loadConfig(), enabled: false };
  saveConfig(config);
  return config;
}
|
|
225
|
+
|
|
226
|
+
// Status snapshot for `aw telemetry status`. `enabled` reflects the
// effective state (config AND env opt-outs), not just the stored flag.
export function getStatus() {
  const config = loadConfig();
  const enabled = !isDisabled(config);
  const { machine_id } = config;
  return { enabled, machine_id, config_path: CONFIG_PATH };
}
|