atris 2.6.2 → 3.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (54)
  1. package/README.md +124 -34
  2. package/atris/CLAUDE.md +5 -1
  3. package/atris/atris.md +4 -0
  4. package/atris/features/README.md +24 -0
  5. package/atris/skills/autopilot/SKILL.md +74 -75
  6. package/atris/skills/endgame/SKILL.md +179 -0
  7. package/atris/skills/flow/SKILL.md +121 -0
  8. package/atris/skills/improve/SKILL.md +84 -0
  9. package/atris/skills/loop/SKILL.md +72 -0
  10. package/atris/skills/wiki/SKILL.md +61 -0
  11. package/atris/team/executor/MEMBER.md +10 -4
  12. package/atris/team/navigator/MEMBER.md +2 -0
  13. package/atris/team/validator/MEMBER.md +8 -5
  14. package/atris.md +33 -0
  15. package/bin/atris.js +210 -41
  16. package/commands/activate.js +28 -2
  17. package/commands/align.js +720 -0
  18. package/commands/auth.js +75 -2
  19. package/commands/autopilot.js +1213 -270
  20. package/commands/browse.js +100 -0
  21. package/commands/business.js +785 -12
  22. package/commands/clean.js +107 -2
  23. package/commands/computer.js +429 -0
  24. package/commands/context-sync.js +78 -8
  25. package/commands/experiments.js +351 -0
  26. package/commands/feedback.js +150 -0
  27. package/commands/fleet.js +395 -0
  28. package/commands/fork.js +127 -0
  29. package/commands/init.js +50 -1
  30. package/commands/learn.js +407 -0
  31. package/commands/lifecycle.js +94 -0
  32. package/commands/loop.js +114 -0
  33. package/commands/publish.js +129 -0
  34. package/commands/pull.js +434 -48
  35. package/commands/push.js +312 -164
  36. package/commands/review.js +149 -0
  37. package/commands/run.js +76 -43
  38. package/commands/serve.js +360 -0
  39. package/commands/setup.js +1 -1
  40. package/commands/soul.js +381 -0
  41. package/commands/status.js +119 -1
  42. package/commands/sync.js +147 -1
  43. package/commands/terminal.js +201 -0
  44. package/commands/wiki.js +376 -0
  45. package/commands/workflow.js +191 -74
  46. package/commands/workspace-clean.js +3 -3
  47. package/lib/endstate.js +259 -0
  48. package/lib/learnings.js +235 -0
  49. package/lib/manifest.js +1 -0
  50. package/lib/todo.js +9 -5
  51. package/lib/wiki.js +578 -0
  52. package/package.json +2 -2
  53. package/utils/api.js +48 -36
  54. package/utils/auth.js +1 -0
package/commands/pull.js CHANGED
@@ -8,9 +8,51 @@ const { getLogPath } = require('../lib/file-ops');
8
8
  const { parseJournalSections, mergeSections, reconstructJournal } = require('../lib/journal');
9
9
  const { loadBusinesses } = require('./business');
10
10
  const { loadManifest, saveManifest, computeFileHash, buildManifest, computeLocalHashes, threeWayCompare } = require('../lib/manifest');
11
+ const { normalizeWikiOnlyPrefix } = require('../lib/wiki');
12
+
13
function pruneEmptyParentDirs(filePath, stopDir) {
  // After deleting `filePath`, remove its now-empty ancestor directories,
  // walking upward until a non-empty directory or the `stopDir` boundary is
  // reached. The boundary directory itself is never removed.
  //
  // Fixes vs. previous version:
  //  • Resolve `current` too — boundary was resolved but `current` was not,
  //    so a relative `filePath` never matched and the prune silently no-oped.
  //  • Compare against boundary + path separator: a plain startsWith would
  //    let a sibling like "/tmp/foo-bar" pass the boundary check for
  //    stopDir "/tmp/foo" and get pruned outside the workspace.
  let current = path.resolve(path.dirname(filePath));
  const boundary = path.resolve(stopDir);
  const boundaryPrefix = boundary.endsWith(path.sep) ? boundary : boundary + path.sep;
  while (current !== boundary && current.startsWith(boundaryPrefix)) {
    try {
      // Stop at the first directory that still has entries.
      if (fs.readdirSync(current).length > 0) break;
      fs.rmdirSync(current);
      current = path.dirname(current);
    } catch {
      // Directory already gone or unreadable — best-effort cleanup only.
      break;
    }
  }
}
11
26
 
12
27
  async function pullAtris() {
13
- const arg = process.argv[3];
28
+ let arg = process.argv[3];
29
+
30
+ if (arg === '--help') {
31
+ console.log('Usage: atris pull [business] [--into <path>] [--only <prefix>] [--keep-local] [--timeout <seconds>]');
32
+ console.log('');
33
+ console.log(' Pull is force-overwrite by default. Cloud is the source of truth.');
34
+ console.log(' Local files that conflict with cloud are replaced by the cloud version.');
35
+ console.log('');
36
+ console.log(' atris pull Pull into current business workspace');
37
+ console.log(' atris pull doordash Pull a business into ./doordash or --into <path>');
38
+ console.log(' atris pull doordash --into /tmp/doordash');
39
+ console.log(' atris pull doordash --only atris/wiki/');
40
+ console.log(' atris pull --keep-local Preserve conflicting local edits as .remote files (legacy)');
41
+ return;
42
+ }
43
+
44
+ // Auto-detect business from .atris/business.json in current dir
45
+ if (!arg || arg.startsWith('--')) {
46
+ const bizFile = path.join(process.cwd(), '.atris', 'business.json');
47
+ if (fs.existsSync(bizFile)) {
48
+ try {
49
+ const biz = JSON.parse(fs.readFileSync(bizFile, 'utf8'));
50
+ if (biz.slug || biz.name) {
51
+ return pullBusiness(biz.slug || biz.name);
52
+ }
53
+ } catch {}
54
+ }
55
+ }
14
56
 
15
57
  // If a business name is given, do a business pull
16
58
  if (arg && arg !== '--help' && !arg.startsWith('--')) {
@@ -78,44 +120,68 @@ async function pullBusiness(slug) {
78
120
  process.exit(1);
79
121
  }
80
122
 
81
- const force = process.argv.includes('--force');
123
+ // Pull is force-overwrite by default (cloud = source of truth).
124
+ // --keep-local opts back into the legacy three-way merge with .remote conflict files.
125
+ // --force is still accepted as an alias for the default for muscle-memory.
126
+ const force = !process.argv.includes('--keep-local');
82
127
 
83
128
  // Parse --only flag: comma-separated directory prefixes to filter
84
- const onlyArg = process.argv.find(a => a.startsWith('--only='));
85
- const onlyPrefixes = onlyArg
86
- ? onlyArg.slice('--only='.length).split(',').map(p => {
87
- // Normalize: strip leading slash, ensure trailing slash for dirs
129
+ // Supports both --only=team/,context/ and --only team/,context/
130
+ let onlyRaw = null;
131
+ const onlyEqArg = process.argv.find(a => a.startsWith('--only='));
132
+ if (onlyEqArg) {
133
+ onlyRaw = onlyEqArg.slice('--only='.length);
134
+ } else {
135
+ const onlyIdx = process.argv.indexOf('--only');
136
+ if (onlyIdx !== -1 && process.argv[onlyIdx + 1] && !process.argv[onlyIdx + 1].startsWith('-')) {
137
+ onlyRaw = process.argv[onlyIdx + 1];
138
+ }
139
+ }
140
+ const onlyPrefixes = onlyRaw
141
+ ? onlyRaw.split(',').map(p => {
88
142
  let norm = p.replace(/^\//, '');
143
+ const wikiPrefix = normalizeWikiOnlyPrefix(norm);
144
+ if (wikiPrefix) return wikiPrefix;
89
145
  if (norm && !norm.endsWith('/') && !norm.includes('.')) norm += '/';
90
146
  return norm;
91
147
  }).filter(Boolean)
92
148
  : null;
93
149
 
94
- // Parse --timeout flag: override default 120s timeout
95
- const timeoutArg = process.argv.find(a => a.startsWith('--timeout='));
96
- const timeoutMs = timeoutArg
97
- ? parseInt(timeoutArg.slice('--timeout='.length), 10) * 1000
98
- : 120000;
150
+ // Parse --timeout flag: override default 300s timeout
151
+ // Supports both --timeout=60 and --timeout 60
152
+ let timeoutSec = 300;
153
+ const timeoutEqArg = process.argv.find(a => a.startsWith('--timeout='));
154
+ if (timeoutEqArg) {
155
+ timeoutSec = parseInt(timeoutEqArg.slice('--timeout='.length), 10);
156
+ } else {
157
+ const timeoutIdx = process.argv.indexOf('--timeout');
158
+ if (timeoutIdx !== -1 && process.argv[timeoutIdx + 1]) {
159
+ timeoutSec = parseInt(process.argv[timeoutIdx + 1], 10);
160
+ }
161
+ }
162
+ const timeoutMs = timeoutSec * 1000;
99
163
 
100
164
  // Determine output directory
101
165
  const intoIdx = process.argv.indexOf('--into');
102
166
  let outputDir;
103
167
  if (intoIdx !== -1 && process.argv[intoIdx + 1]) {
104
168
  outputDir = path.resolve(process.argv[intoIdx + 1]);
169
+ } else if (fs.existsSync(path.join(process.cwd(), '.atris', 'business.json'))) {
170
+ // Inside a pulled workspace — pull into current dir (no nesting)
171
+ outputDir = process.cwd();
172
+ } else if (fs.existsSync(path.join(process.cwd(), 'atris')) && fs.statSync(path.join(process.cwd(), 'atris')).isDirectory()) {
173
+ // Inside an atris init'd workspace — merge business into current dir
174
+ outputDir = process.cwd();
105
175
  } else {
106
- const atrisDir = path.join(process.cwd(), 'atris');
107
- if (fs.existsSync(atrisDir)) {
108
- outputDir = path.join(atrisDir, slug);
109
- } else {
110
- outputDir = path.join(process.cwd(), slug);
111
- }
176
+ // Default: ./{slug}/ in current directory
177
+ outputDir = path.join(process.cwd(), slug);
112
178
  }
113
179
 
114
180
  // Resolve business ID — always refresh from API to avoid stale workspace_id
115
181
  let businessId, workspaceId, businessName, resolvedSlug;
116
182
  const businesses = loadBusinesses();
117
183
 
118
- const listResult = await apiRequestJson('/businesses/', { method: 'GET', token: creds.token });
184
+ const listResult = await apiRequestJson('/business/', { method: 'GET', token: creds.token });
119
185
  if (!listResult.ok) {
120
186
  // Fall back to local cache if API fails
121
187
  if (businesses[slug]) {
@@ -157,24 +223,143 @@ async function pullBusiness(slug) {
157
223
  process.exit(1);
158
224
  }
159
225
 
226
+ // Auto-wake the EC2 computer if --auto-wake is set.
227
+ // Without this, pull silently serves stale data from agent_files cache when
228
+ // the computer is asleep — the bug that confused us all night.
229
+ const autoWake = process.argv.includes('--auto-wake');
230
+ if (autoWake) {
231
+ const statusResult = await apiRequestJson(`/business/${businessId}/ai-computer/status`, { method: 'GET', token: creds.token });
232
+ const computerStatus = statusResult.ok && statusResult.data ? statusResult.data.status : 'unknown';
233
+ if (computerStatus !== 'running' || !(statusResult.data && statusResult.data.endpoint)) {
234
+ process.stdout.write(' Waking EC2 computer... ');
235
+ await apiRequestJson(`/business/${businessId}/ai-computer/wake`, { method: 'POST', token: creds.token });
236
+ const wakeStart = Date.now();
237
+ while (Date.now() - wakeStart < 90000) {
238
+ await new Promise((r) => setTimeout(r, 3000));
239
+ const s = await apiRequestJson(`/business/${businessId}/ai-computer/status`, { method: 'GET', token: creds.token });
240
+ if (s.ok && s.data && s.data.status === 'running' && s.data.endpoint) {
241
+ const elapsed = Math.floor((Date.now() - wakeStart) / 1000);
242
+ console.log(`awake (${elapsed}s)`);
243
+ break;
244
+ }
245
+ }
246
+ }
247
+ }
248
+
160
249
  // Load manifest (last sync state)
161
250
  const manifest = loadManifest(resolvedSlug || slug);
162
251
  const timeSince = manifest ? _timeSince(manifest.last_sync) : null;
163
252
 
164
253
  console.log('');
165
254
  console.log(`Pulling ${businessName}...` + (timeSince ? ` (last synced ${timeSince})` : ''));
166
- console.log(' Fetching workspace...');
167
255
 
168
- // Get remote snapshot (large workspaces can take 60s+)
169
- const result = await apiRequestJson(
170
- `/businesses/${businessId}/workspaces/${workspaceId}/snapshot?include_content=true`,
171
- { method: 'GET', token: creds.token, timeoutMs }
172
- );
256
+ // Loading indicator with elapsed time
257
+ const startTime = Date.now();
258
+ const spinner = ['|', '/', '-', '\\'];
259
+ let spinIdx = 0;
260
+ const loading = setInterval(() => {
261
+ const elapsed = Math.floor((Date.now() - startTime) / 1000);
262
+ process.stdout.write(`\r Fetching workspace... ${spinner[spinIdx++ % 4]} ${elapsed}s`);
263
+ }, 250);
264
+
265
+ // Smart pull: if we have a manifest (not first sync), fetch hashes first, then only changed content
266
+ const hasManifest = manifest && manifest.files && Object.keys(manifest.files).length > 0 && !force;
267
+ let result;
268
+
269
+ const pathsParam = onlyPrefixes ? `&paths=${encodeURIComponent(onlyPrefixes.map(p => p.replace(/\/$/, '')).join(','))}` : '';
270
+
271
+ if (hasManifest) {
272
+ // Phase 1: fetch hashes only (fast — no file content transferred)
273
+ const hashUrl = `/business/${businessId}/workspaces/${workspaceId}/snapshot?include_content=false${pathsParam}`;
274
+ const hashResult = await apiRequestJson(hashUrl, { method: 'GET', token: creds.token, timeoutMs });
275
+
276
+ if (hashResult.ok && hashResult.data && hashResult.data.files) {
277
+ // Diff against manifest to find changed files
278
+ const remoteHashes = {};
279
+ for (const f of hashResult.data.files) {
280
+ if (f.path && f.hash) remoteHashes[f.path] = f.hash;
281
+ }
282
+ const changedPaths = [];
283
+ const manifestFiles = manifest.files || {};
284
+ for (const [p, hash] of Object.entries(remoteHashes)) {
285
+ const prev = manifestFiles[p];
286
+ if (!prev || prev.hash !== hash) changedPaths.push(p);
287
+ }
288
+
289
+ if (changedPaths.length === 0) {
290
+ clearInterval(loading);
291
+ process.stdout.write(`\r Checked ${Object.keys(remoteHashes).length} files in ${Math.floor((Date.now() - startTime) / 1000)}s.${' '.repeat(10)}\n`);
292
+ // Still need full result for diff logic below — build it from hash-only data
293
+ result = { ok: true, data: { files: hashResult.data.files } };
294
+ } else {
295
+ // Phase 2: fetch ONLY changed files via batch endpoint (not full snapshot)
296
+ clearInterval(loading);
297
+ const checkSec = Math.floor((Date.now() - startTime) / 1000);
298
+ console.log(`\r Checked in ${checkSec}s — ${changedPaths.length} changed, ${Object.keys(remoteHashes).length - changedPaths.length} unchanged.${' '.repeat(10)}`);
299
+
300
+ const startPhase2 = Date.now();
301
+ const loading2 = setInterval(() => {
302
+ const elapsed = Math.floor((Date.now() - startPhase2) / 1000);
303
+ process.stdout.write(`\r Fetching ${changedPaths.length} changed files... ${spinner[spinIdx++ % 4]} ${elapsed}s`);
304
+ }, 250);
305
+
306
+ // Try batch file read first (fast — only changed files)
307
+ const batchUrl = `/business/${businessId}/workspaces/${workspaceId}/files/batch`;
308
+ const batchResult = await apiRequestJson(batchUrl, {
309
+ method: 'POST',
310
+ token: creds.token,
311
+ body: { paths: changedPaths },
312
+ timeoutMs,
313
+ });
314
+
315
+ clearInterval(loading2);
316
+ const phase2Sec = Math.floor((Date.now() - startPhase2) / 1000);
317
+
318
+ if (batchResult.ok && batchResult.data && batchResult.data.files) {
319
+ process.stdout.write(`\r Fetched ${batchResult.data.files.length} files in ${phase2Sec}s.${' '.repeat(10)}\n`);
320
+ // Merge: hash-only results + content for changed files
321
+ const contentMap = {};
322
+ for (const f of batchResult.data.files) {
323
+ if (f.path) contentMap[f.path] = f;
324
+ }
325
+ // Build merged file list: all hash-only entries + inject content for changed ones
326
+ const mergedFiles = hashResult.data.files.map(f => {
327
+ const withContent = contentMap[f.path];
328
+ return withContent || f;
329
+ });
330
+ result = { ok: true, data: { files: mergedFiles } };
331
+ } else {
332
+ // Batch not available — fall back to full snapshot
333
+ process.stdout.write(`\r Batch unavailable, fetching full snapshot...${' '.repeat(10)}\n`);
334
+ const contentUrl = `/business/${businessId}/workspaces/${workspaceId}/snapshot?include_content=true${pathsParam}`;
335
+ result = await apiRequestJson(contentUrl, { method: 'GET', token: creds.token, timeoutMs });
336
+ const fullSec = Math.floor((Date.now() - startPhase2) / 1000);
337
+ process.stdout.write(`\r Fetched in ${fullSec}s.${' '.repeat(20)}\n`);
338
+ }
339
+ }
340
+ } else {
341
+ // Hash-only fetch failed — fall back to full snapshot
342
+ const fullUrl = `/business/${businessId}/workspaces/${workspaceId}/snapshot?include_content=true${pathsParam}`;
343
+ result = await apiRequestJson(fullUrl, { method: 'GET', token: creds.token, timeoutMs });
344
+ clearInterval(loading);
345
+ process.stdout.write(`\r Fetched in ${Math.floor((Date.now() - startTime) / 1000)}s.${' '.repeat(20)}\n`);
346
+ }
347
+ } else {
348
+ // First sync or --force — full snapshot with content
349
+ const snapshotUrl = `/business/${businessId}/workspaces/${workspaceId}/snapshot?include_content=true${pathsParam}`;
350
+ result = await apiRequestJson(snapshotUrl, { method: 'GET', token: creds.token, timeoutMs });
351
+ clearInterval(loading);
352
+ const totalSec = Math.floor((Date.now() - startTime) / 1000);
353
+ process.stdout.write(`\r Fetched in ${totalSec}s.${' '.repeat(20)}\n`);
354
+ }
173
355
 
174
356
  if (!result.ok) {
175
357
  const msg = result.errorMessage || result.error || `HTTP ${result.status}`;
176
358
  if (result.status === 0 || (typeof msg === 'string' && msg.toLowerCase().includes('timeout'))) {
177
- console.error(`\n Workspace is taking too long to respond. Try: atris pull ${slug} --timeout=120`);
359
+ console.error(`\n Workspace timed out (large workspaces can take 60s+). Try: atris pull ${slug} --timeout=600`);
360
+ } else if (result.status === 502) {
361
+ console.error(`\n Computer didn't respond in time. It may be waking up or the workspace is large.`);
362
+ console.error(` Try again in 30s, or use: atris pull ${slug} --only=team/,context/`);
178
363
  } else if (result.status === 409) {
179
364
  console.error(`\n Computer is sleeping. Wake it first, then pull again.`);
180
365
  } else if (result.status === 403) {
@@ -190,11 +375,15 @@ async function pullBusiness(slug) {
190
375
  let files = result.data.files || [];
191
376
  if (files.length === 0) {
192
377
  console.log(' Workspace is empty.');
193
- return;
378
+ // Don't early-return in force mode: we still need to fall through to the
379
+ // mirror sweep so a genuinely-emptied cloud can clear local files. The
380
+ // sweep itself has a safety guard that refuses to wipe local content
381
+ // when remote reports empty (the snapshot-glitch case), so this is safe.
382
+ if (!force) return;
383
+ } else {
384
+ console.log(` Processing ${files.length} files...`);
194
385
  }
195
386
 
196
- console.log(` Processing ${files.length} files...`);
197
-
198
387
  // Apply --only filter if specified
199
388
  if (onlyPrefixes) {
200
389
  files = files.filter(file => {
@@ -204,34 +393,60 @@ async function pullBusiness(slug) {
204
393
  });
205
394
  if (files.length === 0) {
206
395
  console.log(` No files matched --only filter: ${onlyPrefixes.join(', ')}`);
207
- return;
396
+ // Don't early-return: we still need to update the manifest so paths
397
+ // that USED to be in the scoped subtree but were deleted on cloud
398
+ // get evicted from the manifest. Without this, the next push freshness
399
+ // check would forever flag those paths as drift and demand a pull —
400
+ // but the pull would early-return again, creating a deadlock.
401
+ } else {
402
+ console.log(` Filtered to ${files.length} files matching: ${onlyPrefixes.join(', ')}`);
208
403
  }
209
- console.log(` Filtered to ${files.length} files matching: ${onlyPrefixes.join(', ')}`);
210
404
  }
211
405
 
212
- // Build remote file map {path: {hash, size, content}}
406
+ // Build remote file map {path: {hash, size}} and content map {path: content}.
407
+ //
408
+ // CRITICAL: smart-pull (hash-only fetch) returns files with `path`+`hash`+`size`
409
+ // but no `content`. Phase-2 batch fetch only adds content for CHANGED files —
410
+ // unchanged files stay hash-only. We must include hash-only entries in remoteFiles
411
+ // so threeWayCompare doesn't see them as missing-from-remote (deletedRemote).
412
+ // The previous version skipped any file without content, which caused every
413
+ // smart-pull to mark every unchanged file as deleted-on-cloud and rmSync them.
213
414
  const remoteFiles = {};
214
415
  const remoteContent = {};
416
+ const crypto = require('crypto');
215
417
  for (const file of files) {
216
- if (!file.path || file.binary || file.content === null || file.content === undefined) continue;
217
- // Skip empty files (deleted files that were blanked out)
218
- if (file.content === '') continue;
219
- // Compute hash from content bytes (matches computeLocalHashes raw byte hashing)
220
- const crypto = require('crypto');
221
- const rawBytes = Buffer.from(file.content, 'utf-8');
222
- remoteFiles[file.path] = { hash: crypto.createHash('sha256').update(rawBytes).digest('hex'), size: rawBytes.length };
223
- remoteContent[file.path] = file.content;
418
+ if (!file.path || file.binary) continue;
419
+ // An empty string IS valid content (a real, zero-byte file). The earlier
420
+ // version excluded `content === ''` from the hasContent path, which made
421
+ // empty files masquerade as hash-only entries; they'd then be recorded in
422
+ // the manifest (with the empty-string hash) but never written to disk.
423
+ // A subsequent push would compare local (file missing) to manifest (file
424
+ // present) and try to delete the file from cloud silently undoing the
425
+ // very thing the user just pulled.
426
+ const hasContent = file.content !== null && file.content !== undefined && typeof file.content === 'string';
427
+ if (hasContent) {
428
+ // Full content available — hash from raw bytes (matches computeLocalHashes)
429
+ const rawBytes = Buffer.from(file.content, 'utf-8');
430
+ remoteFiles[file.path] = { hash: crypto.createHash('sha256').update(rawBytes).digest('hex'), size: rawBytes.length };
431
+ remoteContent[file.path] = file.content;
432
+ } else if (file.hash) {
433
+ // Hash-only entry from smart pull — trust the cloud-reported hash
434
+ remoteFiles[file.path] = { hash: file.hash, size: file.size || 0 };
435
+ }
224
436
  }
225
437
 
226
438
  // Compute local file hashes
227
439
  const localFiles = fs.existsSync(outputDir) ? computeLocalHashes(outputDir) : {};
228
440
 
441
+ // If output dir is empty (fresh clone) or --force, treat as first sync — pull everything
442
+ const effectiveManifest = (Object.keys(localFiles).length === 0 || force) ? null : manifest;
443
+
229
444
  // Three-way compare
230
- const baseFiles = (manifest && manifest.files) ? manifest.files : {};
231
- const diff = threeWayCompare(localFiles, remoteFiles, manifest);
445
+ const diff = threeWayCompare(localFiles, remoteFiles, effectiveManifest);
232
446
 
233
447
  // Apply changes
234
448
  let pulled = 0;
449
+ let deleted = 0;
235
450
  let conflictCount = 0;
236
451
  let unchangedCount = diff.unchanged.length;
237
452
 
@@ -275,13 +490,84 @@ async function pullBusiness(slug) {
275
490
  }
276
491
  }
277
492
 
278
- // Warn about remote deletions
493
+ // Apply remote deletions
279
494
  for (const p of diff.deletedRemote) {
280
- console.log(` - ${p.replace(/^\//, '')} deleted on computer`);
495
+ const baseHash = effectiveManifest && effectiveManifest.files && effectiveManifest.files[p]
496
+ ? effectiveManifest.files[p].hash
497
+ : null;
498
+ const localHash = localFiles[p] ? localFiles[p].hash : null;
499
+ const localChanged = Boolean(baseHash && localHash && localHash !== baseHash);
500
+
501
+ if (force || !localChanged) {
502
+ const localPath = path.join(outputDir, p.replace(/^\//, ''));
503
+ fs.rmSync(localPath, { force: true });
504
+ pruneEmptyParentDirs(localPath, outputDir);
505
+ console.log(` - ${p.replace(/^\//, '')} deleted on computer`);
506
+ deleted++;
507
+ } else {
508
+ console.log(` \u26A0 ${p.replace(/^\//, '')} deleted on computer, but you changed it locally`);
509
+ conflictCount++;
510
+ }
511
+ }
512
+
513
+ // FORCE MIRROR SWEEP — local must EXACTLY match cloud after a force pull.
514
+ // The threeWayCompare path with effectiveManifest=null only computes
515
+ // newLocal/conflicts/newRemote and never marks files as deletedRemote, so
516
+ // local-only files (created locally, never on cloud) survive a force pull.
517
+ // That breaks the "cloud is the source of truth" promise. Sweep them now.
518
+ //
519
+ // SAFETY GUARDS — without these the sweep can wipe an entire local copy:
520
+ // • Scope the sweep: when --only is set, only sweep paths INSIDE the
521
+ // prefix(es). Out-of-scope local files must be left alone — the user
522
+ // asked for a partial pull, not a workspace-wide reset.
523
+ // • Skip when remoteFiles is empty AND local has in-scope content: the
524
+ // snapshot endpoint has a known server-side bug where it returns 0
525
+ // files for healthy workspaces. If cloud reports empty but local has
526
+ // in-scope content we refuse to sweep — the user can re-run with
527
+ // --keep-local and investigate, or run `atris align --hard` for an
528
+ // explicit nuke.
529
+ // • Skip files the server's snapshot filter hides. The warm runner's
530
+ // _snapshot_dir (ecs_warm_runner.py) deliberately omits CLAUDE.md and
531
+ // other names from snapshots, so they never appear in remoteFiles even
532
+ // when they DO exist on cloud. Sweeping them would delete server-managed
533
+ // files that aren't actually missing on cloud.
534
+ const SERVER_HIDDEN_BASENAMES = new Set(['CLAUDE.md']);
535
+ function basename(p) {
536
+ const idx = p.lastIndexOf('/');
537
+ return idx === -1 ? p : p.slice(idx + 1);
538
+ }
539
+ function isInScope(p) {
540
+ if (!onlyPrefixes) return true;
541
+ const rel = p.replace(/^\//, '');
542
+ return onlyPrefixes.some((pref) => rel.startsWith(pref));
543
+ }
544
+ if (force) {
545
+ const remotePathSet = new Set(Object.keys(remoteFiles));
546
+ const inScopeLocal = Object.keys(localFiles).filter(isInScope);
547
+ if (remotePathSet.size === 0 && inScopeLocal.length > 0) {
548
+ console.log('');
549
+ console.log(' ⚠ Cloud reported zero files but local has in-scope content. Refusing to sweep.');
550
+ console.log(' This usually means the snapshot endpoint glitched. Try again,');
551
+ console.log(' or run `atris align --hard` if you really want to nuke local.');
552
+ } else {
553
+ for (const p of inScopeLocal) {
554
+ if (remotePathSet.has(p)) continue;
555
+ if (SERVER_HIDDEN_BASENAMES.has(basename(p))) continue;
556
+ const localPath = path.join(outputDir, p.replace(/^\//, ''));
557
+ try {
558
+ fs.rmSync(localPath, { force: true });
559
+ pruneEmptyParentDirs(localPath, outputDir);
560
+ console.log(` - ${p.replace(/^\//, '')} not on cloud, removed locally`);
561
+ deleted++;
562
+ } catch {
563
+ // ignore — file might already be gone
564
+ }
565
+ }
566
+ }
281
567
  }
282
568
 
283
569
  // Show unchanged
284
- if (unchangedCount > 0 && pulled === 0 && conflictCount === 0 && diff.deletedRemote.length === 0) {
570
+ if (unchangedCount > 0 && pulled === 0 && deleted === 0 && conflictCount === 0) {
285
571
  console.log(' Already up to date.');
286
572
  }
287
573
 
@@ -289,17 +575,17 @@ async function pullBusiness(slug) {
289
575
  console.log('');
290
576
  const parts = [];
291
577
  if (pulled > 0) parts.push(`${pulled} pulled`);
578
+ if (deleted > 0) parts.push(`${deleted} deleted`);
292
579
  if (diff.newRemote.length > 0 && !parts.some(p => p.includes('pulled'))) parts.push(`${diff.newRemote.length} new`);
293
580
  if (unchangedCount > 0) parts.push(`${unchangedCount} unchanged`);
294
581
  if (conflictCount > 0) parts.push(`${conflictCount} conflict${conflictCount > 1 ? 's' : ''}`);
295
- if (diff.deletedRemote.length > 0) parts.push(`${diff.deletedRemote.length} deleted remotely`);
296
582
  if (parts.length > 0) console.log(` ${parts.join(', ')}.`);
297
583
 
298
584
  // Get current commit hash from remote (for manifest)
299
585
  let commitHash = null;
300
586
  try {
301
587
  const headResult = await apiRequestJson(
302
- `/businesses/${businessId}/workspaces/${workspaceId}/git/head`,
588
+ `/business/${businessId}/workspaces/${workspaceId}/git/head`,
303
589
  { method: 'GET', token: creds.token }
304
590
  );
305
591
  if (headResult.ok && headResult.data && headResult.data.commit) {
@@ -309,13 +595,113 @@ async function pullBusiness(slug) {
309
595
  // Git might not be initialized yet — that's fine
310
596
  }
311
597
 
312
- // Save manifest when using --only, merge into existing manifest to avoid data loss
598
+ // ANTI-WIPE GUARD: if cloud reported zero in-scope files but local still
599
+ // has in-scope content (i.e. the sweep refused), don't overwrite the
600
+ // manifest with empty data for the scoped subtree. The manifest is the
601
+ // authoritative record of what we last knew was on cloud — wiping it
602
+ // because of a transient empty snapshot would force every subsequent
603
+ // push to flag every file as drift. Better to leave the manifest stale
604
+ // than to record a never-actually-true "cloud is empty" state.
605
+ //
606
+ // Applies to both whole-workspace pulls and scoped (--only) pulls.
607
+ {
608
+ const inScopeLocalCount = onlyPrefixes
609
+ ? Object.keys(localFiles).filter((p) => onlyPrefixes.some((pref) => p.replace(/^\//, '').startsWith(pref))).length
610
+ : Object.keys(localFiles).length;
611
+ if (Object.keys(remoteFiles).length === 0 && inScopeLocalCount > 0) {
612
+ return;
613
+ }
614
+ }
615
+
616
+ // Save manifest — when using --only, merge into existing manifest so paths
617
+ // OUTSIDE the scoped prefix don't get dropped. Inside the scoped prefix,
618
+ // however, we must replace (not merge) so that files deleted on cloud
619
+ // since the last sync get evicted from the manifest. Without this, the
620
+ // push freshness check would forever flag those paths as "deleted on
621
+ // cloud" drift, blocking pushes for no reason.
313
622
  let manifestFiles = remoteFiles;
314
623
  if (onlyPrefixes && manifest && manifest.files) {
315
- manifestFiles = { ...manifest.files, ...remoteFiles };
624
+ const merged = {};
625
+ // 1. Keep paths from old manifest that are OUTSIDE the scoped prefix.
626
+ for (const [p, info] of Object.entries(manifest.files)) {
627
+ const inScope = onlyPrefixes.some((pref) => p.replace(/^\//, '').startsWith(pref));
628
+ if (!inScope) merged[p] = info;
629
+ }
630
+ // 2. Overwrite the in-scope subtree with what we just pulled (cloud truth).
631
+ for (const [p, info] of Object.entries(remoteFiles)) {
632
+ merged[p] = info;
633
+ }
634
+ manifestFiles = merged;
316
635
  }
317
636
  const newManifest = buildManifest(manifestFiles, commitHash);
318
637
  saveManifest(resolvedSlug || slug, newManifest);
638
+
639
+ // Save business config in the output dir so push/status work without args
640
+ const atrisDir = path.join(outputDir, '.atris');
641
+ fs.mkdirSync(atrisDir, { recursive: true });
642
+ fs.writeFileSync(path.join(atrisDir, 'business.json'), JSON.stringify({
643
+ slug: resolvedSlug || slug,
644
+ business_id: businessId,
645
+ workspace_id: workspaceId,
646
+ name: businessName,
647
+ }, null, 2));
648
+
649
+ // Wire skills → .claude/skills/ so they work as slash commands
650
+ const skillsDir = path.join(outputDir, 'skills');
651
+ const claudeSkillsDir = path.join(outputDir, '.claude', 'skills');
652
+
653
+ if (fs.existsSync(skillsDir)) {
654
+ fs.mkdirSync(claudeSkillsDir, { recursive: true });
655
+
656
+ // Recursively find all skill folders (any dir containing SKILL.md, at any depth)
657
+ const wireSkills = (dir, relPrefix) => {
658
+ const entries = fs.readdirSync(dir);
659
+ for (const entry of entries) {
660
+ const fullPath = path.join(dir, entry);
661
+ if (!fs.statSync(fullPath).isDirectory()) continue;
662
+ if (entry === 'README.md' || entry.startsWith('.')) continue;
663
+
664
+ const skillFile = path.join(fullPath, 'SKILL.md');
665
+ if (fs.existsSync(skillFile)) {
666
+ // This is a leaf skill — wire it
667
+ const skillName = relPrefix ? `${relPrefix}-${entry}` : entry;
668
+ const symlinkPath = path.join(claudeSkillsDir, skillName);
669
+ const relativePath = path.relative(path.dirname(symlinkPath), fullPath);
670
+
671
+ // Business skills override init skills (remove existing symlink if present)
672
+ if (fs.existsSync(symlinkPath)) {
673
+ try {
674
+ const stat = fs.lstatSync(symlinkPath);
675
+ if (stat.isSymbolicLink()) fs.unlinkSync(symlinkPath);
676
+ else continue; // Don't overwrite real directories
677
+ } catch { continue; }
678
+ }
679
+ try {
680
+ fs.symlinkSync(relativePath, symlinkPath);
681
+ } catch (e) {
682
+ // Fallback: copy
683
+ fs.mkdirSync(symlinkPath, { recursive: true });
684
+ fs.copyFileSync(skillFile, path.join(symlinkPath, 'SKILL.md'));
685
+ }
686
+ }
687
+
688
+ // Recurse into subdirectories (e.g. skills/executive/pipeline-health/)
689
+ wireSkills(fullPath, relPrefix ? `${relPrefix}-${entry}` : entry);
690
+ }
691
+ };
692
+
693
+ wireSkills(skillsDir, '');
694
+
695
+ // Count wired skills
696
+ const wiredSkills = fs.readdirSync(claudeSkillsDir).filter(f => {
697
+ const p = path.join(claudeSkillsDir, f);
698
+ return fs.statSync(p).isDirectory();
699
+ });
700
+ if (wiredSkills.length > 0) {
701
+ console.log(` Wired ${wiredSkills.length} skills → .claude/skills/`);
702
+ }
703
+ }
704
+
319
705
  }
320
706
 
321
707