atris 2.6.3 → 3.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (54) hide show
  1. package/README.md +124 -34
  2. package/atris/CLAUDE.md +5 -1
  3. package/atris/atris.md +4 -0
  4. package/atris/features/README.md +24 -0
  5. package/atris/skills/autopilot/SKILL.md +74 -75
  6. package/atris/skills/endgame/SKILL.md +179 -0
  7. package/atris/skills/flow/SKILL.md +121 -0
  8. package/atris/skills/improve/SKILL.md +84 -0
  9. package/atris/skills/loop/SKILL.md +72 -0
  10. package/atris/skills/wiki/SKILL.md +61 -0
  11. package/atris/team/executor/MEMBER.md +10 -4
  12. package/atris/team/navigator/MEMBER.md +2 -0
  13. package/atris/team/validator/MEMBER.md +8 -5
  14. package/atris.md +33 -0
  15. package/bin/atris.js +210 -41
  16. package/commands/activate.js +28 -2
  17. package/commands/align.js +720 -0
  18. package/commands/auth.js +75 -2
  19. package/commands/autopilot.js +1213 -270
  20. package/commands/browse.js +100 -0
  21. package/commands/business.js +785 -12
  22. package/commands/clean.js +107 -2
  23. package/commands/computer.js +429 -0
  24. package/commands/context-sync.js +78 -8
  25. package/commands/experiments.js +351 -0
  26. package/commands/feedback.js +150 -0
  27. package/commands/fleet.js +395 -0
  28. package/commands/fork.js +127 -0
  29. package/commands/init.js +50 -1
  30. package/commands/learn.js +407 -0
  31. package/commands/lifecycle.js +94 -0
  32. package/commands/loop.js +114 -0
  33. package/commands/publish.js +129 -0
  34. package/commands/pull.js +369 -38
  35. package/commands/push.js +283 -246
  36. package/commands/review.js +149 -0
  37. package/commands/run.js +76 -43
  38. package/commands/serve.js +360 -0
  39. package/commands/setup.js +1 -1
  40. package/commands/soul.js +381 -0
  41. package/commands/status.js +119 -1
  42. package/commands/sync.js +147 -1
  43. package/commands/terminal.js +201 -0
  44. package/commands/wiki.js +376 -0
  45. package/commands/workflow.js +191 -74
  46. package/commands/workspace-clean.js +3 -3
  47. package/lib/endstate.js +259 -0
  48. package/lib/learnings.js +235 -0
  49. package/lib/manifest.js +1 -0
  50. package/lib/todo.js +9 -5
  51. package/lib/wiki.js +578 -0
  52. package/package.json +2 -2
  53. package/utils/api.js +40 -35
  54. package/utils/auth.js +1 -0
@@ -0,0 +1,129 @@
1
+ const fs = require('fs');
2
+ const path = require('path');
3
+ const os = require('os');
4
+ const { loadCredentials } = require('../utils/auth');
5
+ const { apiRequestJson } = require('../utils/api');
6
+
7
+ async function publishAtris() {
8
+ if (process.argv[3] === '--help') {
9
+ console.log('Usage: atris publish [--name <name>] [--description <desc>]');
10
+ console.log('');
11
+ console.log(' atris publish Publish current workspace as a template');
12
+ console.log(' atris publish --name "CRM Starter" Publish with a specific name');
13
+ console.log(' atris publish --name crm --description "Sales CRM template"');
14
+ process.exit(0);
15
+ }
16
+
17
+ const creds = loadCredentials();
18
+ if (!creds || !creds.token) {
19
+ console.error('Not logged in. Run: atris login');
20
+ process.exit(1);
21
+ }
22
+
23
+ // Parse --name
24
+ let name = null;
25
+ const nameEq = process.argv.find(a => a.startsWith('--name='));
26
+ if (nameEq) { name = nameEq.slice(7); }
27
+ else {
28
+ const ni = process.argv.indexOf('--name');
29
+ if (ni !== -1 && process.argv[ni + 1] && !process.argv[ni + 1].startsWith('-')) name = process.argv[ni + 1];
30
+ }
31
+
32
+ // Parse --description
33
+ let description = null;
34
+ const descEq = process.argv.find(a => a.startsWith('--description='));
35
+ if (descEq) { description = descEq.slice(14); }
36
+ else {
37
+ const di = process.argv.indexOf('--description');
38
+ if (di !== -1 && process.argv[di + 1] && !process.argv[di + 1].startsWith('-')) description = process.argv[di + 1];
39
+ }
40
+
41
+ // Resolve name from .atris/business.json or directory name
42
+ if (!name) {
43
+ const bizFile = path.join(process.cwd(), '.atris', 'business.json');
44
+ if (fs.existsSync(bizFile)) {
45
+ try {
46
+ const biz = JSON.parse(fs.readFileSync(bizFile, 'utf8'));
47
+ name = biz.name || biz.slug;
48
+ } catch {}
49
+ }
50
+ if (!name) name = path.basename(process.cwd());
51
+ }
52
+
53
+ // Collect files from atris/ directory
54
+ const atrisDir = path.join(process.cwd(), 'atris');
55
+ if (!fs.existsSync(atrisDir)) {
56
+ console.error('No atris/ directory found. Run from a workspace root.');
57
+ process.exit(1);
58
+ }
59
+
60
+ const EXCLUDE = ['logs', '.env', 'secrets'];
61
+ const files = [];
62
+
63
+ function collectFiles(dir, prefix) {
64
+ const entries = fs.readdirSync(dir);
65
+ for (const entry of entries) {
66
+ if (EXCLUDE.includes(entry)) continue;
67
+ if (entry.startsWith('.env') || entry === 'secrets') continue;
68
+ const full = path.join(dir, entry);
69
+ const rel = prefix ? `${prefix}/${entry}` : entry;
70
+ const stat = fs.statSync(full);
71
+ if (stat.isDirectory()) {
72
+ collectFiles(full, rel);
73
+ } else if (stat.isFile() && stat.size < 512 * 1024) {
74
+ try {
75
+ const content = fs.readFileSync(full, 'utf8');
76
+ files.push({ path: `/${rel}`, content });
77
+ } catch {}
78
+ }
79
+ }
80
+ }
81
+
82
+ collectFiles(atrisDir, '');
83
+
84
+ if (files.length === 0) {
85
+ console.error('No publishable files found in atris/.');
86
+ process.exit(1);
87
+ }
88
+
89
+ console.log('');
90
+ console.log(`Publishing "${name}" (${files.length} files)...`);
91
+
92
+ // POST to API
93
+ const body = { name, description: description || '', files };
94
+ const result = await apiRequestJson('/workspace/templates', {
95
+ method: 'POST',
96
+ token: creds.token,
97
+ body,
98
+ });
99
+
100
+ if (!result.ok) {
101
+ console.error(`\n Publish failed: ${result.errorMessage || result.error || result.status}`);
102
+ process.exit(1);
103
+ }
104
+
105
+ // Save local copy
106
+ const localDir = path.join(os.homedir(), '.atris', 'templates', name.toLowerCase().replace(/[^a-z0-9-]/g, '-'));
107
+ fs.mkdirSync(localDir, { recursive: true });
108
+
109
+ for (const f of files) {
110
+ const filePath = path.join(localDir, f.path.replace(/^\//, ''));
111
+ fs.mkdirSync(path.dirname(filePath), { recursive: true });
112
+ fs.writeFileSync(filePath, f.content);
113
+ }
114
+
115
+ // Save metadata
116
+ fs.writeFileSync(path.join(localDir, 'template.json'), JSON.stringify({
117
+ name,
118
+ description: description || '',
119
+ file_count: files.length,
120
+ published_at: new Date().toISOString(),
121
+ }, null, 2));
122
+
123
+ console.log('');
124
+ console.log(` Published as '${name}'. Others can fork with: atris fork ${name}`);
125
+ console.log(` Local copy saved to ${localDir}`);
126
+ console.log('');
127
+ }
128
+
129
+ module.exports = { publishAtris };
package/commands/pull.js CHANGED
@@ -8,10 +8,39 @@ const { getLogPath } = require('../lib/file-ops');
8
8
  const { parseJournalSections, mergeSections, reconstructJournal } = require('../lib/journal');
9
9
  const { loadBusinesses } = require('./business');
10
10
  const { loadManifest, saveManifest, computeFileHash, buildManifest, computeLocalHashes, threeWayCompare } = require('../lib/manifest');
11
+ const { normalizeWikiOnlyPrefix } = require('../lib/wiki');
12
+
13
// Remove now-empty parent directories of `filePath`, walking upward until a
// non-empty directory or `stopDir` is reached. `stopDir` itself is never
// removed. Best-effort: any fs error (dir already gone, permissions) stops
// the walk silently.
function pruneEmptyParentDirs(filePath, stopDir) {
  const boundary = path.resolve(stopDir);
  // FIX: resolve `current` too — the original compared an unresolved path
  // against the resolved boundary, so a relative filePath never matched and
  // the prune silently no-oped.
  let current = path.resolve(path.dirname(filePath));
  // FIX: `boundary + path.sep` is a real ancestor check. A bare startsWith
  // let a sibling like /tmp/foo-bar pass the guard for stopDir=/tmp/foo and
  // be removed OUTSIDE the boundary. The sep suffix also makes the
  // `current !== boundary` clause redundant.
  while (current.startsWith(boundary + path.sep)) {
    try {
      if (fs.readdirSync(current).length > 0) break;
      fs.rmdirSync(current);
      current = path.dirname(current);
    } catch {
      break; // best-effort: stop on any fs error
    }
  }
}
11
26
 
12
27
  async function pullAtris() {
13
28
  let arg = process.argv[3];
14
29
 
30
+ if (arg === '--help') {
31
+ console.log('Usage: atris pull [business] [--into <path>] [--only <prefix>] [--keep-local] [--timeout <seconds>]');
32
+ console.log('');
33
+ console.log(' Pull is force-overwrite by default. Cloud is the source of truth.');
34
+ console.log(' Local files that conflict with cloud are replaced by the cloud version.');
35
+ console.log('');
36
+ console.log(' atris pull Pull into current business workspace');
37
+ console.log(' atris pull doordash Pull a business into ./doordash or --into <path>');
38
+ console.log(' atris pull doordash --into /tmp/doordash');
39
+ console.log(' atris pull doordash --only atris/wiki/');
40
+ console.log(' atris pull --keep-local Preserve conflicting local edits as .remote files (legacy)');
41
+ return;
42
+ }
43
+
15
44
  // Auto-detect business from .atris/business.json in current dir
16
45
  if (!arg || arg.startsWith('--')) {
17
46
  const bizFile = path.join(process.cwd(), '.atris', 'business.json');
@@ -91,7 +120,10 @@ async function pullBusiness(slug) {
91
120
  process.exit(1);
92
121
  }
93
122
 
94
- const force = process.argv.includes('--force');
123
+ // Pull is force-overwrite by default (cloud = source of truth).
124
+ // --keep-local opts back into the legacy three-way merge with .remote conflict files.
125
+ // --force is still accepted as an alias for the default for muscle-memory.
126
+ const force = !process.argv.includes('--keep-local');
95
127
 
96
128
  // Parse --only flag: comma-separated directory prefixes to filter
97
129
  // Supports both --only=team/,context/ and --only team/,context/
@@ -108,6 +140,8 @@ async function pullBusiness(slug) {
108
140
  const onlyPrefixes = onlyRaw
109
141
  ? onlyRaw.split(',').map(p => {
110
142
  let norm = p.replace(/^\//, '');
143
+ const wikiPrefix = normalizeWikiOnlyPrefix(norm);
144
+ if (wikiPrefix) return wikiPrefix;
111
145
  if (norm && !norm.endsWith('/') && !norm.includes('.')) norm += '/';
112
146
  return norm;
113
147
  }).filter(Boolean)
@@ -132,20 +166,22 @@ async function pullBusiness(slug) {
132
166
  let outputDir;
133
167
  if (intoIdx !== -1 && process.argv[intoIdx + 1]) {
134
168
  outputDir = path.resolve(process.argv[intoIdx + 1]);
169
+ } else if (fs.existsSync(path.join(process.cwd(), '.atris', 'business.json'))) {
170
+ // Inside a pulled workspace — pull into current dir (no nesting)
171
+ outputDir = process.cwd();
172
+ } else if (fs.existsSync(path.join(process.cwd(), 'atris')) && fs.statSync(path.join(process.cwd(), 'atris')).isDirectory()) {
173
+ // Inside an atris init'd workspace — merge business into current dir
174
+ outputDir = process.cwd();
135
175
  } else {
136
- const atrisDir = path.join(process.cwd(), 'atris');
137
- if (fs.existsSync(atrisDir)) {
138
- outputDir = path.join(atrisDir, slug);
139
- } else {
140
- outputDir = path.join(process.cwd(), slug);
141
- }
176
+ // Default: ./{slug}/ in current directory
177
+ outputDir = path.join(process.cwd(), slug);
142
178
  }
143
179
 
144
180
  // Resolve business ID — always refresh from API to avoid stale workspace_id
145
181
  let businessId, workspaceId, businessName, resolvedSlug;
146
182
  const businesses = loadBusinesses();
147
183
 
148
- const listResult = await apiRequestJson('/businesses/', { method: 'GET', token: creds.token });
184
+ const listResult = await apiRequestJson('/business/', { method: 'GET', token: creds.token });
149
185
  if (!listResult.ok) {
150
186
  // Fall back to local cache if API fails
151
187
  if (businesses[slug]) {
@@ -187,6 +223,29 @@ async function pullBusiness(slug) {
187
223
  process.exit(1);
188
224
  }
189
225
 
226
+ // Auto-wake the EC2 computer if --auto-wake is set.
227
+ // Without this, pull silently serves stale data from agent_files cache when
228
+ // the computer is asleep — the bug that confused us all night.
229
+ const autoWake = process.argv.includes('--auto-wake');
230
+ if (autoWake) {
231
+ const statusResult = await apiRequestJson(`/business/${businessId}/ai-computer/status`, { method: 'GET', token: creds.token });
232
+ const computerStatus = statusResult.ok && statusResult.data ? statusResult.data.status : 'unknown';
233
+ if (computerStatus !== 'running' || !(statusResult.data && statusResult.data.endpoint)) {
234
+ process.stdout.write(' Waking EC2 computer... ');
235
+ await apiRequestJson(`/business/${businessId}/ai-computer/wake`, { method: 'POST', token: creds.token });
236
+ const wakeStart = Date.now();
237
+ while (Date.now() - wakeStart < 90000) {
238
+ await new Promise((r) => setTimeout(r, 3000));
239
+ const s = await apiRequestJson(`/business/${businessId}/ai-computer/status`, { method: 'GET', token: creds.token });
240
+ if (s.ok && s.data && s.data.status === 'running' && s.data.endpoint) {
241
+ const elapsed = Math.floor((Date.now() - wakeStart) / 1000);
242
+ console.log(`awake (${elapsed}s)`);
243
+ break;
244
+ }
245
+ }
246
+ }
247
+ }
248
+
190
249
  // Load manifest (last sync state)
191
250
  const manifest = loadManifest(resolvedSlug || slug);
192
251
  const timeSince = manifest ? _timeSince(manifest.last_sync) : null;
@@ -203,21 +262,104 @@ async function pullBusiness(slug) {
203
262
  process.stdout.write(`\r Fetching workspace... ${spinner[spinIdx++ % 4]} ${elapsed}s`);
204
263
  }, 250);
205
264
 
206
- // Get remote snapshot pass --only prefixes to server for faster response
207
- let snapshotUrl = `/businesses/${businessId}/workspaces/${workspaceId}/snapshot?include_content=true`;
208
- if (onlyPrefixes) {
209
- snapshotUrl += `&paths=${encodeURIComponent(onlyPrefixes.map(p => p.replace(/\/$/, '')).join(','))}`;
210
- }
211
- const result = await apiRequestJson(snapshotUrl, { method: 'GET', token: creds.token, timeoutMs });
265
+ // Smart pull: if we have a manifest (not first sync), fetch hashes first, then only changed content
266
+ const hasManifest = manifest && manifest.files && Object.keys(manifest.files).length > 0 && !force;
267
+ let result;
268
+
269
+ const pathsParam = onlyPrefixes ? `&paths=${encodeURIComponent(onlyPrefixes.map(p => p.replace(/\/$/, '')).join(','))}` : '';
212
270
 
213
- clearInterval(loading);
214
- const totalSec = Math.floor((Date.now() - startTime) / 1000);
215
- process.stdout.write(`\r Fetched in ${totalSec}s.${' '.repeat(20)}\n`);
271
+ if (hasManifest) {
272
+ // Phase 1: fetch hashes only (fast — no file content transferred)
273
+ const hashUrl = `/business/${businessId}/workspaces/${workspaceId}/snapshot?include_content=false${pathsParam}`;
274
+ const hashResult = await apiRequestJson(hashUrl, { method: 'GET', token: creds.token, timeoutMs });
275
+
276
+ if (hashResult.ok && hashResult.data && hashResult.data.files) {
277
+ // Diff against manifest to find changed files
278
+ const remoteHashes = {};
279
+ for (const f of hashResult.data.files) {
280
+ if (f.path && f.hash) remoteHashes[f.path] = f.hash;
281
+ }
282
+ const changedPaths = [];
283
+ const manifestFiles = manifest.files || {};
284
+ for (const [p, hash] of Object.entries(remoteHashes)) {
285
+ const prev = manifestFiles[p];
286
+ if (!prev || prev.hash !== hash) changedPaths.push(p);
287
+ }
288
+
289
+ if (changedPaths.length === 0) {
290
+ clearInterval(loading);
291
+ process.stdout.write(`\r Checked ${Object.keys(remoteHashes).length} files in ${Math.floor((Date.now() - startTime) / 1000)}s.${' '.repeat(10)}\n`);
292
+ // Still need full result for diff logic below — build it from hash-only data
293
+ result = { ok: true, data: { files: hashResult.data.files } };
294
+ } else {
295
+ // Phase 2: fetch ONLY changed files via batch endpoint (not full snapshot)
296
+ clearInterval(loading);
297
+ const checkSec = Math.floor((Date.now() - startTime) / 1000);
298
+ console.log(`\r Checked in ${checkSec}s — ${changedPaths.length} changed, ${Object.keys(remoteHashes).length - changedPaths.length} unchanged.${' '.repeat(10)}`);
299
+
300
+ const startPhase2 = Date.now();
301
+ const loading2 = setInterval(() => {
302
+ const elapsed = Math.floor((Date.now() - startPhase2) / 1000);
303
+ process.stdout.write(`\r Fetching ${changedPaths.length} changed files... ${spinner[spinIdx++ % 4]} ${elapsed}s`);
304
+ }, 250);
305
+
306
+ // Try batch file read first (fast — only changed files)
307
+ const batchUrl = `/business/${businessId}/workspaces/${workspaceId}/files/batch`;
308
+ const batchResult = await apiRequestJson(batchUrl, {
309
+ method: 'POST',
310
+ token: creds.token,
311
+ body: { paths: changedPaths },
312
+ timeoutMs,
313
+ });
314
+
315
+ clearInterval(loading2);
316
+ const phase2Sec = Math.floor((Date.now() - startPhase2) / 1000);
317
+
318
+ if (batchResult.ok && batchResult.data && batchResult.data.files) {
319
+ process.stdout.write(`\r Fetched ${batchResult.data.files.length} files in ${phase2Sec}s.${' '.repeat(10)}\n`);
320
+ // Merge: hash-only results + content for changed files
321
+ const contentMap = {};
322
+ for (const f of batchResult.data.files) {
323
+ if (f.path) contentMap[f.path] = f;
324
+ }
325
+ // Build merged file list: all hash-only entries + inject content for changed ones
326
+ const mergedFiles = hashResult.data.files.map(f => {
327
+ const withContent = contentMap[f.path];
328
+ return withContent || f;
329
+ });
330
+ result = { ok: true, data: { files: mergedFiles } };
331
+ } else {
332
+ // Batch not available — fall back to full snapshot
333
+ process.stdout.write(`\r Batch unavailable, fetching full snapshot...${' '.repeat(10)}\n`);
334
+ const contentUrl = `/business/${businessId}/workspaces/${workspaceId}/snapshot?include_content=true${pathsParam}`;
335
+ result = await apiRequestJson(contentUrl, { method: 'GET', token: creds.token, timeoutMs });
336
+ const fullSec = Math.floor((Date.now() - startPhase2) / 1000);
337
+ process.stdout.write(`\r Fetched in ${fullSec}s.${' '.repeat(20)}\n`);
338
+ }
339
+ }
340
+ } else {
341
+ // Hash-only fetch failed — fall back to full snapshot
342
+ const fullUrl = `/business/${businessId}/workspaces/${workspaceId}/snapshot?include_content=true${pathsParam}`;
343
+ result = await apiRequestJson(fullUrl, { method: 'GET', token: creds.token, timeoutMs });
344
+ clearInterval(loading);
345
+ process.stdout.write(`\r Fetched in ${Math.floor((Date.now() - startTime) / 1000)}s.${' '.repeat(20)}\n`);
346
+ }
347
+ } else {
348
+ // First sync or --force — full snapshot with content
349
+ const snapshotUrl = `/business/${businessId}/workspaces/${workspaceId}/snapshot?include_content=true${pathsParam}`;
350
+ result = await apiRequestJson(snapshotUrl, { method: 'GET', token: creds.token, timeoutMs });
351
+ clearInterval(loading);
352
+ const totalSec = Math.floor((Date.now() - startTime) / 1000);
353
+ process.stdout.write(`\r Fetched in ${totalSec}s.${' '.repeat(20)}\n`);
354
+ }
216
355
 
217
356
  if (!result.ok) {
218
357
  const msg = result.errorMessage || result.error || `HTTP ${result.status}`;
219
358
  if (result.status === 0 || (typeof msg === 'string' && msg.toLowerCase().includes('timeout'))) {
220
359
  console.error(`\n Workspace timed out (large workspaces can take 60s+). Try: atris pull ${slug} --timeout=600`);
360
+ } else if (result.status === 502) {
361
+ console.error(`\n Computer didn't respond in time. It may be waking up or the workspace is large.`);
362
+ console.error(` Try again in 30s, or use: atris pull ${slug} --only=team/,context/`);
221
363
  } else if (result.status === 409) {
222
364
  console.error(`\n Computer is sleeping. Wake it first, then pull again.`);
223
365
  } else if (result.status === 403) {
@@ -233,11 +375,15 @@ async function pullBusiness(slug) {
233
375
  let files = result.data.files || [];
234
376
  if (files.length === 0) {
235
377
  console.log(' Workspace is empty.');
236
- return;
378
+ // Don't early-return in force mode: we still need to fall through to the
379
+ // mirror sweep so a genuinely-emptied cloud can clear local files. The
380
+ // sweep itself has a safety guard that refuses to wipe local content
381
+ // when remote reports empty (the snapshot-glitch case), so this is safe.
382
+ if (!force) return;
383
+ } else {
384
+ console.log(` Processing ${files.length} files...`);
237
385
  }
238
386
 
239
- console.log(` Processing ${files.length} files...`);
240
-
241
387
  // Apply --only filter if specified
242
388
  if (onlyPrefixes) {
243
389
  files = files.filter(file => {
@@ -247,23 +393,46 @@ async function pullBusiness(slug) {
247
393
  });
248
394
  if (files.length === 0) {
249
395
  console.log(` No files matched --only filter: ${onlyPrefixes.join(', ')}`);
250
- return;
396
+ // Don't early-return: we still need to update the manifest so paths
397
+ // that USED to be in the scoped subtree but were deleted on cloud
398
+ // get evicted from the manifest. Without this, the next push freshness
399
+ // check would forever flag those paths as drift and demand a pull —
400
+ // but the pull would early-return again, creating a deadlock.
401
+ } else {
402
+ console.log(` Filtered to ${files.length} files matching: ${onlyPrefixes.join(', ')}`);
251
403
  }
252
- console.log(` Filtered to ${files.length} files matching: ${onlyPrefixes.join(', ')}`);
253
404
  }
254
405
 
255
- // Build remote file map {path: {hash, size, content}}
406
+ // Build remote file map {path: {hash, size}} and content map {path: content}.
407
+ //
408
+ // CRITICAL: smart-pull (hash-only fetch) returns files with `path`+`hash`+`size`
409
+ // but no `content`. Phase-2 batch fetch only adds content for CHANGED files —
410
+ // unchanged files stay hash-only. We must include hash-only entries in remoteFiles
411
+ // so threeWayCompare doesn't see them as missing-from-remote (deletedRemote).
412
+ // The previous version skipped any file without content, which caused every
413
+ // smart-pull to mark every unchanged file as deleted-on-cloud and rmSync them.
256
414
  const remoteFiles = {};
257
415
  const remoteContent = {};
416
+ const crypto = require('crypto');
258
417
  for (const file of files) {
259
- if (!file.path || file.binary || file.content === null || file.content === undefined) continue;
260
- // Skip empty files (deleted files that were blanked out)
261
- if (file.content === '') continue;
262
- // Compute hash from content bytes (matches computeLocalHashes raw byte hashing)
263
- const crypto = require('crypto');
264
- const rawBytes = Buffer.from(file.content, 'utf-8');
265
- remoteFiles[file.path] = { hash: crypto.createHash('sha256').update(rawBytes).digest('hex'), size: rawBytes.length };
266
- remoteContent[file.path] = file.content;
418
+ if (!file.path || file.binary) continue;
419
+ // An empty string IS valid content (a real, zero-byte file). The earlier
420
+ // version excluded `content === ''` from the hasContent path, which made
421
+ // empty files masquerade as hash-only entries; they'd then be recorded in
422
+ // the manifest (with the empty-string hash) but never written to disk.
423
+ // A subsequent push would compare local (file missing) to manifest (file
424
+ // present) and try to delete the file from cloud silently undoing the
425
+ // very thing the user just pulled.
426
+ const hasContent = file.content !== null && file.content !== undefined && typeof file.content === 'string';
427
+ if (hasContent) {
428
+ // Full content available — hash from raw bytes (matches computeLocalHashes)
429
+ const rawBytes = Buffer.from(file.content, 'utf-8');
430
+ remoteFiles[file.path] = { hash: crypto.createHash('sha256').update(rawBytes).digest('hex'), size: rawBytes.length };
431
+ remoteContent[file.path] = file.content;
432
+ } else if (file.hash) {
433
+ // Hash-only entry from smart pull — trust the cloud-reported hash
434
+ remoteFiles[file.path] = { hash: file.hash, size: file.size || 0 };
435
+ }
267
436
  }
268
437
 
269
438
  // Compute local file hashes
@@ -277,6 +446,7 @@ async function pullBusiness(slug) {
277
446
 
278
447
  // Apply changes
279
448
  let pulled = 0;
449
+ let deleted = 0;
280
450
  let conflictCount = 0;
281
451
  let unchangedCount = diff.unchanged.length;
282
452
 
@@ -320,13 +490,84 @@ async function pullBusiness(slug) {
320
490
  }
321
491
  }
322
492
 
323
- // Warn about remote deletions
493
+ // Apply remote deletions
324
494
  for (const p of diff.deletedRemote) {
325
- console.log(` - ${p.replace(/^\//, '')} deleted on computer`);
495
+ const baseHash = effectiveManifest && effectiveManifest.files && effectiveManifest.files[p]
496
+ ? effectiveManifest.files[p].hash
497
+ : null;
498
+ const localHash = localFiles[p] ? localFiles[p].hash : null;
499
+ const localChanged = Boolean(baseHash && localHash && localHash !== baseHash);
500
+
501
+ if (force || !localChanged) {
502
+ const localPath = path.join(outputDir, p.replace(/^\//, ''));
503
+ fs.rmSync(localPath, { force: true });
504
+ pruneEmptyParentDirs(localPath, outputDir);
505
+ console.log(` - ${p.replace(/^\//, '')} deleted on computer`);
506
+ deleted++;
507
+ } else {
508
+ console.log(` \u26A0 ${p.replace(/^\//, '')} deleted on computer, but you changed it locally`);
509
+ conflictCount++;
510
+ }
511
+ }
512
+
513
+ // FORCE MIRROR SWEEP — local must EXACTLY match cloud after a force pull.
514
+ // The threeWayCompare path with effectiveManifest=null only computes
515
+ // newLocal/conflicts/newRemote and never marks files as deletedRemote, so
516
+ // local-only files (created locally, never on cloud) survive a force pull.
517
+ // That breaks the "cloud is the source of truth" promise. Sweep them now.
518
+ //
519
+ // SAFETY GUARDS — without these the sweep can wipe an entire local copy:
520
+ // • Scope the sweep: when --only is set, only sweep paths INSIDE the
521
+ // prefix(es). Out-of-scope local files must be left alone — the user
522
+ // asked for a partial pull, not a workspace-wide reset.
523
+ // • Skip when remoteFiles is empty AND local has in-scope content: the
524
+ // snapshot endpoint has a known server-side bug where it returns 0
525
+ // files for healthy workspaces. If cloud reports empty but local has
526
+ // in-scope content we refuse to sweep — the user can re-run with
527
+ // --keep-local and investigate, or run `atris align --hard` for an
528
+ // explicit nuke.
529
+ // • Skip files the server's snapshot filter hides. The warm runner's
530
+ // _snapshot_dir (ecs_warm_runner.py) deliberately omits CLAUDE.md and
531
+ // other names from snapshots, so they never appear in remoteFiles even
532
+ // when they DO exist on cloud. Sweeping them would delete server-managed
533
+ // files that aren't actually missing on cloud.
534
+ const SERVER_HIDDEN_BASENAMES = new Set(['CLAUDE.md']);
535
+ function basename(p) {
536
+ const idx = p.lastIndexOf('/');
537
+ return idx === -1 ? p : p.slice(idx + 1);
538
+ }
539
+ function isInScope(p) {
540
+ if (!onlyPrefixes) return true;
541
+ const rel = p.replace(/^\//, '');
542
+ return onlyPrefixes.some((pref) => rel.startsWith(pref));
543
+ }
544
+ if (force) {
545
+ const remotePathSet = new Set(Object.keys(remoteFiles));
546
+ const inScopeLocal = Object.keys(localFiles).filter(isInScope);
547
+ if (remotePathSet.size === 0 && inScopeLocal.length > 0) {
548
+ console.log('');
549
+ console.log(' ⚠ Cloud reported zero files but local has in-scope content. Refusing to sweep.');
550
+ console.log(' This usually means the snapshot endpoint glitched. Try again,');
551
+ console.log(' or run `atris align --hard` if you really want to nuke local.');
552
+ } else {
553
+ for (const p of inScopeLocal) {
554
+ if (remotePathSet.has(p)) continue;
555
+ if (SERVER_HIDDEN_BASENAMES.has(basename(p))) continue;
556
+ const localPath = path.join(outputDir, p.replace(/^\//, ''));
557
+ try {
558
+ fs.rmSync(localPath, { force: true });
559
+ pruneEmptyParentDirs(localPath, outputDir);
560
+ console.log(` - ${p.replace(/^\//, '')} not on cloud, removed locally`);
561
+ deleted++;
562
+ } catch {
563
+ // ignore — file might already be gone
564
+ }
565
+ }
566
+ }
326
567
  }
327
568
 
328
569
  // Show unchanged
329
- if (unchangedCount > 0 && pulled === 0 && conflictCount === 0 && diff.deletedRemote.length === 0) {
570
+ if (unchangedCount > 0 && pulled === 0 && deleted === 0 && conflictCount === 0) {
330
571
  console.log(' Already up to date.');
331
572
  }
332
573
 
@@ -334,17 +575,17 @@ async function pullBusiness(slug) {
334
575
  console.log('');
335
576
  const parts = [];
336
577
  if (pulled > 0) parts.push(`${pulled} pulled`);
578
+ if (deleted > 0) parts.push(`${deleted} deleted`);
337
579
  if (diff.newRemote.length > 0 && !parts.some(p => p.includes('pulled'))) parts.push(`${diff.newRemote.length} new`);
338
580
  if (unchangedCount > 0) parts.push(`${unchangedCount} unchanged`);
339
581
  if (conflictCount > 0) parts.push(`${conflictCount} conflict${conflictCount > 1 ? 's' : ''}`);
340
- if (diff.deletedRemote.length > 0) parts.push(`${diff.deletedRemote.length} deleted remotely`);
341
582
  if (parts.length > 0) console.log(` ${parts.join(', ')}.`);
342
583
 
343
584
  // Get current commit hash from remote (for manifest)
344
585
  let commitHash = null;
345
586
  try {
346
587
  const headResult = await apiRequestJson(
347
- `/businesses/${businessId}/workspaces/${workspaceId}/git/head`,
588
+ `/business/${businessId}/workspaces/${workspaceId}/git/head`,
348
589
  { method: 'GET', token: creds.token }
349
590
  );
350
591
  if (headResult.ok && headResult.data && headResult.data.commit) {
@@ -354,10 +595,43 @@ async function pullBusiness(slug) {
354
595
  // Git might not be initialized yet — that's fine
355
596
  }
356
597
 
357
- // Save manifest when using --only, merge into existing manifest to avoid data loss
598
+ // ANTI-WIPE GUARD: if cloud reported zero in-scope files but local still
599
+ // has in-scope content (i.e. the sweep refused), don't overwrite the
600
+ // manifest with empty data for the scoped subtree. The manifest is the
601
+ // authoritative record of what we last knew was on cloud — wiping it
602
+ // because of a transient empty snapshot would force every subsequent
603
+ // push to flag every file as drift. Better to leave the manifest stale
604
+ // than to record a never-actually-true "cloud is empty" state.
605
+ //
606
+ // Applies to both whole-workspace pulls and scoped (--only) pulls.
607
+ {
608
+ const inScopeLocalCount = onlyPrefixes
609
+ ? Object.keys(localFiles).filter((p) => onlyPrefixes.some((pref) => p.replace(/^\//, '').startsWith(pref))).length
610
+ : Object.keys(localFiles).length;
611
+ if (Object.keys(remoteFiles).length === 0 && inScopeLocalCount > 0) {
612
+ return;
613
+ }
614
+ }
615
+
616
+ // Save manifest — when using --only, merge into existing manifest so paths
617
+ // OUTSIDE the scoped prefix don't get dropped. Inside the scoped prefix,
618
+ // however, we must replace (not merge) so that files deleted on cloud
619
+ // since the last sync get evicted from the manifest. Without this, the
620
+ // push freshness check would forever flag those paths as "deleted on
621
+ // cloud" drift, blocking pushes for no reason.
358
622
  let manifestFiles = remoteFiles;
359
623
  if (onlyPrefixes && manifest && manifest.files) {
360
- manifestFiles = { ...manifest.files, ...remoteFiles };
624
+ const merged = {};
625
+ // 1. Keep paths from old manifest that are OUTSIDE the scoped prefix.
626
+ for (const [p, info] of Object.entries(manifest.files)) {
627
+ const inScope = onlyPrefixes.some((pref) => p.replace(/^\//, '').startsWith(pref));
628
+ if (!inScope) merged[p] = info;
629
+ }
630
+ // 2. Overwrite the in-scope subtree with what we just pulled (cloud truth).
631
+ for (const [p, info] of Object.entries(remoteFiles)) {
632
+ merged[p] = info;
633
+ }
634
+ manifestFiles = merged;
361
635
  }
362
636
  const newManifest = buildManifest(manifestFiles, commitHash);
363
637
  saveManifest(resolvedSlug || slug, newManifest);
@@ -371,6 +645,63 @@ async function pullBusiness(slug) {
371
645
  workspace_id: workspaceId,
372
646
  name: businessName,
373
647
  }, null, 2));
648
+
649
+ // Wire skills → .claude/skills/ so they work as slash commands
650
+ const skillsDir = path.join(outputDir, 'skills');
651
+ const claudeSkillsDir = path.join(outputDir, '.claude', 'skills');
652
+
653
+ if (fs.existsSync(skillsDir)) {
654
+ fs.mkdirSync(claudeSkillsDir, { recursive: true });
655
+
656
+ // Recursively find all skill folders (any dir containing SKILL.md, at any depth)
657
+ const wireSkills = (dir, relPrefix) => {
658
+ const entries = fs.readdirSync(dir);
659
+ for (const entry of entries) {
660
+ const fullPath = path.join(dir, entry);
661
+ if (!fs.statSync(fullPath).isDirectory()) continue;
662
+ if (entry === 'README.md' || entry.startsWith('.')) continue;
663
+
664
+ const skillFile = path.join(fullPath, 'SKILL.md');
665
+ if (fs.existsSync(skillFile)) {
666
+ // This is a leaf skill — wire it
667
+ const skillName = relPrefix ? `${relPrefix}-${entry}` : entry;
668
+ const symlinkPath = path.join(claudeSkillsDir, skillName);
669
+ const relativePath = path.relative(path.dirname(symlinkPath), fullPath);
670
+
671
+ // Business skills override init skills (remove existing symlink if present)
672
+ if (fs.existsSync(symlinkPath)) {
673
+ try {
674
+ const stat = fs.lstatSync(symlinkPath);
675
+ if (stat.isSymbolicLink()) fs.unlinkSync(symlinkPath);
676
+ else continue; // Don't overwrite real directories
677
+ } catch { continue; }
678
+ }
679
+ try {
680
+ fs.symlinkSync(relativePath, symlinkPath);
681
+ } catch (e) {
682
+ // Fallback: copy
683
+ fs.mkdirSync(symlinkPath, { recursive: true });
684
+ fs.copyFileSync(skillFile, path.join(symlinkPath, 'SKILL.md'));
685
+ }
686
+ }
687
+
688
+ // Recurse into subdirectories (e.g. skills/executive/pipeline-health/)
689
+ wireSkills(fullPath, relPrefix ? `${relPrefix}-${entry}` : entry);
690
+ }
691
+ };
692
+
693
+ wireSkills(skillsDir, '');
694
+
695
+ // Count wired skills
696
+ const wiredSkills = fs.readdirSync(claudeSkillsDir).filter(f => {
697
+ const p = path.join(claudeSkillsDir, f);
698
+ return fs.statSync(p).isDirectory();
699
+ });
700
+ if (wiredSkills.length > 0) {
701
+ console.log(` Wired ${wiredSkills.length} skills → .claude/skills/`);
702
+ }
703
+ }
704
+
374
705
  }
375
706
 
376
707