atris 2.6.0 → 2.6.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/GETTING_STARTED.md +2 -2
- package/atris/GETTING_STARTED.md +2 -2
- package/bin/atris.js +35 -4
- package/commands/business.js +244 -2
- package/commands/context-sync.js +228 -0
- package/commands/pull.js +176 -50
- package/commands/push.js +154 -61
- package/commands/setup.js +178 -0
- package/commands/workspace-clean.js +249 -0
- package/lib/manifest.js +224 -0
- package/lib/section-merge.js +196 -0
- package/package.json +9 -4
- package/utils/api.js +9 -1
- package/utils/update-check.js +11 -11
- package/AGENT.md +0 -35
- package/atris/experiments/README.md +0 -118
- package/atris/experiments/_examples/smoke-keep-revert/README.md +0 -45
- package/atris/experiments/_examples/smoke-keep-revert/candidate.py +0 -8
- package/atris/experiments/_examples/smoke-keep-revert/loop.py +0 -129
- package/atris/experiments/_examples/smoke-keep-revert/measure.py +0 -47
- package/atris/experiments/_examples/smoke-keep-revert/program.md +0 -3
- package/atris/experiments/_examples/smoke-keep-revert/proposals/bad_patch.py +0 -19
- package/atris/experiments/_examples/smoke-keep-revert/proposals/fix_patch.py +0 -22
- package/atris/experiments/_examples/smoke-keep-revert/reset.py +0 -21
- package/atris/experiments/_examples/smoke-keep-revert/results.tsv +0 -5
- package/atris/experiments/_examples/smoke-keep-revert/visual.svg +0 -52
- package/atris/experiments/_fixtures/invalid/BadName/loop.py +0 -1
- package/atris/experiments/_fixtures/invalid/BadName/program.md +0 -3
- package/atris/experiments/_fixtures/invalid/BadName/results.tsv +0 -1
- package/atris/experiments/_fixtures/invalid/bloated-context/loop.py +0 -1
- package/atris/experiments/_fixtures/invalid/bloated-context/measure.py +0 -1
- package/atris/experiments/_fixtures/invalid/bloated-context/program.md +0 -6
- package/atris/experiments/_fixtures/invalid/bloated-context/results.tsv +0 -1
- package/atris/experiments/_fixtures/valid/good-experiment/loop.py +0 -1
- package/atris/experiments/_fixtures/valid/good-experiment/measure.py +0 -1
- package/atris/experiments/_fixtures/valid/good-experiment/program.md +0 -3
- package/atris/experiments/_fixtures/valid/good-experiment/results.tsv +0 -1
- package/atris/experiments/_template/pack/loop.py +0 -3
- package/atris/experiments/_template/pack/measure.py +0 -13
- package/atris/experiments/_template/pack/program.md +0 -3
- package/atris/experiments/_template/pack/reset.py +0 -3
- package/atris/experiments/_template/pack/results.tsv +0 -1
- package/atris/experiments/benchmark_runtime.py +0 -81
- package/atris/experiments/benchmark_validate.py +0 -70
- package/atris/experiments/validate.py +0 -92
- package/atris/team/navigator/journal/2026-02-23.md +0 -6
|
@@ -0,0 +1,178 @@
|
|
|
1
|
+
const fs = require('fs');
|
|
2
|
+
const path = require('path');
|
|
3
|
+
const { loadCredentials, promptUser } = require('../utils/auth');
|
|
4
|
+
const { apiRequestJson } = require('../utils/api');
|
|
5
|
+
|
|
6
|
+
/**
 * Interactive first-run setup: verifies the Node.js version, ensures the
 * user is logged in (triggering the login flow if not), lists the account's
 * businesses, and pulls the one the user selects.
 *
 * Exits the process (code 1) on unrecoverable failures; otherwise always
 * ends by printing the "finished" banner via printFinished().
 */
async function setupAtris() {
  console.log('');
  console.log('━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━');
  console.log(' Atris Setup');
  console.log('━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━');
  console.log('');

  // Step 1: Check Node version (hard requirement: Node 18+).
  const nodeVersion = process.versions.node;
  const major = parseInt(nodeVersion.split('.')[0], 10);
  if (major < 18) {
    console.error(`Node.js ${nodeVersion} is too old. Atris requires Node.js 18 or newer.`);
    console.error('');
    console.error('Update Node.js:');
    console.error(' macOS: brew install node');
    console.error(' or visit https://nodejs.org/en/download');
    process.exit(1);
  }
  console.log(` [1/4] Node.js ${nodeVersion} ... OK`);

  // Step 2: Check login status; run the interactive login flow if needed.
  let creds = loadCredentials();
  if (creds && creds.token) {
    const label = creds.email || creds.user_id || 'unknown';
    console.log(` [2/4] Logged in as ${label} ... OK`);
  } else {
    console.log(' [2/4] Not logged in. Starting login...');
    console.log('');
    const { loginAtris } = require('./auth');
    // loginAtris calls process.exit, so we override it temporarily:
    // a zero exit is swallowed (login succeeded, setup continues);
    // any non-zero code still terminates the process.
    const originalExit = process.exit;
    let loginCompleted = false;
    process.exit = (code) => {
      if (code === 0) {
        loginCompleted = true;
        return; // Suppress exit on success so setup can continue
      }
      // On failure, actually exit
      originalExit(code);
    };
    try {
      await loginAtris();
    } finally {
      // Always restore the real process.exit, even if login threw.
      process.exit = originalExit;
    }

    if (!loginCompleted) {
      console.error('\nLogin failed. Run "atris setup" again after fixing the issue.');
      process.exit(1);
    }

    // Reload credentials after login
    creds = loadCredentials();
    if (!creds || !creds.token) {
      console.error('\nLogin did not produce credentials. Run "atris login" manually, then "atris setup" again.');
      process.exit(1);
    }
    console.log('');
    console.log(` [2/4] Logged in ... OK`);
  }

  // Step 3: Fetch the businesses visible to this account.
  console.log(' [3/4] Fetching your businesses...');
  let businesses = [];
  try {
    const result = await apiRequestJson('/businesses/', {
      method: 'GET',
      token: creds.token,
    });

    if (!result.ok) {
      // Non-fatal: setup still finishes; the user can add a business later.
      console.error(`\n Could not fetch businesses: ${result.error || 'Unknown error'}`);
      console.error(' You can add one later with: atris business add <slug>');
      console.log('');
      printFinished();
      return;
    }

    businesses = Array.isArray(result.data) ? result.data : [];
  } catch (err) {
    console.error(`\n Could not fetch businesses: ${err.message || err}`);
    console.error(' You can add one later with: atris business add <slug>');
    console.log('');
    printFinished();
    return;
  }

  if (businesses.length === 0) {
    console.log('\n No businesses found on your account.');
    console.log(' Create one at https://atris.ai or ask your team admin for access.');
    console.log('');
    printFinished();
    return;
  }

  // Step 4: List businesses and let user pick one to pull.
  console.log('');
  console.log(' Your businesses:');
  businesses.forEach((b, i) => {
    const name = b.name || b.slug || 'Unnamed';
    const slug = b.slug || b.id || '';
    console.log(` ${i + 1}. ${name} (${slug})`);
  });
  console.log('');

  const answer = await promptUser(' Which business to pull? (number or slug, or "skip"): ');

  if (!answer || answer.toLowerCase() === 'skip') {
    console.log(' Skipped. You can pull a business later with: atris pull <slug>');
    console.log('');
    printFinished();
    return;
  }

  // Resolve selection — try number first, then slug match
  let selected = null;
  const num = parseInt(answer, 10);
  if (!isNaN(num) && num >= 1 && num <= businesses.length) {
    selected = businesses[num - 1];
  } else {
    // Try slug or name match: exact slug, exact name, then substring matches.
    const q = answer.toLowerCase();
    selected = businesses.find(b => (b.slug || '').toLowerCase() === q)
      || businesses.find(b => (b.name || '').toLowerCase() === q)
      || businesses.find(b => (b.slug || '').toLowerCase().includes(q))
      || businesses.find(b => (b.name || '').toLowerCase().includes(q));
  }

  if (!selected) {
    console.error(`\n Could not find a business matching "${answer}".`);
    console.log(' Run "atris pull <slug>" to pull manually.');
    console.log('');
    printFinished();
    return;
  }

  const slug = selected.slug || selected.id;
  console.log(`\n [4/4] Pulling "${selected.name || slug}"...`);

  try {
    const { pullAtris } = require('./pull');
    // Set the arg so pullAtris picks it up (pull reads its slug from argv[3]).
    const originalArgv = process.argv.slice();
    process.argv[3] = slug;
    // Suppress successful exits from pullAtris — same trick as the login step.
    const originalExit = process.exit;
    process.exit = (code) => {
      if (code === 0) return;
      originalExit(code);
    };
    try {
      await pullAtris();
    } finally {
      // Restore exit and argv regardless of pull outcome.
      process.exit = originalExit;
      process.argv = originalArgv;
    }
    console.log(` Pulled "${selected.name || slug}" ... OK`);
  } catch (err) {
    // Pull failure is non-fatal for setup; point the user at the manual command.
    console.error(`\n Pull failed: ${err.message || err}`);
    console.log(` You can try again with: atris pull ${slug}`);
  }

  console.log('');
  printFinished();
}
|
|
170
|
+
|
|
171
|
+
/**
 * Print the closing banner directing the user to `atris activate`.
 */
function printFinished() {
  const rule = '━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━';
  const banner = [
    rule,
    ' You\'re all set! Run `atris activate` to start.',
    rule,
    '',
  ];
  for (const line of banner) {
    console.log(line);
  }
}
|
|
177
|
+
|
|
178
|
+
module.exports = { setupAtris };
|
|
@@ -0,0 +1,249 @@
|
|
|
1
|
+
const { loadCredentials } = require('../utils/auth');
|
|
2
|
+
const { apiRequestJson } = require('../utils/api');
|
|
3
|
+
const { loadBusinesses, saveBusinesses } = require('./business');
|
|
4
|
+
|
|
5
|
+
// Junk detection patterns
|
|
6
|
+
// Junk detection predicates, keyed by category. Each takes a snapshot file
// entry ({ path, size }) and returns true when the file matches.
const JUNK_PATTERNS = {
  // Zero- or one-byte placeholders (missing size counts as empty).
  emptyFiles(file) {
    const size = file.size || 0;
    return size <= 1;
  },
  // Copies suffixed like report_v2.md left beside the canonical file.
  versionedDuplicates(file) {
    return /_v\d+\.\w+$/.test(file.path);
  },
  actionQueues(file) {
    return /action_queue\.json$/.test(file.path);
  },
  agentOutputDumps(file) {
    return /^\/?(agents\/[^/]+\/output\/)/.test(file.path);
  },
  researchDumps(file) {
    return /^\/?(agents\/[^/]+\/research\/)/.test(file.path);
  },
};
|
|
13
|
+
|
|
14
|
+
// Human-readable labels for each junk category, used when printing the
// cleanup report. Keys must mirror JUNK_PATTERNS exactly.
const JUNK_LABELS = {
  emptyFiles: 'Empty files (size <= 1 byte)',
  versionedDuplicates: 'Versioned duplicates (*_v1, *_v2, etc.)',
  actionQueues: 'Action queue files',
  agentOutputDumps: 'Agent output dumps (agents/*/output/)',
  researchDumps: 'Research dumps (agents/*/research/)',
};
|
|
21
|
+
|
|
22
|
+
/**
 * Format a byte count as a short human-readable string, e.g. "1.5 KB".
 * Whole bytes are printed without decimals; larger units get one decimal.
 *
 * @param {number} bytes - Size in bytes.
 * @returns {string} Formatted size.
 */
function formatBytes(bytes) {
  // Guard: zero, negative, or non-finite input renders as zero bytes.
  // (The original returned "NaN undefined"-style output for these.)
  if (!Number.isFinite(bytes) || bytes <= 0) return '0 B';
  const units = ['B', 'KB', 'MB', 'GB'];
  // Clamp the unit index so sizes >= 1 TB render in GB instead of
  // indexing past the table (which printed "undefined" as the unit).
  const i = Math.max(0, Math.min(units.length - 1, Math.floor(Math.log(bytes) / Math.log(1024))));
  return `${(bytes / Math.pow(1024, i)).toFixed(i > 0 ? 1 : 0)} ${units[i]}`;
}
|
|
28
|
+
|
|
29
|
+
/**
 * `atris clean-workspace <slug> [--yes]` command handler.
 *
 * Fetches a metadata-only snapshot of the business workspace, prints a
 * per-directory size breakdown plus any junk matched by JUNK_PATTERNS,
 * and — only when --yes is passed — removes the junk by syncing empty
 * content for each detected path, in batches of 50.
 *
 * Reads its arguments from process.argv; exits the process (code 1) on
 * auth/API failures.
 */
async function cleanWorkspace() {
  const slug = process.argv[3];
  const autoConfirm = process.argv.includes('--yes');

  if (!slug || slug === '--help') {
    console.log('');
    console.log('Usage: atris clean-workspace <business-slug> [--yes]');
    console.log('');
    console.log('Analyzes a workspace for junk files and shows a cleanup report.');
    console.log('Pass --yes to actually delete the detected junk.');
    console.log('');
    console.log('Detects:');
    console.log(' - Empty files (0-1 bytes)');
    console.log(' - Versioned duplicates (*_v1.md, *_v2.md, etc.)');
    console.log(' - action_queue.json files');
    console.log(' - Agent output dumps (agents/*/output/)');
    console.log(' - Research dumps (agents/*/research/)');
    console.log('');
    return;
  }

  // Auth
  const creds = loadCredentials();
  if (!creds || !creds.token) {
    console.error('Not logged in. Run: atris login');
    process.exit(1);
  }

  // Resolve business: prefer the local cache, fall back to the API.
  let businessId, workspaceId, businessName;
  const businesses = loadBusinesses();

  if (businesses[slug]) {
    businessId = businesses[slug].business_id;
    workspaceId = businesses[slug].workspace_id;
    businessName = businesses[slug].name || slug;
  } else {
    const listResult = await apiRequestJson('/businesses/', { method: 'GET', token: creds.token });
    if (!listResult.ok) {
      console.error(`Failed to fetch businesses: ${listResult.error || listResult.status}`);
      process.exit(1);
    }
    // FIX: guard b.name — a business without a name previously crashed
    // on .toLowerCase() instead of simply not matching.
    const match = (listResult.data || []).find(
      b => b.slug === slug || (b.name || '').toLowerCase() === slug.toLowerCase()
    );
    if (!match) {
      console.error(`Business "${slug}" not found.`);
      process.exit(1);
    }
    businessId = match.id;
    workspaceId = match.workspace_id;
    businessName = match.name;

    // Cache for next time
    businesses[slug] = {
      business_id: businessId,
      workspace_id: workspaceId,
      name: businessName,
      slug: match.slug,
      added_at: new Date().toISOString(),
    };
    saveBusinesses(businesses);
  }

  if (!workspaceId) {
    console.error(`Business "${slug}" has no workspace.`);
    process.exit(1);
  }

  // Fetch snapshot (metadata only)
  console.log('');
  console.log(`Scanning ${businessName}...`);

  const result = await apiRequestJson(
    `/businesses/${businessId}/workspaces/${workspaceId}/snapshot?include_content=false`,
    { method: 'GET', token: creds.token, timeoutMs: 60000 }
  );

  if (!result.ok) {
    const msg = result.error || `HTTP ${result.status}`;
    if (result.status === 409) {
      console.error('\n Computer is sleeping. Wake it first.');
    } else if (result.status === 403) {
      console.error(`\n Access denied for "${slug}".`);
    } else if (result.status === 404) {
      console.error(`\n Business "${slug}" not found.`);
    } else {
      console.error(`\n Failed: ${msg}`);
    }
    process.exit(1);
  }

  // FIX: tolerate a missing data payload instead of crashing on .files.
  const files = (result.data && result.data.files) || [];
  if (files.length === 0) {
    console.log(' Workspace is empty. Nothing to clean.');
    return;
  }

  // Analyze workspace
  const totalSize = files.reduce((sum, f) => sum + (f.size || 0), 0);

  // Directory breakdown by top-level folder ('(root)' for loose files).
  const dirStats = {};
  for (const file of files) {
    const p = (file.path || '').replace(/^\//, '');
    const topDir = p.includes('/') ? p.split('/')[0] : '(root)';
    if (!dirStats[topDir]) dirStats[topDir] = { count: 0, size: 0 };
    dirStats[topDir].count++;
    dirStats[topDir].size += file.size || 0;
  }

  // Detect junk: a file may match several categories but is counted once.
  const junkByCategory = {};
  const allJunkPaths = new Set();

  for (const [key, testFn] of Object.entries(JUNK_PATTERNS)) {
    const matches = files.filter(testFn);
    if (matches.length > 0) {
      junkByCategory[key] = matches;
      for (const m of matches) allJunkPaths.add(m.path);
    }
  }

  const junkSize = files
    .filter(f => allJunkPaths.has(f.path))
    .reduce((sum, f) => sum + (f.size || 0), 0);

  // Print report
  console.log('');
  console.log('━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━');
  console.log(` Workspace: ${businessName}`);
  console.log(` Total files: ${files.length} Total size: ${formatBytes(totalSize)}`);
  console.log('━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━');

  // Directory breakdown, largest first.
  console.log('');
  console.log(' Files by directory:');
  const sortedDirs = Object.entries(dirStats).sort((a, b) => b[1].size - a[1].size);
  for (const [dir, stats] of sortedDirs) {
    const pct = totalSize > 0 ? ((stats.size / totalSize) * 100).toFixed(0) : 0;
    console.log(` ${dir.padEnd(30)} ${String(stats.count).padStart(5)} files ${formatBytes(stats.size).padStart(10)} (${pct}%)`);
  }

  // Junk report
  console.log('');
  if (allJunkPaths.size === 0) {
    console.log(' No junk detected. Workspace is clean.');
    console.log('');
    return;
  }

  console.log(' Junk detected:');
  console.log('');

  for (const [key, matches] of Object.entries(junkByCategory)) {
    const catSize = matches.reduce((sum, f) => sum + (f.size || 0), 0);
    console.log(` ${JUNK_LABELS[key]} (${matches.length} files, ${formatBytes(catSize)})`);

    // Show up to 10 example paths
    const show = matches.slice(0, 10);
    for (const f of show) {
      console.log(` - ${(f.path || '').replace(/^\//, '')} (${formatBytes(f.size || 0)})`);
    }
    if (matches.length > 10) {
      console.log(` ... and ${matches.length - 10} more`);
    }
    console.log('');
  }

  console.log('━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━');
  console.log(` Would remove: ${allJunkPaths.size} files (${formatBytes(junkSize)})`);
  console.log('━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━');
  console.log('');

  if (!autoConfirm) {
    console.log(' Run with --yes to clean up:');
    console.log(` atris clean-workspace ${slug} --yes`);
    console.log('');
    return;
  }

  // Delete junk by syncing empty content
  console.log(' Cleaning...');

  const filesToDelete = Array.from(allJunkPaths).map(p => ({ path: p, content: '' }));

  // Batch in chunks of 50 to avoid huge payloads
  const BATCH_SIZE = 50;
  let deleted = 0;

  for (let i = 0; i < filesToDelete.length; i += BATCH_SIZE) {
    const batch = filesToDelete.slice(i, i + BATCH_SIZE);

    const syncResult = await apiRequestJson(
      `/businesses/${businessId}/workspaces/${workspaceId}/sync`,
      {
        method: 'POST',
        token: creds.token,
        body: { files: batch },
        headers: { 'X-Atris-Actor-Source': 'cli' },
      }
    );

    if (!syncResult.ok) {
      const msg = syncResult.error || `HTTP ${syncResult.status}`;
      console.error(`\n Cleanup failed at batch ${Math.floor(i / BATCH_SIZE) + 1}: ${msg}`);
      process.exit(1);
    }

    deleted += batch.length;
    if (filesToDelete.length > BATCH_SIZE) {
      console.log(` ${deleted}/${filesToDelete.length} files processed...`);
    }
  }

  console.log('');
  console.log(` Done. Removed ${deleted} junk files (${formatBytes(junkSize)}).`);
  console.log('');
}
|
|
248
|
+
|
|
249
|
+
module.exports = { cleanWorkspace };
|
package/lib/manifest.js
ADDED
|
@@ -0,0 +1,224 @@
|
|
|
1
|
+
const crypto = require('crypto');
|
|
2
|
+
const fs = require('fs');
|
|
3
|
+
const path = require('path');
|
|
4
|
+
const os = require('os');
|
|
5
|
+
const { computeContentHash } = require('./journal');
|
|
6
|
+
|
|
7
|
+
/**
 * Build the absolute path of the per-business manifest file.
 * Manifests live at ~/.atris/businesses/{slug}/manifest.json.
 *
 * @param {string} slug - Business slug used as the directory name.
 * @returns {string} Absolute manifest path.
 */
function getManifestPath(slug) {
  const home = os.homedir();
  return path.join(home, '.atris', 'businesses', slug, 'manifest.json');
}
|
|
14
|
+
|
|
15
|
+
/**
 * Read the last-synced manifest for a business.
 *
 * @param {string} slug - Business slug.
 * @returns {object|null} Parsed manifest, or null when there was no previous
 *   sync or the file on disk is unreadable/corrupt.
 */
function loadManifest(slug) {
  const manifestFile = getManifestPath(slug);
  if (!fs.existsSync(manifestFile)) {
    return null;
  }
  try {
    const raw = fs.readFileSync(manifestFile, 'utf8');
    return JSON.parse(raw);
  } catch {
    // Corrupt or concurrently-removed manifest: behave like a first sync.
    return null;
  }
}
|
|
27
|
+
|
|
28
|
+
/**
 * Persist the manifest after a successful sync, creating the
 * ~/.atris/businesses/{slug}/ directory if it does not exist.
 *
 * @param {string} slug - Business slug.
 * @param {object} manifest - Manifest object (see buildManifest).
 */
function saveManifest(slug, manifest) {
  const p = getManifestPath(slug);
  // mkdirSync with recursive:true is already a no-op when the directory
  // exists, so the previous existsSync pre-check was redundant.
  fs.mkdirSync(path.dirname(p), { recursive: true });
  fs.writeFileSync(p, JSON.stringify(manifest, null, 2));
}
|
|
37
|
+
|
|
38
|
+
/**
 * Compute the hash of normalized content.
 * Delegates to journal.js computeContentHash so the manifest and the
 * journal agree on a single hashing scheme.
 *
 * @param {string} content - File content to hash.
 * @returns {*} Whatever computeContentHash returns — presumably a hex
 *   SHA-256 digest string; verify against lib/journal.js.
 */
function computeFileHash(content) {
  return computeContentHash(content);
}
|
|
45
|
+
|
|
46
|
+
/**
 * Assemble a manifest record for a completed sync.
 *
 * @param {Object<string, {hash: string, size: number}>} files - Hash/size
 *   map keyed by workspace path.
 * @param {string} [commitHash] - Commit identifier; falsy values are stored
 *   as null.
 * @returns {{last_sync: string, last_commit: (string|null), files: object}}
 */
function buildManifest(files, commitHash) {
  const manifest = {
    last_sync: new Date().toISOString(),
    last_commit: commitHash ? commitHash : null,
    files,
  };
  return manifest;
}
|
|
57
|
+
|
|
58
|
+
/**
 * Directory names that are never synced and are skipped during local walks.
 */
const SKIP_DIRS = new Set([
  'node_modules', '__pycache__', '.git', 'venv', '.venv',
  'lost+found', '.cache', '.atris',
]);

/**
 * Walk a local directory tree and compute a SHA-256 hash for every file.
 * Dotfiles/dot-directories and SKIP_DIRS entries are skipped; files that
 * cannot be read are silently ignored.
 *
 * @param {string} localDir - Root directory to scan.
 * @returns {Object<string, {hash: string, size: number}>} Map keyed by
 *   '/'-separated path relative to localDir with a leading '/', matching
 *   the remote snapshot's path format on every platform.
 */
function computeLocalHashes(localDir) {
  const files = {};

  function walk(dir) {
    let entries;
    try {
      entries = fs.readdirSync(dir, { withFileTypes: true });
    } catch {
      return; // Unreadable directory: skip the whole subtree.
    }
    for (const entry of entries) {
      if (entry.name.startsWith('.')) continue;
      const fullPath = path.join(dir, entry.name);
      if (entry.isDirectory()) {
        if (SKIP_DIRS.has(entry.name)) continue;
        walk(fullPath);
      } else if (entry.isFile()) {
        // FIX: normalize separators to '/' so manifest keys match remote
        // POSIX-style paths on Windows too (path.relative yields '\\' there).
        const relPath = '/' + path.relative(localDir, fullPath).split(path.sep).join('/');
        try {
          // Hash raw bytes to match warm runner's _hash_bytes(data)
          const rawBytes = fs.readFileSync(fullPath);
          const hash = crypto.createHash('sha256').update(rawBytes).digest('hex');
          files[relPath] = { hash, size: rawBytes.length };
        } catch {
          // Unreadable file: skip it.
        }
      }
    }
  }

  walk(localDir);
  return files;
}
|
|
100
|
+
|
|
101
|
+
/**
 * Three-way comparison between local files, remote files, and the
 * last-synced manifest (the merge base).
 *
 * @param {Object<string, {hash: string}>} localFiles - Local path → hash map.
 * @param {Object<string, {hash: string}>} remoteFiles - Remote path → hash map.
 * @param {{files: object}|null} manifest - Last-synced baseline, or null on
 *   first sync.
 * @returns {{toPull: string[], toPush: string[], conflicts: string[],
 *   unchanged: string[], deletedLocal: string[], deletedRemote: string[],
 *   newLocal: string[], newRemote: string[]}} Path lists by classification.
 */
function threeWayCompare(localFiles, remoteFiles, manifest) {
  const result = {
    toPull: [],
    toPush: [],
    conflicts: [],
    unchanged: [],
    deletedLocal: [],
    deletedRemote: [],
    newLocal: [],
    newRemote: [],
  };

  // First sync — no baseline: classify by presence and hash equality only.
  if (!manifest || !manifest.files) {
    for (const p of Object.keys(remoteFiles)) {
      if (localFiles[p] && localFiles[p].hash === remoteFiles[p].hash) {
        result.unchanged.push(p);
      } else if (localFiles[p]) {
        // Both exist with different hashes, no baseline — treat as conflict
        result.conflicts.push(p);
      } else {
        result.newRemote.push(p);
      }
    }
    // Local files not in remote are new local
    for (const p of Object.keys(localFiles)) {
      if (!remoteFiles[p]) {
        result.newLocal.push(p);
      }
    }
    return result;
  }

  const base = manifest.files;
  const allPaths = new Set([
    ...Object.keys(localFiles),
    ...Object.keys(remoteFiles),
    ...Object.keys(base),
  ]);

  for (const p of allPaths) {
    const inLocal = p in localFiles;
    const inRemote = p in remoteFiles;
    const inBase = p in base;

    const localHash = inLocal ? localFiles[p].hash : null;
    const remoteHash = inRemote ? remoteFiles[p].hash : null;
    const baseHash = inBase ? base[p].hash : null;

    // NOTE: the original had two unreachable duplicate branches for the
    // "deleted on one side" cases; this exhaustive form removes them
    // without changing which list any path lands in.
    if (inLocal && inRemote) {
      if (!inBase) {
        // Created independently on both sides.
        if (localHash === remoteHash) result.unchanged.push(p);
        else result.conflicts.push(p);
        continue;
      }
      // All three exist — standard three-way.
      const localChanged = localHash !== baseHash;
      const remoteChanged = remoteHash !== baseHash;
      if (!localChanged && !remoteChanged) {
        result.unchanged.push(p);
      } else if (!localChanged && remoteChanged) {
        result.toPull.push(p);
      } else if (localChanged && !remoteChanged) {
        result.toPush.push(p);
      } else if (localHash === remoteHash) {
        // Both sides made the identical edit.
        result.unchanged.push(p);
      } else {
        result.conflicts.push(p);
      }
    } else if (inLocal) {
      // Present locally only: deleted on remote if the base had it, else new.
      if (inBase) result.deletedRemote.push(p);
      else result.newLocal.push(p);
    } else if (inRemote) {
      // Present remotely only: deleted locally if the base had it, else new.
      if (inBase) result.deletedLocal.push(p);
      else result.newRemote.push(p);
    }
    // Base-only paths were deleted on both sides — nothing to do.
  }

  return result;
}
|
|
214
|
+
|
|
215
|
+
// Public API: manifest persistence, hashing helpers, and the three-way
// diff used by the sync commands.
module.exports = {
  loadManifest,
  saveManifest,
  computeFileHash,
  buildManifest,
  computeLocalHashes,
  threeWayCompare,
  getManifestPath,
  SKIP_DIRS,
};
|