atris 2.6.2 → 3.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (54)
  1. package/README.md +124 -34
  2. package/atris/CLAUDE.md +5 -1
  3. package/atris/atris.md +4 -0
  4. package/atris/features/README.md +24 -0
  5. package/atris/skills/autopilot/SKILL.md +74 -75
  6. package/atris/skills/endgame/SKILL.md +179 -0
  7. package/atris/skills/flow/SKILL.md +121 -0
  8. package/atris/skills/improve/SKILL.md +84 -0
  9. package/atris/skills/loop/SKILL.md +72 -0
  10. package/atris/skills/wiki/SKILL.md +61 -0
  11. package/atris/team/executor/MEMBER.md +10 -4
  12. package/atris/team/navigator/MEMBER.md +2 -0
  13. package/atris/team/validator/MEMBER.md +8 -5
  14. package/atris.md +33 -0
  15. package/bin/atris.js +210 -41
  16. package/commands/activate.js +28 -2
  17. package/commands/align.js +720 -0
  18. package/commands/auth.js +75 -2
  19. package/commands/autopilot.js +1213 -270
  20. package/commands/browse.js +100 -0
  21. package/commands/business.js +785 -12
  22. package/commands/clean.js +107 -2
  23. package/commands/computer.js +429 -0
  24. package/commands/context-sync.js +78 -8
  25. package/commands/experiments.js +351 -0
  26. package/commands/feedback.js +150 -0
  27. package/commands/fleet.js +395 -0
  28. package/commands/fork.js +127 -0
  29. package/commands/init.js +50 -1
  30. package/commands/learn.js +407 -0
  31. package/commands/lifecycle.js +94 -0
  32. package/commands/loop.js +114 -0
  33. package/commands/publish.js +129 -0
  34. package/commands/pull.js +434 -48
  35. package/commands/push.js +312 -164
  36. package/commands/review.js +149 -0
  37. package/commands/run.js +76 -43
  38. package/commands/serve.js +360 -0
  39. package/commands/setup.js +1 -1
  40. package/commands/soul.js +381 -0
  41. package/commands/status.js +119 -1
  42. package/commands/sync.js +147 -1
  43. package/commands/terminal.js +201 -0
  44. package/commands/wiki.js +376 -0
  45. package/commands/workflow.js +191 -74
  46. package/commands/workspace-clean.js +3 -3
  47. package/lib/endstate.js +259 -0
  48. package/lib/learnings.js +235 -0
  49. package/lib/manifest.js +1 -0
  50. package/lib/todo.js +9 -5
  51. package/lib/wiki.js +578 -0
  52. package/package.json +2 -2
  53. package/utils/api.js +48 -36
  54. package/utils/auth.js +1 -0
package/lib/wiki.js ADDED
@@ -0,0 +1,578 @@
1
+ const fs = require('fs');
2
+ const path = require('path');
3
+
4
// Layout constants for the wiki substrate. The wiki lives inside the
// managed `atris/` folder; the LEGACY_* names cover the pre-3.0 layout
// ("wiki/" at the root, "syntheses/" instead of "briefs/") which is
// migrated on scaffold.
const WIKI_ROOT = 'atris/wiki';
const LEGACY_WIKI_ROOT = 'wiki';
const WIKI_BRIEFS_SUBDIR = 'briefs';
const LEGACY_WIKI_BRIEFS_SUBDIR = 'syntheses'; // old name for the briefs subdir
const WIKI_SUBDIRS = ['people', 'systems', 'concepts', WIKI_BRIEFS_SUBDIR];
const WIKI_STATUS_FILE = 'STATUS.md';
// Project-root-relative paths of every content subdirectory.
const WIKI_CONTENT_SUBDIRS = WIKI_SUBDIRS.map((subdir) => path.join(WIKI_ROOT, subdir));
11
+
12
/**
 * Calendar date as an ISO `YYYY-MM-DD` string.
 * NOTE(review): this is the UTC date (toISOString), while nowTime() is
 * local wall-clock time — near midnight the two can disagree; confirm
 * this mix is intended for log headers.
 * @returns {string}
 */
function today() {
  const [datePart] = new Date().toISOString().split('T');
  return datePart;
}

/**
 * Local wall-clock time as `HH:MM` (24-hour).
 * @returns {string}
 */
function nowTime() {
  const now = new Date();
  const hh = String(now.getHours()).padStart(2, '0');
  const mm = String(now.getMinutes()).padStart(2, '0');
  return `${hh}:${mm}`;
}
19
+
20
/**
 * Markdown body for `wiki.md` — the protocol page a future agent reads
 * before touching the wiki. Paths are interpolated from WIKI_ROOT and
 * WIKI_BRIEFS_SUBDIR so the text stays in sync with the layout constants.
 * @returns {string}
 */
function protocolMarkdown() {
  return `# Atris Wiki Protocol

This wiki lives in \`${WIKI_ROOT}/\`.

## Purpose

Turn raw project context into a living memory the next agent can pick up cold.

## Shape

- \`${WIKI_ROOT}/wiki.md\` - this protocol
- \`${WIKI_ROOT}/index.md\` - catalog grouped by page type
- \`${WIKI_ROOT}/log.md\` - append-only ingest and lint history
- \`${WIKI_ROOT}/STATUS.md\` - plain-English health summary
- \`${WIKI_ROOT}/people/\` - humans (employees, contacts, stakeholders)
- \`${WIKI_ROOT}/systems/\` - tools, tables, dashboards, services, products
- \`${WIKI_ROOT}/concepts/\` - patterns, frameworks, recurring ideas
- \`${WIKI_ROOT}/${WIKI_BRIEFS_SUBDIR}/\` - multi-page briefs and cross-cutting analysis

## Rules

- Read the full source before writing.
- Merge new facts into existing pages. Do not overwrite history blindly.
- Add cross-references with \`[[atris/wiki/...]]\` links.
- Keep \`index.md\`, \`log.md\`, and \`STATUS.md\` in sync with page changes.
- If something is unclear or contradictory, say so directly.
`;
}

/**
 * Seed content for `index.md`: empty catalog sections, one per page type.
 * @returns {string}
 */
function indexMarkdown() {
  return `# Atris Wiki Index

## People

## Systems

## Concepts

## Briefs
`;
}

/**
 * Seed content for `log.md`, opening a section under today's date.
 * @returns {string}
 */
function logMarkdown() {
  return `# Atris Wiki Log

## ${today()}
`;
}

/**
 * Seed content for STATUS.md describing a brand-new, empty wiki.
 * @returns {string}
 */
function statusMarkdown() {
  return `# Atris Wiki Status

- Last ingest: never
- Last lint: never
- Last loop: never
- Health: wiki scaffold created, no pages yet
- Next move: run \`atris ingest <path>\`
`;
}
80
+
81
/**
 * Write `content` to `filePath` only when the file does not exist yet;
 * existing files are never overwritten.
 * @param {string} filePath
 * @param {string} content
 */
function ensureFile(filePath, content) {
  if (fs.existsSync(filePath)) return;
  fs.writeFileSync(filePath, content, 'utf8');
}

/**
 * True when `dir` contains no entries. A directory that cannot be read
 * (missing, permission denied) also counts as empty.
 * @param {string} dir
 * @returns {boolean}
 */
function isDirectoryEmpty(dir) {
  try {
    const entries = fs.readdirSync(dir);
    return entries.length === 0;
  } catch {
    return true;
  }
}
94
+
95
/**
 * Recursively merge the contents of a legacy wiki directory into its new
 * location without destroying data:
 * - files missing at the target are moved (rename);
 * - files identical at both locations lose their legacy copy;
 * - files that differ are deliberately left at the source so a human
 *   (or a later pass) can reconcile them.
 * Emptied legacy subdirectories are removed as the recursion unwinds.
 * @param {string} sourceDir - legacy directory; may not exist (no-op)
 * @param {string} targetDir - new directory; created if needed
 */
function mergeLegacyDirectory(sourceDir, targetDir) {
  if (!fs.existsSync(sourceDir)) return;
  fs.mkdirSync(targetDir, { recursive: true });

  const entries = fs.readdirSync(sourceDir, { withFileTypes: true });
  for (const entry of entries) {
    const sourcePath = path.join(sourceDir, entry.name);
    const targetPath = path.join(targetDir, entry.name);

    if (entry.isDirectory()) {
      mergeLegacyDirectory(sourcePath, targetPath);
      // Drop the legacy subdirectory once everything movable is gone.
      if (isDirectoryEmpty(sourcePath)) {
        fs.rmdirSync(sourcePath);
      }
      continue;
    }

    if (!fs.existsSync(targetPath)) {
      fs.renameSync(sourcePath, targetPath);
      continue;
    }

    // Both sides exist: delete the legacy copy only if byte-identical.
    const sourceContent = fs.readFileSync(sourcePath, 'utf8');
    const targetContent = fs.readFileSync(targetPath, 'utf8');
    if (sourceContent === targetContent) {
      fs.rmSync(sourcePath, { force: true });
    }
    // Differing files are intentionally left at sourcePath (no data loss).
  }
}
124
+
125
/**
 * Rewrite legacy "syntheses" naming to "briefs" across every markdown
 * file under the wiki: path references, the index heading, and the
 * frontmatter `type:` value. Files are only rewritten when changed.
 * @param {string} wikiDir - root of the wiki tree to scan
 */
function rewriteLegacyWikiReferences(wikiDir) {
  const rewrites = [
    [/atris\/wiki\/syntheses\//g, `atris/wiki/${WIKI_BRIEFS_SUBDIR}/`],
    [/\bsyntheses\//g, `${WIKI_BRIEFS_SUBDIR}/`],
    [/^## Syntheses$/gm, '## Briefs'],
    [/^type:\s*synthesis$/m, 'type: brief'],
  ];

  for (const filePath of walkMarkdownFiles(wikiDir)) {
    const original = fs.readFileSync(filePath, 'utf8');
    const rewritten = rewrites.reduce(
      (text, [pattern, replacement]) => text.replace(pattern, replacement),
      original
    );
    if (rewritten !== original) {
      fs.writeFileSync(filePath, rewritten, 'utf8');
    }
  }
}
139
+
140
/**
 * Migrate a legacy `syntheses/` subdirectory to `briefs/`. If the new
 * directory does not exist yet, the legacy one is simply renamed;
 * otherwise the two are merged and the legacy directory is removed once
 * emptied. Afterwards all textual "syntheses" references are rewritten.
 * @param {string} wikiDir
 */
function migrateLegacyBriefsDir(wikiDir) {
  const legacyDir = path.join(wikiDir, LEGACY_WIKI_BRIEFS_SUBDIR);
  if (!fs.existsSync(legacyDir)) return;

  const briefsDir = path.join(wikiDir, WIKI_BRIEFS_SUBDIR);
  if (fs.existsSync(briefsDir)) {
    mergeLegacyDirectory(legacyDir, briefsDir);
    if (isDirectoryEmpty(legacyDir)) {
      fs.rmdirSync(legacyDir);
    }
  } else {
    fs.renameSync(legacyDir, briefsDir);
  }

  rewriteLegacyWikiReferences(wikiDir);
}
157
+
158
/**
 * Create (idempotently) the wiki directory tree and seed files under
 * `projectRoot`, migrating any legacy "syntheses" layout first.
 * Existing files are left untouched.
 * @param {string} [projectRoot=process.cwd()]
 * @returns {string} absolute path of the wiki directory
 */
function ensureWikiScaffold(projectRoot = process.cwd()) {
  const wikiDir = path.join(projectRoot, WIKI_ROOT);
  fs.mkdirSync(wikiDir, { recursive: true });

  // Handle pre-3.0 layouts before creating the canonical subfolders.
  migrateLegacyBriefsDir(wikiDir);

  WIKI_SUBDIRS.forEach((subdir) => {
    fs.mkdirSync(path.join(wikiDir, subdir), { recursive: true });
  });

  const seeds = [
    ['wiki.md', protocolMarkdown()],
    ['index.md', indexMarkdown()],
    ['log.md', logMarkdown()],
    [WIKI_STATUS_FILE, statusMarkdown()],
  ];
  for (const [name, content] of seeds) {
    ensureFile(path.join(wikiDir, name), content);
  }

  return wikiDir;
}
173
+
174
/**
 * Locate an existing wiki directory, checking the canonical location,
 * the legacy root, and (when a slug is given) two slug-scoped layouts.
 * @param {string} [projectRoot=process.cwd()]
 * @param {string|null} [slug=null] - optional project slug
 * @returns {string|null} first existing candidate, or null
 */
function findLocalWikiDir(projectRoot = process.cwd(), slug = null) {
  const candidates = [
    path.join(projectRoot, WIKI_ROOT),
    path.join(projectRoot, LEGACY_WIKI_ROOT),
  ];
  if (slug) {
    candidates.push(path.join(projectRoot, 'atris', slug, 'wiki'));
    candidates.push(path.join(projectRoot, slug, 'wiki'));
  }

  for (const candidate of candidates) {
    if (fs.existsSync(candidate)) return candidate;
  }
  return null;
}
184
+
185
/**
 * Normalize a path prefix that refers to the wiki as a whole ("wiki",
 * "/atris/wiki/", …) to the canonical `atris/wiki/` form.
 * @param {string} prefix
 * @returns {string|null} canonical prefix, or null if not wiki-only
 */
function normalizeWikiOnlyPrefix(prefix) {
  const stripped = prefix.replace(/^\//, '');
  const wikiOnlyForms = new Set(['wiki', 'wiki/', 'atris/wiki', 'atris/wiki/']);
  return wikiOnlyForms.has(stripped) ? 'atris/wiki/' : null;
}
192
+
193
/**
 * Read a summary of the wiki's STATUS.md. Returns null when no wiki
 * directory exists; otherwise the located paths plus up to the first
 * four `- ` bullet lines of the status file (empty when missing).
 * @param {string} [projectRoot=process.cwd()]
 * @param {string|null} [slug=null]
 * @returns {{wikiDir: string, statusPath: string, bullets: string[]}|null}
 */
function readWikiStatus(projectRoot = process.cwd(), slug = null) {
  const wikiDir = findLocalWikiDir(projectRoot, slug);
  if (!wikiDir) return null;

  const statusPath = path.join(wikiDir, WIKI_STATUS_FILE);
  let bullets = [];
  if (fs.existsSync(statusPath)) {
    bullets = fs
      .readFileSync(statusPath, 'utf8')
      .split('\n')
      .filter((line) => line.startsWith('- '))
      .slice(0, 4);
  }

  return { wikiDir, statusPath, bullets };
}
217
+
218
/**
 * Depth-first collection of all `.md` file paths under `dir`.
 * A missing directory yields no entries.
 * @param {string} dir
 * @param {string[]} [output=[]] - accumulator shared by the recursion
 * @returns {string[]} the accumulator, for chaining
 */
function walkMarkdownFiles(dir, output = []) {
  if (!fs.existsSync(dir)) return output;

  for (const entry of fs.readdirSync(dir, { withFileTypes: true })) {
    const fullPath = path.join(dir, entry.name);
    if (entry.isDirectory()) {
      walkMarkdownFiles(fullPath, output);
      continue;
    }
    if (entry.isFile() && entry.name.endsWith('.md')) {
      output.push(fullPath);
    }
  }

  return output;
}
233
+
234
/**
 * Parse a YAML-style inline array like `[a, "b", 'c']` into an array of
 * unquoted, trimmed strings. `rawValue` must include the surrounding
 * brackets. Items containing commas are not supported (naive split).
 *
 * Fix: quotes are stripped BEFORE the empty-filter, so quoted-empty
 * items such as `[""]` no longer leak empty strings into the result.
 * @param {string} rawValue
 * @returns {string[]}
 */
function parseInlineArray(rawValue) {
  return rawValue
    .slice(1, -1)
    .split(',')
    .map((item) => item.trim().replace(/^['"]|['"]$/g, ''))
    .filter(Boolean);
}
241
+
242
/**
 * Parse a leading YAML frontmatter block (`--- ... ---`) into a plain
 * object. Supports scalar values, inline arrays (`[a, b]`), and indented
 * `- item` lists under a bare key. Returns null when no frontmatter is
 * present or the closing fence is missing.
 *
 * Fixes over the original:
 * - CRLF files: the opening fence may be `---\r\n` (the value loop
 *   already stripped trailing `\r`, but the opener check rejected CRLF).
 * - The closing fence must be a line that is exactly `---`; a bare
 *   `indexOf('\n---')` also matched `----` or `--- extra`, truncating
 *   the block early.
 * @param {string} content
 * @returns {Object<string, string|string[]>|null}
 */
function parseFrontmatter(content) {
  // Opening fence: `---` alone on the first line (LF or CRLF).
  const open = content.match(/^---\r?\n/);
  if (!open) return null;
  const bodyStart = open[0].length;

  // Closing fence: a newline followed by `---` and then a line break or
  // end-of-input — not merely any text starting with `---`.
  const close = content.slice(bodyStart).match(/\n---(?:\r?\n|$)/);
  if (!close) return null;

  const yaml = content.slice(bodyStart, bodyStart + close.index);
  const frontmatter = {};
  let currentKey = null;

  for (const rawLine of yaml.split('\n')) {
    const line = rawLine.replace(/\r$/, '');

    // Indented `- item` continuation of the current key's list.
    const listMatch = line.match(/^\s+-\s+(.+)$/);
    if (listMatch && currentKey) {
      if (!Array.isArray(frontmatter[currentKey])) frontmatter[currentKey] = [];
      frontmatter[currentKey].push(listMatch[1].trim().replace(/^['"]|['"]$/g, ''));
      continue;
    }

    const keyValueMatch = line.match(/^([a-zA-Z0-9_-]+):\s*(.*)$/);
    if (!keyValueMatch) continue;

    currentKey = keyValueMatch[1];
    const value = keyValueMatch[2].trim();
    if (value === '') {
      frontmatter[currentKey] = []; // bare key: an indented list may follow
    } else if (value.startsWith('[') && value.endsWith(']')) {
      frontmatter[currentKey] = parseInlineArray(value);
    } else {
      frontmatter[currentKey] = value.replace(/^['"]|['"]$/g, '');
    }
  }

  return frontmatter;
}
277
+
278
/**
 * Load every markdown page under the wiki's content subdirectories.
 * @param {string} [projectRoot=process.cwd()]
 * @returns {Array<{filePath: string, relativePath: string, content: string, frontmatter: Object}>}
 *   relativePath is project-root-relative with forward slashes;
 *   frontmatter is `{}` when the page has none.
 */
function readWikiPages(projectRoot = process.cwd()) {
  const wikiDir = path.join(projectRoot, WIKI_ROOT);

  return WIKI_SUBDIRS.flatMap((subdir) =>
    walkMarkdownFiles(path.join(wikiDir, subdir)).map((filePath) => {
      const content = fs.readFileSync(filePath, 'utf8');
      return {
        filePath,
        // Forward slashes keep [[...]] links portable across platforms.
        relativePath: path.relative(projectRoot, filePath).replace(/\\/g, '/'),
        content,
        frontmatter: parseFrontmatter(content) || {},
      };
    })
  );
}
298
+
299
/**
 * Resolve a frontmatter `sources` entry to a normalized absolute path.
 * URLs and empty values yield null (they cannot be stat'ed locally).
 * @param {string} projectRoot
 * @param {string} source
 * @returns {string|null}
 */
function normalizeSourcePath(projectRoot, source) {
  if (!source) return null;
  if (/^https?:\/\//i.test(source)) return null; // remote source: no local path
  const absolute = path.isAbsolute(source) ? source : path.join(projectRoot, source);
  return path.normalize(absolute);
}
304
+
305
/**
 * Report wiki pages whose listed sources are missing or have been
 * modified after the page's `last_compiled` date. At most one stale
 * reason is reported per page (the first source that trips a check).
 * Pages without sources are skipped; URL sources cannot be checked.
 * @param {string} [projectRoot=process.cwd()]
 * @returns {Array<{page: string, staleSource: string, reason: string}>}
 */
function findStaleWikiPages(projectRoot = process.cwd()) {
  const stale = [];

  for (const page of readWikiPages(projectRoot)) {
    const sources = Array.isArray(page.frontmatter.sources) ? page.frontmatter.sources : [];
    if (sources.length === 0) continue; // nothing to compare against

    const lastCompiled = page.frontmatter.last_compiled;
    if (!lastCompiled) {
      stale.push({
        page: page.relativePath,
        staleSource: sources[0],
        reason: 'missing last_compiled',
      });
      continue;
    }

    // End-of-day, so a source touched the same day does not read as stale.
    const compiledDate = new Date(`${lastCompiled}T23:59:59`);

    for (const source of sources) {
      const normalized = normalizeSourcePath(projectRoot, source);
      if (!normalized) continue; // URL or empty entry: cannot stat

      if (!fs.existsSync(normalized)) {
        stale.push({
          page: page.relativePath,
          staleSource: source,
          reason: 'missing source',
        });
        break;
      }

      if (fs.statSync(normalized).mtime > compiledDate) {
        stale.push({
          page: page.relativePath,
          staleSource: source,
          reason: 'source newer than last_compiled',
        });
        break;
      }
    }
  }

  return stale;
}
346
+
347
/**
 * Extract all `[[atris/wiki/...]]` link targets from markdown content,
 * in document order, with the surrounding brackets removed.
 * @param {string} content
 * @returns {string[]}
 */
function extractWikiLinks(content) {
  const links = [];
  for (const match of content.matchAll(/\[\[(atris\/wiki\/[^\]]+?)\]\]/g)) {
    links.push(match[1]);
  }
  return links;
}
351
+
352
/**
 * Find orphan pages: wiki pages that are neither listed in index.md nor
 * linked from any other page via `[[...]]` cross-references.
 * @param {string} [projectRoot=process.cwd()]
 * @returns {string[]} project-root-relative paths of orphan pages
 */
function findWikiOrphans(projectRoot = process.cwd()) {
  const pages = readWikiPages(projectRoot);
  const indexPath = path.join(projectRoot, WIKI_ROOT, 'index.md');
  const indexContent = fs.existsSync(indexPath) ? fs.readFileSync(indexPath, 'utf8') : '';

  // Count inbound links per page; self-links do not count.
  const inboundLinks = new Map(pages.map((page) => [page.relativePath, 0]));
  for (const page of pages) {
    for (const rawLink of extractWikiLinks(page.content)) {
      const link = rawLink.replace(/\\/g, '/');
      if (link === page.relativePath) continue;
      if (inboundLinks.has(link)) {
        inboundLinks.set(link, inboundLinks.get(link) + 1);
      }
    }
  }

  const orphans = [];
  for (const page of pages) {
    const indexed = indexContent.includes(`[[${page.relativePath}]]`);
    const inboundCount = inboundLinks.get(page.relativePath) || 0;
    if (!indexed && inboundCount === 0) {
      orphans.push(page.relativePath);
    }
  }
  return orphans;
}
380
+
381
/**
 * Suggest up to `limit` well-known project files that exist on disk but
 * have not yet been ingested as a source by any wiki page.
 * @param {string} [projectRoot=process.cwd()]
 * @param {number} [limit=3]
 * @returns {string[]} project-root-relative candidate paths
 */
function findSuggestedSources(projectRoot = process.cwd(), limit = 3) {
  const candidates = [
    'README.md',
    'atris/CLAUDE.md',
    'atris/atris.md',
    'atris.md',
    'package.json',
    'commands/init.js',
    'commands/activate.js',
    'commands/wiki.js',
    'atris/team/navigator/MEMBER.md',
    'atris/team/executor/MEMBER.md',
    'atris/team/validator/MEMBER.md',
  ];

  // Collect every source path already referenced by some page.
  const alreadyIngested = new Set();
  for (const page of readWikiPages(projectRoot)) {
    const sources = Array.isArray(page.frontmatter.sources) ? page.frontmatter.sources : [];
    for (const source of sources) {
      const normalized = normalizeSourcePath(projectRoot, source);
      if (normalized) alreadyIngested.add(normalized);
    }
  }

  const suggestions = [];
  for (const candidate of candidates) {
    if (suggestions.length >= limit) break;
    const fullPath = path.join(projectRoot, candidate);
    if (fs.existsSync(fullPath) && !alreadyIngested.has(path.normalize(fullPath))) {
      suggestions.push(candidate);
    }
  }
  return suggestions;
}
416
+
417
/**
 * Parse `- Key: value` bullet lines from STATUS.md content into a Map.
 * Later duplicates of a key overwrite earlier ones.
 * @param {string} content
 * @returns {Map<string, string>}
 */
function parseStatusBullets(content) {
  const bullets = new Map();
  content.split('\n').forEach((line) => {
    const match = /^- ([^:]+):\s*(.*)$/.exec(line);
    if (match) bullets.set(match[1], match[2]);
  });
  return bullets;
}
425
+
426
/**
 * Rewrite STATUS.md after a loop pass: keep the previous ingest/lint
 * timestamps, stamp "Last loop" with now, and take health/next-move from
 * the report. Scaffolds the wiki first if needed.
 * @param {string} projectRoot
 * @param {{health: string, nextMove: string}} report
 * @returns {string} path of the status file written
 */
function writeWikiStatus(projectRoot = process.cwd(), report) {
  const wikiDir = ensureWikiScaffold(projectRoot);
  const statusPath = path.join(wikiDir, WIKI_STATUS_FILE);
  const existing = fs.existsSync(statusPath) ? fs.readFileSync(statusPath, 'utf8') : '';
  const previous = parseStatusBullets(existing);

  const body = [
    '# Atris Wiki Status',
    '',
    `- Last ingest: ${previous.get('Last ingest') || 'never'}`,
    `- Last lint: ${previous.get('Last lint') || 'never'}`,
    `- Last loop: ${today()} ${nowTime()}`,
    `- Health: ${report.health}`,
    `- Next move: ${report.nextMove}`,
    '',
  ].join('\n');

  fs.writeFileSync(statusPath, body, 'utf8');
  return statusPath;
}
446
+
447
/**
 * Append a timestamped LOOP entry (plus optional nested detail lines) to
 * the wiki's log.md under today's date header, scaffolding the wiki and
 * creating the log if needed. The log is append-only: existing content
 * is preserved verbatim.
 * @param {string} projectRoot
 * @param {string} summary - one-line summary of the loop pass
 * @param {string[]} [details=[]] - extra lines nested under the entry
 * @returns {string} path of log.md
 */
function appendWikiLog(projectRoot = process.cwd(), summary, details = []) {
  const wikiDir = ensureWikiScaffold(projectRoot);
  const logPath = path.join(wikiDir, 'log.md');
  let content = fs.existsSync(logPath) ? fs.readFileSync(logPath, 'utf8') : '# Atris Wiki Log\n';
  const dateHeader = `## ${today()}`;
  // NOTE(review): includes() matches the header anywhere in the file;
  // assumes date sections are only ever appended chronologically so the
  // entry lands in (or after) the right section — confirm.
  if (!content.includes(dateHeader)) {
    if (!content.endsWith('\n')) content += '\n';
    content += `\n${dateHeader}\n`;
  }

  if (!content.endsWith('\n')) content += '\n';
  content += `- ${nowTime()} LOOP ${summary}\n`;
  for (const detail of details) {
    content += ` - ${detail}\n`;
  }

  fs.writeFileSync(logPath, content, 'utf8');
  return logPath;
}
466
+
467
/**
 * Render a whitespace-separated source string as a comma-separated list,
 * e.g. "a.md  b.md" -> "a.md, b.md". Empty input yields "".
 * @param {string} sourceValue
 * @returns {string}
 */
function formatSourceList(sourceValue) {
  const parts = sourceValue.split(/\s+/).filter(Boolean);
  return parts.join(', ');
}
473
+
474
// Compact schema description embedded verbatim in agent prompts.
// It mirrors the layout created by ensureWikiScaffold() and the page
// frontmatter read by parseFrontmatter() — keep all three in sync.
const WIKI_SCHEMA = `The wiki lives in ${WIKI_ROOT}/.

Structure:
- ${WIKI_ROOT}/wiki.md - protocol for future agents
- ${WIKI_ROOT}/index.md - catalog grouped by type
- ${WIKI_ROOT}/log.md - append-only activity log
- ${WIKI_ROOT}/STATUS.md - plain-English health summary
- ${WIKI_ROOT}/people/ - one page per human
- ${WIKI_ROOT}/systems/ - one page per tool, table, dashboard, service, or product
- ${WIKI_ROOT}/concepts/ - pattern and framework pages
- ${WIKI_ROOT}/${WIKI_BRIEFS_SUBDIR}/ - cross-cutting briefs referencing 3+ pages

Page format:
---
type: person | system | concept | brief
slug: short-id
title: Human Readable
sources: [path/to/source1.md]
last_compiled: YYYY-MM-DD
created: YYYY-MM-DD
updated: YYYY-MM-DD
tags: [tag1, tag2]
---
# Title
Body in markdown.
## Cross-References
- [[atris/wiki/people/related.md]] - why related

Rules:
- Read every listed source fully before writing
- Merge new info into existing pages instead of replacing them
- Keep index.md, log.md, and STATUS.md current
- Flag contradictions directly instead of smoothing them over
- Never modify the raw source documents you ingested`;
508
+
509
/**
 * Build the agent prompt for ingesting one or more sources into the wiki.
 * Fix: step 2 said "the 3 page subfolders", but WIKI_SUBDIRS defines four
 * (people, systems, concepts, briefs) — the count is now correct.
 * @param {string} sourceValue - whitespace-separated source paths
 * @returns {string}
 */
function buildIngestPrompt(sourceValue) {
  return `Atris wiki ingest: ${formatSourceList(sourceValue)}
${WIKI_SCHEMA}

Workflow:
1. Read every source in: ${sourceValue}
2. Ensure ${WIKI_ROOT}/ exists with wiki.md, index.md, log.md, STATUS.md, and the 4 page subfolders
3. Extract people, systems, and concepts worth preserving
4. Create or update pages under ${WIKI_ROOT}/, merging with existing facts instead of replacing them
5. Add cross-references using [[atris/wiki/...]] links
6. Update ${WIKI_ROOT}/index.md with one-line descriptions of touched pages
7. Append an INGEST entry to ${WIKI_ROOT}/log.md under today's date
8. Refresh ${WIKI_ROOT}/STATUS.md in plain English for a non-technical reader

Quality bar:
- Ask clarifying questions if the source is ambiguous
- Capture the important facts, not filler
- Say what is uncertain
- Leave the wiki sharper than you found it`;
}

/**
 * Build the agent prompt for answering a question from the wiki.
 * @param {string} question
 * @returns {string}
 */
function buildQueryPrompt(question) {
  return `Atris wiki query: ${question}

Read ${WIKI_ROOT}/index.md first, then the most relevant pages.
Answer from the wiki with direct references to page paths under ${WIKI_ROOT}/.
If the answer reveals a reusable insight, offer to save it as a brief page.`;
}

/**
 * Build the agent prompt for a wiki lint/health pass.
 * @returns {string}
 */
function buildLintPrompt() {
  return `Atris wiki lint pass

Read ${WIKI_ROOT}/index.md, crawl the referenced pages, and inspect the local wiki.

Checks:
1. Every page referenced by index.md exists
2. Cross-references resolve
3. Orphan pages are listed
4. Contradictions are called out plainly
5. Gaps worth ingesting next are listed concretely
6. ${WIKI_ROOT}/STATUS.md is rewritten in plain English
7. ${WIKI_ROOT}/log.md gets a LINT entry under today's date

Output:
- Clear summary for a non-technical reader
- Specific next ingest suggestions
- No hedging if the wiki is stale or messy`;
}
557
+
558
// Public surface of the wiki substrate: layout constants, scaffold and
// lookup helpers, page/staleness/orphan analysis, status/log writers,
// and the agent prompt builders. Keep names stable — consumers elsewhere
// in the package are not visible from this file.
module.exports = {
  WIKI_ROOT,
  LEGACY_WIKI_ROOT,
  WIKI_SUBDIRS,
  WIKI_CONTENT_SUBDIRS,
  WIKI_SCHEMA,
  WIKI_STATUS_FILE,
  ensureWikiScaffold,
  findLocalWikiDir,
  normalizeWikiOnlyPrefix,
  readWikiStatus,
  readWikiPages,
  findStaleWikiPages,
  findWikiOrphans,
  findSuggestedSources,
  writeWikiStatus,
  appendWikiLog,
  buildIngestPrompt,
  buildQueryPrompt,
  buildLintPrompt,
};
package/package.json CHANGED
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "name": "atris",
3
- "version": "2.6.2",
4
- "description": "atrisDev (atris dev) - CLI for AI coding agents. Works with Claude Code, Cursor, Windsurf. Make any codebase AI-navigable.",
3
+ "version": "3.0.0",
4
+ "description": "Atris the autonomous coding loop. Works with Claude Code, Cursor, Windsurf, Codex. Endgame-driven autopilot, plain-language output, self-improving substrate. Type one command in any folder, walk away, come back to shipped commits.",
5
5
  "main": "bin/atris.js",
6
6
  "bin": {
7
7
  "atris": "bin/atris.js"
package/utils/api.js CHANGED
@@ -71,11 +71,18 @@ function httpRequest(urlString, options) {
71
71
  });
72
72
 
73
73
  req.on('error', reject);
74
+ // Socket idle timeout (fires if no data received for this duration)
74
75
  if (timeoutMs > 0) {
75
76
  req.setTimeout(timeoutMs, () => {
76
- req.destroy(new Error('Request timeout'));
77
+ req.destroy(new Error(`Request timeout after ${Math.round(timeoutMs / 1000)}s — try --timeout=300`));
77
78
  });
78
79
  }
80
+ // Hard deadline — kill request after 2x the timeout regardless of activity
81
+ const hardDeadline = timeoutMs > 0
82
+ ? setTimeout(() => { req.destroy(new Error(`Hard deadline exceeded (${Math.round(timeoutMs * 2 / 1000)}s)`)); }, timeoutMs * 2)
83
+ : null;
84
+ // Clear hard deadline when response completes
85
+ req.on('close', () => { if (hardDeadline) clearTimeout(hardDeadline); });
79
86
 
80
87
  if (options.body) {
81
88
  if (!req.hasHeader('Content-Length')) {
@@ -113,44 +120,49 @@ async function apiRequestJson(pathname, options = {}) {
113
120
  }
114
121
  }
115
122
 
116
- try {
117
- const result = await httpRequest(url, {
118
- method: options.method || 'GET',
119
- headers,
120
- body: bodyPayload,
121
- timeoutMs: options.timeoutMs,
122
- });
123
+ const maxRetries = options.retries != null ? options.retries : 1;
124
+ const retryableStatus = new Set([0, 502, 503, 504]);
125
+
126
+ for (let attempt = 0; attempt <= maxRetries; attempt++) {
127
+ try {
128
+ const result = await httpRequest(url, {
129
+ method: options.method || 'GET',
130
+ headers,
131
+ body: bodyPayload,
132
+ timeoutMs: options.timeoutMs,
133
+ });
123
134
 
124
- const text = result.body.toString('utf8');
125
- let data = null;
126
- if (text) {
127
- try {
128
- data = JSON.parse(text);
129
- } catch {
130
- data = null;
135
+ const text = result.body.toString('utf8');
136
+ let data = null;
137
+ if (text) {
138
+ try {
139
+ data = JSON.parse(text);
140
+ } catch {
141
+ data = null;
142
+ }
131
143
  }
132
- }
133
144
 
134
- const ok = result.status >= 200 && result.status < 300;
135
- const errorMessage = !ok
136
- ? (data && typeof data === 'object' && (data.detail || data.error || data.message)) || text || 'Request failed'
137
- : undefined;
138
-
139
- return {
140
- ok,
141
- status: result.status,
142
- data,
143
- text,
144
- error: errorMessage,
145
- };
146
- } catch (error) {
147
- return {
148
- ok: false,
149
- status: 0,
150
- data: null,
151
- text: '',
152
- error: error.message || 'Network error',
153
- };
145
+ const ok = result.status >= 200 && result.status < 300;
146
+
147
+ // Retry on transient server errors
148
+ if (!ok && retryableStatus.has(result.status) && attempt < maxRetries) {
149
+ await new Promise(r => setTimeout(r, 1000 * (attempt + 1)));
150
+ continue;
151
+ }
152
+
153
+ const errorMessage = !ok
154
+ ? (data && typeof data === 'object' && (data.detail || data.error || data.message)) || text || 'Request failed'
155
+ : undefined;
156
+
157
+ return { ok, status: result.status, data, text, error: errorMessage };
158
+ } catch (error) {
159
+ // Retry on network errors (timeout, connection reset)
160
+ if (attempt < maxRetries) {
161
+ await new Promise(r => setTimeout(r, 1000 * (attempt + 1)));
162
+ continue;
163
+ }
164
+ return { ok: false, status: 0, data: null, text: '', error: error.message || 'Network error' };
165
+ }
154
166
  }
155
167
  }
156
168
 
package/utils/auth.js CHANGED
@@ -579,6 +579,7 @@ module.exports = {
579
579
  autoSaveProfile,
580
580
  // Per-terminal sessions
581
581
  getTerminalSessionId,
582
+ getSessionsDir,
582
583
  setSessionProfile,
583
584
  getSessionProfile,
584
585
  clearSessionProfile,