@cleocode/core 2026.3.60 → 2026.3.62

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (45)
  1. package/dist/cleo.js +36 -1
  2. package/dist/cleo.js.map +1 -1
  3. package/dist/index.js +120 -30
  4. package/dist/index.js.map +3 -3
  5. package/dist/internal.d.ts +6 -3
  6. package/dist/internal.d.ts.map +1 -1
  7. package/dist/internal.js +4 -2
  8. package/dist/internal.js.map +1 -1
  9. package/dist/phases/deps.d.ts +1 -1
  10. package/dist/phases/deps.d.ts.map +1 -1
  11. package/dist/phases/deps.js +5 -2
  12. package/dist/phases/deps.js.map +1 -1
  13. package/dist/repair.d.ts +7 -0
  14. package/dist/repair.d.ts.map +1 -1
  15. package/dist/repair.js +43 -2
  16. package/dist/repair.js.map +1 -1
  17. package/dist/routing/capability-matrix.d.ts.map +1 -1
  18. package/dist/routing/capability-matrix.js +7 -0
  19. package/dist/routing/capability-matrix.js.map +1 -1
  20. package/dist/sequence/index.js +1 -1
  21. package/dist/sequence/index.js.map +1 -1
  22. package/dist/stats/index.d.ts.map +1 -1
  23. package/dist/stats/index.js +4 -2
  24. package/dist/stats/index.js.map +1 -1
  25. package/dist/store/sqlite.d.ts.map +1 -1
  26. package/dist/store/sqlite.js +59 -5
  27. package/dist/store/sqlite.js.map +1 -1
  28. package/dist/system/backup.d.ts +15 -0
  29. package/dist/system/backup.d.ts.map +1 -1
  30. package/dist/system/backup.js +43 -1
  31. package/dist/system/backup.js.map +1 -1
  32. package/dist/tasks/add.d.ts.map +1 -1
  33. package/dist/tasks/add.js +66 -4
  34. package/dist/tasks/add.js.map +1 -1
  35. package/package.json +5 -5
  36. package/src/internal.ts +7 -4
  37. package/src/phases/deps.ts +5 -3
  38. package/src/repair.ts +53 -2
  39. package/src/routing/capability-matrix.ts +7 -0
  40. package/src/sequence/index.ts +1 -1
  41. package/src/stats/index.ts +4 -2
  42. package/src/store/sqlite.ts +72 -5
  43. package/src/system/backup.ts +52 -1
  44. package/src/tasks/__tests__/add.test.ts +3 -1
  45. package/src/tasks/add.ts +66 -5
package/src/internal.ts CHANGED
@@ -98,7 +98,7 @@ export type {
98
98
  RiskFactor,
99
99
  ValidationPrediction,
100
100
  } from './intelligence/types.js';
101
-
101
+ export { type AddIssueParams, type AddIssueResult, addIssue } from './issue/create.js';
102
102
  // Issue
103
103
  export { collectDiagnostics } from './issue/diagnostics.js';
104
104
  // Lib — shared primitives
@@ -240,6 +240,9 @@ export {
240
240
  } from './otel/index.js';
241
241
  // Paths (additional)
242
242
  export { getAgentOutputsAbsolute, getAgentsHome } from './paths.js';
243
+ // Phases — dependency graph (taskId-scoped critical path; distinct from tasks/graph-ops getCriticalPath)
244
+ export type { CriticalPathResult as DepsCriticalPathResult } from './phases/deps.js';
245
+ export { getCriticalPath as depsCriticalPath } from './phases/deps.js';
243
246
  export type { ListPhasesResult } from './phases/index.js';
244
247
  // Phases
245
248
  export { advancePhase, deletePhase, renamePhase, setPhase, startPhase } from './phases/index.js';
@@ -389,7 +392,7 @@ export {
389
392
  export { computeChecksum, readJson } from './store/json.js';
390
393
  export { createSession, getActiveSession } from './store/session-store.js';
391
394
  export { getDb, getNativeDb } from './store/sqlite.js';
392
- export { createTask } from './store/task-store.js';
395
+ export { createTask, getTask } from './store/task-store.js';
393
396
  export {
394
397
  auditLog,
395
398
  externalTaskLinks,
@@ -415,8 +418,8 @@ export type { ArchiveStatsResult } from './system/archive-stats.js';
415
418
  export { getArchiveStats } from './system/archive-stats.js';
416
419
  export type { AuditResult } from './system/audit.js';
417
420
  export { auditData } from './system/audit.js';
418
- export type { BackupResult, RestoreResult } from './system/backup.js';
419
- export { restoreBackup } from './system/backup.js';
421
+ export type { BackupEntry, BackupResult, RestoreResult } from './system/backup.js';
422
+ export { listSystemBackups, restoreBackup } from './system/backup.js';
420
423
  export type { CleanupResult } from './system/cleanup.js';
421
424
  export { cleanupSystem } from './system/cleanup.js';
422
425
  export type { DiagnosticsResult, HealthResult } from './system/health.js';
@@ -9,15 +9,17 @@
9
9
  import type { Task, TaskRef } from '@cleocode/contracts';
10
10
  import { ExitCode } from '@cleocode/contracts';
11
11
  import { CleoError } from '../errors.js';
12
- import type { DataAccessor } from '../store/data-accessor.js';
12
+ import { type DataAccessor, getAccessor } from '../store/data-accessor.js';
13
13
 
14
14
  /**
15
15
  * Load all tasks via targeted query.
16
+ * Creates a fresh accessor from cwd when none is provided.
16
17
  * @task T4659
17
18
  * @epic T4654
18
19
  */
19
- async function loadAllTasks(_cwd?: string, accessor?: DataAccessor): Promise<Task[]> {
20
- const { tasks } = await accessor!.queryTasks({});
20
+ async function loadAllTasks(cwd?: string, accessor?: DataAccessor): Promise<Task[]> {
21
+ const acc = accessor ?? (await getAccessor(cwd));
22
+ const { tasks } = await acc.queryTasks({});
21
23
  return tasks;
22
24
  }
23
25
 
package/src/repair.ts CHANGED
@@ -105,6 +105,56 @@ export async function repairMissingCompletedAt(
105
105
  };
106
106
  }
107
107
 
108
+ /**
109
+ * Detect and add missing required columns on the tasks table.
110
+ * Uses PRAGMA table_info to check actual schema vs required columns.
111
+ *
112
+ * @see https://github.com/anthropics/cleo/issues/63
113
+ */
114
+ export async function repairMissingColumns(
115
+ cwd: string | undefined,
116
+ dryRun: boolean,
117
+ ): Promise<RepairAction> {
118
+ const { getDb } = await import('./store/sqlite.js');
119
+ const db = await getDb(cwd);
120
+
121
+ // getDb() already calls ensureRequiredColumns(), so if we get here
122
+ // the column should exist. But we still report the action for visibility.
123
+ const { sql } = await import('drizzle-orm');
124
+ const columns = db.all<{ name: string }>(sql`PRAGMA table_info(tasks)`);
125
+ const existingCols = new Set(columns.map((c: { name: string }) => c.name));
126
+
127
+ const missingCols = ['pipeline_stage'].filter((c) => !existingCols.has(c));
128
+
129
+ if (missingCols.length === 0) {
130
+ return {
131
+ action: 'fix_missing_columns',
132
+ status: 'skipped',
133
+ details: 'All required columns present on tasks table',
134
+ };
135
+ }
136
+
137
+ if (dryRun) {
138
+ return {
139
+ action: 'fix_missing_columns',
140
+ status: 'preview',
141
+ details: `Would add missing column(s): ${missingCols.join(', ')}`,
142
+ };
143
+ }
144
+
145
+ // Columns should already be added by ensureRequiredColumns() in getDb(),
146
+ // but apply again defensively.
147
+ for (const col of missingCols) {
148
+ db.run(sql.raw(`ALTER TABLE tasks ADD COLUMN ${col} text`));
149
+ }
150
+
151
+ return {
152
+ action: 'fix_missing_columns',
153
+ status: 'applied',
154
+ details: `Added missing column(s): ${missingCols.join(', ')}`,
155
+ };
156
+ }
157
+
108
158
  /**
109
159
  * Run all repair functions.
110
160
  * Returns all actions taken (or previewed in dry-run mode).
@@ -113,9 +163,10 @@ export async function runAllRepairs(
113
163
  cwd: string | undefined,
114
164
  dryRun: boolean,
115
165
  ): Promise<RepairAction[]> {
116
- const [sizes, completedAt] = await Promise.all([
166
+ const [sizes, completedAt, columns] = await Promise.all([
117
167
  repairMissingSizes(cwd, dryRun),
118
168
  repairMissingCompletedAt(cwd, dryRun),
169
+ repairMissingColumns(cwd, dryRun),
119
170
  ]);
120
- return [sizes, completedAt];
171
+ return [sizes, completedAt, columns];
121
172
  }
@@ -469,6 +469,13 @@ const CAPABILITY_MATRIX: OperationCapability[] = [
469
469
  mode: 'native',
470
470
  preferredChannel: 'either',
471
471
  },
472
+ {
473
+ domain: 'admin',
474
+ operation: 'backup',
475
+ gateway: 'query',
476
+ mode: 'native',
477
+ preferredChannel: 'either',
478
+ },
472
479
  {
473
480
  domain: 'admin',
474
481
  operation: 'backup',
@@ -168,7 +168,7 @@ export async function showSequence(cwd?: string): Promise<Record<string, unknown
168
168
  counter: seq.counter,
169
169
  lastId: seq.lastId,
170
170
  checksum: seq.checksum,
171
- nextId: `T${seq.counter + 1}`,
171
+ nextId: `T${String(seq.counter + 1).padStart(3, '0')}`,
172
172
  };
173
173
  }
174
174
 
@@ -149,8 +149,10 @@ export async function getProjectStats(
149
149
  )
150
150
  .get();
151
151
  archivedCompleted = archivedDoneRow?.c ?? 0;
152
- // totalCompleted = currently done (not yet archived) + archived-as-completed
153
- totalCompleted = (statusMap['done'] ?? 0) + archivedCompleted;
152
+ // totalCompleted: use audit log as SSoT (same source as completedInPeriod) to ensure
153
+ // the two metrics are consistent. DB-based status counts under-count because they miss
154
+ // tasks that were completed then cancelled, deleted, or archived with a non-default reason.
155
+ totalCompleted = entries.filter(isComplete).length;
154
156
  } catch {
155
157
  // fallback to audit_log counts if DB unavailable
156
158
  totalCreated = entries.filter(isCreate).length;
@@ -462,18 +462,48 @@ function runMigrations(nativeDb: DatabaseSync, db: NodeSQLiteDatabase<typeof sch
462
462
  }
463
463
  }
464
464
 
465
+ // Fix #63: Reconcile stale migration journal entries from older CLEO versions.
466
+ // When the DB has __drizzle_migrations entries whose hashes don't match ANY
467
+ // local migration file, drizzle's migrate() either throws or tries to re-run
468
+ // all migrations (causing "table already exists" errors). Fix by clearing stale
469
+ // entries and marking all local migrations as applied (tables already exist),
470
+ // then relying on ensureRequiredColumns() to patch any schema gaps.
471
+ if (tableExists(nativeDb, '__drizzle_migrations') && tableExists(nativeDb, 'tasks')) {
472
+ const localMigrations = readMigrationFiles({ migrationsFolder });
473
+ const localHashes = new Set(localMigrations.map((m) => m.hash));
474
+ const dbEntries = nativeDb.prepare('SELECT hash FROM "__drizzle_migrations"').all() as Array<{
475
+ hash: string;
476
+ }>;
477
+ const hasOrphanedEntries = dbEntries.some((e) => !localHashes.has(e.hash));
478
+
479
+ if (hasOrphanedEntries) {
480
+ const log = getLogger('sqlite');
481
+ log.warn(
482
+ { orphaned: dbEntries.filter((e) => !localHashes.has(e.hash)).length },
483
+ 'Detected stale migration journal entries from a previous CLEO version. Reconciling.',
484
+ );
485
+ // Clear all entries and mark every local migration as applied.
486
+ // The existing tables were created by the previous version's migrations,
487
+ // so we skip re-running them. ensureRequiredColumns() fills any schema gaps.
488
+ nativeDb.exec('DELETE FROM "__drizzle_migrations"');
489
+ for (const m of localMigrations) {
490
+ nativeDb.exec(
491
+ `INSERT INTO "__drizzle_migrations" ("hash", "created_at") VALUES ('${m.hash}', ${m.folderMillis})`,
492
+ );
493
+ }
494
+ }
495
+ }
496
+
465
497
  // Run pending migrations via drizzle-orm/node-sqlite/migrator (synchronous).
466
498
  // The new migrator handles its own transactions. T5185: retry on SQLITE_BUSY.
467
- let lastError: unknown;
468
499
  for (let attempt = 1; attempt <= MAX_MIGRATION_RETRIES; attempt++) {
469
500
  try {
470
501
  migrate(db, { migrationsFolder });
471
- return;
502
+ break;
472
503
  } catch (err) {
473
504
  if (!isSqliteBusy(err) || attempt === MAX_MIGRATION_RETRIES) {
474
505
  throw err;
475
506
  }
476
- lastError = err;
477
507
  const delay = Math.min(
478
508
  MIGRATION_RETRY_BASE_DELAY_MS * 2 ** (attempt - 1) * (1 + Math.random() * 0.5),
479
509
  MIGRATION_RETRY_MAX_DELAY_MS,
@@ -481,8 +511,45 @@ function runMigrations(nativeDb: DatabaseSync, db: NodeSQLiteDatabase<typeof sch
481
511
  Atomics.wait(new Int32Array(new SharedArrayBuffer(4)), 0, 0, Math.round(delay));
482
512
  }
483
513
  }
484
- /* c8 ignore next */
485
- throw lastError;
514
+
515
+ // Fix #63: Defensive column safety net — add any columns the schema expects
516
+ // but that are missing from the actual table. This catches gaps from stale
517
+ // migration journals, partial migrations, or version skew.
518
+ ensureRequiredColumns(nativeDb);
519
+ }
520
+
521
+ /**
522
+ * Required columns that MUST exist on the tasks table.
523
+ * Maps column name → ALTER TABLE ADD COLUMN DDL suffix.
524
+ * Only TEXT columns with no constraints are safe for ALTER TABLE ADD in SQLite.
525
+ *
526
+ * @see https://github.com/anthropics/cleo/issues/63
527
+ */
528
+ const REQUIRED_TASK_COLUMNS: Array<{ name: string; ddl: string }> = [
529
+ { name: 'pipeline_stage', ddl: 'text' },
530
+ ];
531
+
532
+ /**
533
+ * Ensure all required columns exist on the tasks table.
534
+ * Uses PRAGMA table_info to inspect the schema and adds any missing columns
535
+ * via ALTER TABLE ADD COLUMN. This is a safety net for databases where
536
+ * Drizzle migrations could not run due to journal corruption or version skew.
537
+ *
538
+ * @see https://github.com/anthropics/cleo/issues/63
539
+ */
540
+ function ensureRequiredColumns(nativeDb: DatabaseSync): void {
541
+ if (!tableExists(nativeDb, 'tasks')) return;
542
+
543
+ const columns = nativeDb.prepare('PRAGMA table_info(tasks)').all() as Array<{ name: string }>;
544
+ const existingCols = new Set(columns.map((c) => c.name));
545
+
546
+ for (const req of REQUIRED_TASK_COLUMNS) {
547
+ if (!existingCols.has(req.name)) {
548
+ const log = getLogger('sqlite');
549
+ log.warn({ column: req.name }, `Adding missing column tasks.${req.name} via ALTER TABLE`);
550
+ nativeDb.exec(`ALTER TABLE tasks ADD COLUMN ${req.name} ${req.ddl}`);
551
+ }
552
+ }
486
553
  }
487
554
 
488
555
  /**
@@ -3,7 +3,7 @@
3
3
  * @task T4783
4
4
  */
5
5
 
6
- import { existsSync, mkdirSync, readFileSync, writeFileSync } from 'node:fs';
6
+ import { existsSync, mkdirSync, readdirSync, readFileSync, writeFileSync } from 'node:fs';
7
7
  import { join } from 'node:path';
8
8
  import { ExitCode } from '@cleocode/contracts';
9
9
  import { CleoError } from '../errors.js';
@@ -80,6 +80,57 @@ export function createBackup(
80
80
  return { backupId, path: backupDir, timestamp, type: btype, files: backedUp };
81
81
  }
82
82
 
83
+ /** A single backup entry returned by listSystemBackups. */
84
+ export interface BackupEntry {
85
+ backupId: string;
86
+ type: string;
87
+ timestamp: string;
88
+ note?: string;
89
+ files: string[];
90
+ }
91
+
92
+ /**
93
+ * List all available system backups (snapshot, safety, migration types).
94
+ * Reads `.meta.json` sidecar files written by createBackup.
95
+ * This is a pure read operation — it does not modify any files.
96
+ * @task T4783
97
+ */
98
+ export function listSystemBackups(projectRoot: string): BackupEntry[] {
99
+ const cleoDir = join(projectRoot, '.cleo');
100
+ const backupTypes = ['snapshot', 'safety', 'migration'];
101
+ const entries: BackupEntry[] = [];
102
+
103
+ for (const btype of backupTypes) {
104
+ const backupDir = join(cleoDir, 'backups', btype);
105
+ if (!existsSync(backupDir)) continue;
106
+ try {
107
+ const files = readdirSync(backupDir).filter((f) => f.endsWith('.meta.json'));
108
+ for (const metaFile of files) {
109
+ try {
110
+ const raw = readFileSync(join(backupDir, metaFile), 'utf-8');
111
+ const meta = JSON.parse(raw) as Partial<BackupEntry>;
112
+ if (meta.backupId && meta.timestamp) {
113
+ entries.push({
114
+ backupId: meta.backupId,
115
+ type: meta.type ?? btype,
116
+ timestamp: meta.timestamp,
117
+ note: meta.note,
118
+ files: meta.files ?? [],
119
+ });
120
+ }
121
+ } catch {
122
+ // skip malformed meta files
123
+ }
124
+ }
125
+ } catch {
126
+ // skip unreadable backup directories
127
+ }
128
+ }
129
+
130
+ // Sort newest first
131
+ return entries.sort((a, b) => b.timestamp.localeCompare(a.timestamp));
132
+ }
133
+
83
134
  /** Restore from a backup. */
84
135
  export function restoreBackup(
85
136
  projectRoot: string,
@@ -286,7 +286,9 @@ describe('addTask (integration)', () => {
286
286
  accessor,
287
287
  );
288
288
  expect(result.dryRun).toBe(true);
289
- expect(result.task.id).toBe('T001');
289
+ // Dry run does not allocate a real sequence ID — the task is a preview only
290
+ expect(result.task.id).toBe('T???');
291
+ expect(result.task.title).toBe('Dry run task');
290
292
  });
291
293
 
292
294
  it('validates parent hierarchy', async () => {
package/src/tasks/add.ts CHANGED
@@ -645,6 +645,72 @@ export async function addTask(
645
645
  return { task: duplicate, duplicate: true };
646
646
  }
647
647
 
648
+ // Dry run: build a preview task without allocating a sequence ID or writing to the DB.
649
+ // Must be checked before allocateNextTaskId to avoid advancing the counter on no-op runs.
650
+ if (options.dryRun) {
651
+ const previewNow = new Date().toISOString();
652
+
653
+ // Resolve pipeline stage for the preview without any DB writes
654
+ let previewParentForStage: import('./pipeline-stage.js').ResolvedParent | null = null;
655
+ if (parentId) {
656
+ const previewParentTask = await dataAccessor.loadSingleTask(parentId);
657
+ previewParentForStage = previewParentTask
658
+ ? { pipelineStage: previewParentTask.pipelineStage, type: previewParentTask.type }
659
+ : null;
660
+ }
661
+ const previewPipelineStage = resolveDefaultPipelineStage({
662
+ explicitStage: options.pipelineStage,
663
+ taskType: taskType ?? null,
664
+ parentTask: previewParentForStage,
665
+ });
666
+ const previewPosition =
667
+ options.position !== undefined
668
+ ? options.position
669
+ : await dataAccessor.getNextPosition(parentId);
670
+
671
+ const previewTask: Task = {
672
+ id: 'T???',
673
+ title: options.title,
674
+ description: options.description,
675
+ status,
676
+ priority,
677
+ type: taskType,
678
+ parentId: parentId || null,
679
+ position: previewPosition,
680
+ positionVersion: 0,
681
+ size,
682
+ pipelineStage: previewPipelineStage,
683
+ createdAt: previewNow,
684
+ updatedAt: previewNow,
685
+ };
686
+ if (phase) previewTask.phase = phase;
687
+ if (options.labels?.length) previewTask.labels = options.labels.map((l) => l.trim());
688
+ if (options.files?.length) previewTask.files = options.files.map((f) => f.trim());
689
+ if (options.acceptance?.length)
690
+ previewTask.acceptance = options.acceptance.map((a) => a.trim());
691
+ if (options.depends?.length) previewTask.depends = options.depends.map((d) => d.trim());
692
+ if (options.notes) {
693
+ const previewNote = `${new Date()
694
+ .toISOString()
695
+ .replace('T', ' ')
696
+ .replace(/\.\d+Z$/, ' UTC')}: ${options.notes}`;
697
+ previewTask.notes = [previewNote];
698
+ }
699
+ if (status === 'blocked' && options.description) {
700
+ previewTask.blockedBy = options.description;
701
+ }
702
+ if (status === 'done') {
703
+ previewTask.completedAt = previewNow;
704
+ }
705
+ if (taskType !== 'epic') {
706
+ const verificationEnabledRaw = await getRawConfigValue('verification.enabled', cwd);
707
+ if (verificationEnabledRaw === true) {
708
+ previewTask.verification = buildDefaultVerification(previewNow);
709
+ }
710
+ }
711
+ return { task: previewTask, dryRun: true };
712
+ }
713
+
648
714
  const taskId = await allocateNextTaskId(cwd);
649
715
 
650
716
  const now = new Date().toISOString();
@@ -741,11 +807,6 @@ export async function addTask(
741
807
  }
742
808
  }
743
809
 
744
- // Dry run
745
- if (options.dryRun) {
746
- return { task, dryRun: true };
747
- }
748
-
749
810
  // Wrap all writes in a transaction for TOCTOU safety (T023)
750
811
  await dataAccessor.transaction(async (tx: TransactionAccessor) => {
751
812
  // Position shuffling via bulk SQL update (T025)