prjct-cli 1.18.0 → 1.19.0

This diff represents the content of publicly available package versions as published to their respective registries. It is provided for informational purposes only.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,48 @@
  # Changelog
 
+ ## [1.19.0] - 2026-02-09
+
+ ### Features
+
+ - **Aggressive archival of stale storage data** (PRJ-267, #161): Automatic archival during `prjct sync` keeps LLM context lean
+   - Shipped features older than 90 days are archived to the SQLite `archives` table with a 1-line summary
+   - Pending ideas older than 180 days are marked `dormant` and excluded from LLM context
+   - Completed queue tasks older than 7 days are auto-removed and archived
+   - Paused tasks older than 30 days are archived with persistence (previously discarded)
+   - Memory log is capped at 500 active entries; overflow is archived
+
+ ### Implementation Details
+
+ New modules:
+ - `core/storage/archive-storage.ts` — Archive infrastructure: SQLite `archives` table, batch archival via transactions, restore, prune, stats
+ - `core/__tests__/storage/archive-storage.test.ts` — 13 tests covering all archival paths
+
+ Modified:
+ - `core/storage/database.ts` — Migration v2: `archives` table with entity_type, entity_id, entity_data, summary, reason columns
+ - `core/storage/shipped-storage.ts` — `archiveOldShipped()` method with a 90-day retention policy
+ - `core/storage/ideas-storage.ts` — `markDormantIdeas()` method; `dormant` status excluded from markdown context
+ - `core/storage/queue-storage.ts` — `removeStaleCompleted()` method with a 7-day retention policy
+ - `core/storage/state-storage.ts` — `archiveStalePausedTasks()` now persists to the archive table before removal
+ - `core/services/memory-service.ts` — `capEntries()` method with a 500-entry cap
+ - `core/services/sync-service.ts` — `archiveStaleData()` orchestrates all archival in parallel during sync
+ - `core/schemas/ideas.ts` + `core/types/storage.ts` — Added `dormant` to the IdeaStatus enum
+
+ ### Test Plan
+
+ #### For QA
+ 1. Run `prjct sync` with shipped features older than 90 days — verify they are archived and removed from context
+ 2. Run sync with pending ideas older than 180 days — verify `dormant` status and exclusion from `ideas.md`
+ 3. Run sync with completed queue tasks older than 7 days — verify removal and archival
+ 4. Run sync with paused tasks older than 30 days — verify archival to SQLite
+ 5. Create more than 500 memory entries, then sync — verify the cap at 500
+ 6. `bun test` — all 947+ tests pass
+ 7. Verify recent items are NOT archived
+
+ #### For Users
+ **What changed:** Storage data is automatically cleaned up during sync. Old data is archived, not deleted.
+ **How to use:** No action needed — it runs automatically on every sync.
+ **Breaking changes:** Ideas can now have a `dormant` status (new enum value).
+
  ## [1.18.0] - 2026-02-09
 
  ### Features
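
The changelog entry above names the v2 migration's columns but not its DDL. For orientation, here is a minimal sketch of what the `archives` table might look like, assuming bun:sqlite; the column set follows the changelog and the raw INSERT used in the tests below, while the constraints, default, and index are assumptions, not the package's actual migration.

```ts
import { Database } from 'bun:sqlite'

// Hypothetical sketch of migration v2 — not the package's actual code.
export function migrateToV2(db: Database): void {
  db.run(`
    CREATE TABLE IF NOT EXISTS archives (
      id          TEXT PRIMARY KEY,
      entity_type TEXT NOT NULL, -- 'shipped' | 'idea' | 'queue_task' | 'paused_task' | 'memory_entry'
      entity_id   TEXT NOT NULL,
      entity_data TEXT NOT NULL, -- JSON-serialized original entity
      summary     TEXT,          -- optional one-line summary for display
      archived_at TEXT NOT NULL DEFAULT (datetime('now')), -- tests insert ISO strings here
      reason      TEXT NOT NULL  -- e.g. 'age' | 'dormant' | 'overflow'
    )
  `)
  // Assumed index: getArchived() filters by entity_type throughout the tests
  db.run('CREATE INDEX IF NOT EXISTS idx_archives_entity_type ON archives (entity_type)')
}
```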
package/core/__tests__/storage/archive-storage.test.ts ADDED
@@ -0,0 +1,455 @@
+ /**
+  * Archive Storage Tests (PRJ-267)
+  *
+  * Tests for the archive infrastructure and archival policies:
+  * - Archive table operations (insert, query, restore)
+  * - Shipped features archival (>90 days)
+  * - Ideas dormancy (>180 days pending)
+  * - Queue cleanup (>7 days completed)
+  * - Paused task archival (>30 days)
+  * - Memory log capping (500 entries)
+  */
+
+ import { afterEach, beforeEach, describe, expect, it } from 'bun:test'
+ import fs from 'node:fs/promises'
+ import os from 'node:os'
+ import path from 'node:path'
+ import pathManager from '../../infrastructure/path-manager'
+ import { ARCHIVE_POLICIES, archiveStorage } from '../../storage/archive-storage'
+ import { prjctDb } from '../../storage/database'
+ import { ideasStorage } from '../../storage/ideas-storage'
+ import { queueStorage } from '../../storage/queue-storage'
+ import { shippedStorage } from '../../storage/shipped-storage'
+ import { stateStorage } from '../../storage/state-storage'
+ import { getTimestamp } from '../../utils/date-helper'
+
+ // =============================================================================
+ // Test Setup
+ // =============================================================================
+
+ let tmpRoot: string
+ let testProjectId: string
+
+ const originalGetGlobalProjectPath = pathManager.getGlobalProjectPath.bind(pathManager)
+ const originalGetFilePath = pathManager.getFilePath.bind(pathManager)
+
+ function daysAgoISO(days: number): string {
+   const d = new Date()
+   d.setDate(d.getDate() - days)
+   return d.toISOString()
+ }
+
+ describe('Archive Storage', () => {
+   beforeEach(async () => {
+     tmpRoot = await fs.mkdtemp(path.join(os.tmpdir(), 'prjct-archive-test-'))
+     testProjectId = 'test-archive-project'
+
+     pathManager.getGlobalProjectPath = (projectId: string) => path.join(tmpRoot, projectId)
+
+     pathManager.getFilePath = (projectId: string, layer: string, filename: string) =>
+       path.join(tmpRoot, projectId, layer, filename)
+
+     // Ensure all required dirs exist
+     const dirs = ['context', 'memory', 'core', 'progress', 'planning', 'sync']
+     await Promise.all(
+       dirs.map((d) => fs.mkdir(path.join(tmpRoot, testProjectId, d), { recursive: true }))
+     )
+
+     // Create empty pending.json for event bus
+     await fs.writeFile(path.join(tmpRoot, testProjectId, 'sync', 'pending.json'), '[]', 'utf-8')
+
+     // Initialize the database (triggers migrations including archives table)
+     prjctDb.getDb(testProjectId)
+   })
+
+   afterEach(async () => {
+     prjctDb.close()
+     pathManager.getGlobalProjectPath = originalGetGlobalProjectPath
+     pathManager.getFilePath = originalGetFilePath
+
+     if (tmpRoot) {
+       await fs.rm(tmpRoot, { recursive: true, force: true })
+     }
+   })
+
+   // ===========================================================================
+   // Archive Table Operations
+   // ===========================================================================
+
+   describe('archive table', () => {
+     it('should archive a single item', () => {
+       const id = archiveStorage.archive(testProjectId, {
+         entityType: 'shipped',
+         entityId: 'ship-1',
+         entityData: { name: 'Feature A', version: '1.0.0' },
+         summary: 'Feature A v1.0.0',
+         reason: 'age',
+       })
+
+       expect(id).toBeTruthy()
+
+       const records = archiveStorage.getArchived(testProjectId, 'shipped')
+       expect(records).toHaveLength(1)
+       expect(records[0].entity_id).toBe('ship-1')
+       expect(records[0].summary).toBe('Feature A v1.0.0')
+     })
+
+     it('should archive multiple items in a transaction', () => {
+       const count = archiveStorage.archiveMany(testProjectId, [
+         { entityType: 'shipped', entityId: 's1', entityData: { a: 1 }, reason: 'age' },
+         { entityType: 'shipped', entityId: 's2', entityData: { a: 2 }, reason: 'age' },
+         { entityType: 'idea', entityId: 'i1', entityData: { b: 1 }, reason: 'dormant' },
+       ])
+
+       expect(count).toBe(3)
+
+       const stats = archiveStorage.getStats(testProjectId)
+       expect(stats.shipped).toBe(2)
+       expect(stats.idea).toBe(1)
+       expect(stats.total).toBe(3)
+     })
+
+     it('should restore an archived item', () => {
+       archiveStorage.archive(testProjectId, {
+         entityType: 'shipped',
+         entityId: 'ship-1',
+         entityData: { name: 'restored' },
+         reason: 'age',
+       })
+
+       const records = archiveStorage.getArchived(testProjectId)
+       expect(records).toHaveLength(1)
+
+       const data = archiveStorage.restore(testProjectId, records[0].id)
+       expect(data).toEqual({ name: 'restored' })
+
+       // Should be removed from archive
+       const after = archiveStorage.getArchived(testProjectId)
+       expect(after).toHaveLength(0)
+     })
+
+     it('should prune old archives', () => {
+       // Insert an archive with old timestamp
+       const db = prjctDb.getDb(testProjectId)
+       const oldDate = daysAgoISO(400)
+       db.prepare(
+         'INSERT INTO archives (id, entity_type, entity_id, entity_data, archived_at, reason) VALUES (?, ?, ?, ?, ?, ?)'
+       ).run('old-1', 'shipped', 's1', '{}', oldDate, 'age')
+
+       archiveStorage.archive(testProjectId, {
+         entityType: 'shipped',
+         entityId: 's2',
+         entityData: {},
+         reason: 'age',
+       })
+
+       const pruned = archiveStorage.pruneOldArchives(testProjectId, 365)
+       expect(pruned).toBe(1)
+
+       const remaining = archiveStorage.getArchived(testProjectId)
+       expect(remaining).toHaveLength(1)
+       expect(remaining[0].entity_id).toBe('s2')
+     })
+   })
+
+   // ===========================================================================
+   // Shipped Features Archival
+   // ===========================================================================
+
+   describe('shipped archival', () => {
+     it('should archive shipped features older than 90 days', async () => {
+       // Write shipped data with old and recent items
+       await shippedStorage.write(testProjectId, {
+         shipped: [
+           { id: 'recent', name: 'Recent', version: '2.0.0', shippedAt: daysAgoISO(10) },
+           { id: 'old', name: 'Old', version: '1.0.0', shippedAt: daysAgoISO(100) },
+         ],
+         lastUpdated: getTimestamp(),
+       })
+
+       const archived = await shippedStorage.archiveOldShipped(testProjectId)
+       expect(archived).toBe(1)
+
+       // Verify active storage only has recent
+       const data = await shippedStorage.read(testProjectId)
+       expect(data.shipped).toHaveLength(1)
+       expect(data.shipped[0].id).toBe('recent')
+
+       // Verify archive table has old item
+       const records = archiveStorage.getArchived(testProjectId, 'shipped')
+       expect(records).toHaveLength(1)
+       expect(records[0].entity_id).toBe('old')
+       expect(records[0].summary).toBe('Old v1.0.0')
+     })
+
+     it('should not archive recent shipped features', async () => {
+       await shippedStorage.write(testProjectId, {
+         shipped: [
+           { id: 'r1', name: 'R1', version: '1.0.0', shippedAt: daysAgoISO(5) },
+           { id: 'r2', name: 'R2', version: '1.1.0', shippedAt: daysAgoISO(30) },
+         ],
+         lastUpdated: getTimestamp(),
+       })
+
+       const archived = await shippedStorage.archiveOldShipped(testProjectId)
+       expect(archived).toBe(0)
+
+       const data = await shippedStorage.read(testProjectId)
+       expect(data.shipped).toHaveLength(2)
+     })
+   })
+
+   // ===========================================================================
+   // Ideas Dormancy
+   // ===========================================================================
+
+   describe('ideas dormancy', () => {
+     it('should mark pending ideas older than 180 days as dormant', async () => {
+       await ideasStorage.write(testProjectId, {
+         ideas: [
+           {
+             id: 'new',
+             text: 'New idea',
+             status: 'pending',
+             priority: 'medium',
+             tags: [],
+             addedAt: daysAgoISO(10),
+           },
+           {
+             id: 'stale',
+             text: 'Stale idea',
+             status: 'pending',
+             priority: 'low',
+             tags: [],
+             addedAt: daysAgoISO(200),
+           },
+           {
+             id: 'converted',
+             text: 'Converted',
+             status: 'converted',
+             priority: 'high',
+             tags: [],
+             addedAt: daysAgoISO(300),
+           },
+         ],
+         lastUpdated: getTimestamp(),
+       })
+
+       const dormant = await ideasStorage.markDormantIdeas(testProjectId)
+       expect(dormant).toBe(1)
+
+       const data = await ideasStorage.read(testProjectId)
+       const stale = data.ideas.find((i) => i.id === 'stale')
+       expect(stale?.status).toBe('dormant')
+
+       // New idea should remain pending
+       const fresh = data.ideas.find((i) => i.id === 'new')
+       expect(fresh?.status).toBe('pending')
+
+       // Converted should remain converted
+       const conv = data.ideas.find((i) => i.id === 'converted')
+       expect(conv?.status).toBe('converted')
+
+       // Archive table should have the dormant idea
+       const records = archiveStorage.getArchived(testProjectId, 'idea')
+       expect(records).toHaveLength(1)
+     })
+
+     it('should exclude dormant ideas from markdown context', async () => {
+       await ideasStorage.write(testProjectId, {
+         ideas: [
+           {
+             id: 'active',
+             text: 'Active idea',
+             status: 'pending',
+             priority: 'medium',
+             tags: [],
+             addedAt: daysAgoISO(5),
+           },
+           {
+             id: 'dormant',
+             text: 'Dormant idea',
+             status: 'dormant',
+             priority: 'low',
+             tags: [],
+             addedAt: daysAgoISO(200),
+           },
+         ],
+         lastUpdated: getTimestamp(),
+       })
+
+       // Read the generated markdown context file
+       const contextPath = pathManager.getFilePath(testProjectId, 'planning', 'ideas.md')
+       const md = await fs.readFile(contextPath, 'utf-8')
+
+       expect(md).toContain('Active idea')
+       expect(md).not.toContain('Dormant idea')
+       expect(md).toContain('1 dormant idea(s) excluded from context')
+     })
+   })
+
+   // ===========================================================================
+   // Queue Cleanup
+   // ===========================================================================
+
+   describe('queue cleanup', () => {
+     it('should remove completed tasks older than 7 days', async () => {
+       await queueStorage.write(testProjectId, {
+         tasks: [
+           {
+             id: 'active',
+             description: 'Active',
+             type: 'feature',
+             priority: 'medium',
+             section: 'active',
+             createdAt: daysAgoISO(1),
+             completed: false,
+           },
+           {
+             id: 'recent-done',
+             description: 'Recent done',
+             type: 'feature',
+             priority: 'medium',
+             section: 'active',
+             createdAt: daysAgoISO(5),
+             completed: true,
+             completedAt: daysAgoISO(2),
+           },
+           {
+             id: 'old-done',
+             description: 'Old done',
+             type: 'feature',
+             priority: 'low',
+             section: 'active',
+             createdAt: daysAgoISO(30),
+             completed: true,
+             completedAt: daysAgoISO(10),
+           },
+         ],
+         lastUpdated: getTimestamp(),
+       })
+
+       const removed = await queueStorage.removeStaleCompleted(testProjectId)
+       expect(removed).toBe(1)
+
+       const data = await queueStorage.read(testProjectId)
+       expect(data.tasks).toHaveLength(2)
+       expect(data.tasks.map((t) => t.id).sort()).toEqual(['active', 'recent-done'])
+
+       // Archive should have the old completed task
+       const records = archiveStorage.getArchived(testProjectId, 'queue_task')
+       expect(records).toHaveLength(1)
+       expect(records[0].entity_id).toBe('old-done')
+     })
+   })
+
+   // ===========================================================================
+   // Paused Task Archival
+   // ===========================================================================
+
+   describe('paused task archival', () => {
+     it('should archive paused tasks older than 30 days', async () => {
+       await stateStorage.write(testProjectId, {
+         currentTask: null,
+         previousTask: null,
+         pausedTasks: [
+           {
+             id: 'recent',
+             description: 'Recent pause',
+             status: 'paused',
+             startedAt: daysAgoISO(35),
+             pausedAt: daysAgoISO(5),
+           },
+           {
+             id: 'stale',
+             description: 'Stale pause',
+             status: 'paused',
+             startedAt: daysAgoISO(60),
+             pausedAt: daysAgoISO(40),
+           },
+         ],
+         lastUpdated: getTimestamp(),
+       })
+
+       const archived = await stateStorage.archiveStalePausedTasks(testProjectId)
+       expect(archived).toHaveLength(1)
+       expect(archived[0].id).toBe('stale')
+
+       // Active state should only have recent
+       const state = await stateStorage.read(testProjectId)
+       expect(state.pausedTasks).toHaveLength(1)
+       expect(state.pausedTasks![0].id).toBe('recent')
+
+       // Archive table should have stale
+       const records = archiveStorage.getArchived(testProjectId, 'paused_task')
+       expect(records).toHaveLength(1)
+       expect(records[0].entity_id).toBe('stale')
+     })
+   })
+
+   // ===========================================================================
+   // Memory Log Capping
+   // ===========================================================================
+
+   describe('memory log capping', () => {
+     it('should cap memory entries at max limit', async () => {
+       const memoryPath = pathManager.getFilePath(testProjectId, 'memory', 'context.jsonl')
+
+       // Write more entries than the limit
+       const entries: string[] = []
+       const total = ARCHIVE_POLICIES.MEMORY_MAX_ENTRIES + 50
+       for (let i = 0; i < total; i++) {
+         entries.push(
+           JSON.stringify({
+             timestamp: new Date(Date.now() - (total - i) * 1000).toISOString(),
+             action: `action-${i}`,
+             data: { index: i },
+           })
+         )
+       }
+       await fs.writeFile(memoryPath, `${entries.join('\n')}\n`, 'utf-8')
+
+       // Import and use memoryService
+       const { memoryService } = await import('../../services/memory-service')
+       const capped = await memoryService.capEntries(testProjectId)
+       expect(capped).toBe(50)
+
+       // File should now have exactly max entries
+       const content = await fs.readFile(memoryPath, 'utf-8')
+       const remaining = content.trim().split('\n').filter(Boolean)
+       expect(remaining).toHaveLength(ARCHIVE_POLICIES.MEMORY_MAX_ENTRIES)
+
+       // Archive should have the overflow
+       const records = archiveStorage.getArchived(testProjectId, 'memory_entry')
+       expect(records).toHaveLength(50)
+     })
+
+     it('should not cap if under limit', async () => {
+       const memoryPath = pathManager.getFilePath(testProjectId, 'memory', 'context.jsonl')
+
+       const entries: string[] = []
+       for (let i = 0; i < 10; i++) {
+         entries.push(JSON.stringify({ timestamp: getTimestamp(), action: `a-${i}`, data: {} }))
+       }
+       await fs.writeFile(memoryPath, `${entries.join('\n')}\n`, 'utf-8')
+
+       const { memoryService } = await import('../../services/memory-service')
+       const capped = await memoryService.capEntries(testProjectId)
+       expect(capped).toBe(0)
+     })
+   })
+
+   // ===========================================================================
+   // Archive Policies Constants
+   // ===========================================================================
+
+   describe('archive policies', () => {
+     it('should have correct default policy values', () => {
+       expect(ARCHIVE_POLICIES.SHIPPED_RETENTION_DAYS).toBe(90)
+       expect(ARCHIVE_POLICIES.IDEA_DORMANT_DAYS).toBe(180)
+       expect(ARCHIVE_POLICIES.QUEUE_COMPLETED_DAYS).toBe(7)
+       expect(ARCHIVE_POLICIES.PAUSED_TASK_DAYS).toBe(30)
+       expect(ARCHIVE_POLICIES.MEMORY_MAX_ENTRIES).toBe(500)
+     })
+   })
+ })
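
The 13 tests above pin down each policy's observable behavior without showing the storage internals. The recurring shape is read → partition by age → archive the stale half → rewrite the active half. Below is a self-contained sketch of the partition step; the `ShippedItem` type and `splitByAge` helper are illustrative names, not the package's internals.

```ts
// Illustrative only — types and helper name are not from the package.
interface ShippedItem {
  id: string
  name: string
  version: string
  shippedAt: string // ISO timestamp, as the tests above write it
}

// Partition items against a retention window given in days.
function splitByAge(items: ShippedItem[], retentionDays: number) {
  const cutoff = Date.now() - retentionDays * 24 * 60 * 60 * 1000
  const keep: ShippedItem[] = []
  const stale: ShippedItem[] = []
  for (const item of items) {
    if (Date.parse(item.shippedAt) < cutoff) {
      stale.push(item)
    } else {
      keep.push(item)
    }
  }
  return { keep, stale }
}
```

`archiveOldShipped()` would then write `keep` back to active storage and hand `stale` to `archiveStorage.archiveMany()` with a `${name} v${version}` summary — exactly the outcome the shipped-archival tests assert.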
package/core/schemas/ideas.ts CHANGED
@@ -14,7 +14,7 @@ import { z } from 'zod'
  // =============================================================================
 
  export const IdeaPrioritySchema = z.enum(['low', 'medium', 'high'])
- export const IdeaStatusSchema = z.enum(['pending', 'converted', 'completed', 'archived'])
+ export const IdeaStatusSchema = z.enum(['pending', 'converted', 'completed', 'archived', 'dormant'])
  export const ImpactLevelSchema = z.enum(['high', 'medium', 'low'])
 
  export const ImpactEffortSchema = z.object({
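
For downstream code that validates idea records, the enum change is directly observable. A small sketch (only the schema line comes from the diff; the rest is standard zod usage):

```ts
import { z } from 'zod'

const IdeaStatusSchema = z.enum(['pending', 'converted', 'completed', 'archived', 'dormant'])
type IdeaStatus = z.infer<typeof IdeaStatusSchema>

// Accepted as of 1.19.0; this would have thrown a ZodError in 1.18.0
const status: IdeaStatus = IdeaStatusSchema.parse('dormant')
console.log(status) // 'dormant'
```

This is why the changelog lists the new value under breaking changes: consumers that mirror the status enum in their own types or validation must add `dormant` before reading 1.19.0 data.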
package/core/services/memory-service.ts CHANGED
@@ -6,6 +6,7 @@
 
  import configManager from '../infrastructure/config-manager'
  import pathManager from '../infrastructure/path-manager'
+ import { ARCHIVE_POLICIES, archiveStorage } from '../storage/archive-storage'
  import type { MemoryServiceEntry } from '../types'
  import { getErrorMessage, isNotFoundError } from '../types/fs'
  import { getTimestamp } from '../utils/date-helper'
@@ -133,6 +134,47 @@ export class MemoryService {
        return []
      }
    }
+
+   /**
+    * Cap memory log at max entries (PRJ-267).
+    * Moves overflow entries to archive table, keeps most recent entries.
+    * Returns count of archived entries.
+    */
+   async capEntries(projectId: string): Promise<number> {
+     try {
+       const memoryPath = pathManager.getFilePath(projectId, 'memory', 'context.jsonl')
+       const entries = await jsonlHelper.readJsonLines<MemoryServiceEntry>(memoryPath)
+
+       if (entries.length <= ARCHIVE_POLICIES.MEMORY_MAX_ENTRIES) {
+         return 0
+       }
+
+       const overflow = entries.slice(0, entries.length - ARCHIVE_POLICIES.MEMORY_MAX_ENTRIES)
+       const kept = entries.slice(-ARCHIVE_POLICIES.MEMORY_MAX_ENTRIES)
+
+       // Archive overflow entries in batch
+       archiveStorage.archiveMany(
+         projectId,
+         overflow.map((entry, i) => ({
+           entityType: 'memory_entry' as const,
+           entityId: `memory-${entry.timestamp || i}`,
+           entityData: entry,
+           summary: entry.action,
+           reason: 'overflow',
+         }))
+       )
+
+       // Rewrite file with only kept entries
+       await jsonlHelper.writeJsonLines(memoryPath, kept)
+
+       return overflow.length
+     } catch (error) {
+       if (!isNotFoundError(error)) {
+         console.error(`Memory cap error: ${getErrorMessage(error)}`)
+       }
+       return 0
+     }
+   }
  }
 
  export const memoryService = new MemoryService()
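
A usage sketch for the new method, matching the behavior the tests exercise; the import path and project id are placeholders:

```ts
import { memoryService } from './core/services/memory-service'

// Returns how many entries were moved to the archives table; 0 when the
// log is already at or under ARCHIVE_POLICIES.MEMORY_MAX_ENTRIES (500).
const archived = await memoryService.capEntries('my-project')
console.log(`archived ${archived} overflow memory entries`)
```

Because the oldest entries are sliced off the front and only the kept tail is rewritten, repeated calls are no-ops until new writes push the log past the cap again.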
package/core/services/sync-service.ts CHANGED
@@ -37,8 +37,13 @@ import commandInstaller from '../infrastructure/command-installer'
  import configManager from '../infrastructure/config-manager'
  import pathManager from '../infrastructure/path-manager'
  import { analysisStorage } from '../storage/analysis-storage'
+ import { archiveStorage } from '../storage/archive-storage'
+ import { ideasStorage } from '../storage/ideas-storage'
  import { metricsStorage } from '../storage/metrics-storage'
  import { migrateJsonToSqlite } from '../storage/migrate-json'
+ import { queueStorage } from '../storage/queue-storage'
+ import { shippedStorage } from '../storage/shipped-storage'
+ import { stateStorage } from '../storage/state-storage'
  import type {
    GitData,
    IncrementalInfo,
@@ -56,6 +61,7 @@ import * as dateHelper from '../utils/date-helper'
  import log from '../utils/logger'
  import { ContextFileGenerator } from './context-generator'
  import { localStateGenerator } from './local-state-generator'
+ import { memoryService } from './memory-service'
  import { skillInstaller } from './skill-installer'
  import { StackDetector } from './stack-detector'
  import { syncVerifier } from './sync-verifier'
@@ -290,6 +296,9 @@
      const duration = Date.now() - startTime
      const syncMetrics = await this.recordSyncMetrics(stats, contextFiles, agents, duration)
 
+     // 9b. Archive stale data (PRJ-267)
+     await this.archiveStaleData()
+
      // 10. Update global config and commands (CLI does EVERYTHING)
      // This ensures `prjct sync` from terminal updates global CLAUDE.md and commands
      await commandInstaller.installGlobalConfig()
@@ -1270,6 +1279,48 @@ You are the ${name} expert for this project. Apply best practices for the detect
      }
    }
 
+   // ==========================================================================
+   // ARCHIVAL (PRJ-267)
+   // ==========================================================================
+
+   /**
+    * Archive stale data across all storage types.
+    * Runs during sync to keep active storage lean.
+    */
+   private async archiveStaleData(): Promise<void> {
+     if (!this.projectId) return
+
+     try {
+       const [shipped, dormant, staleQueue, stalePaused, memoryCapped] = await Promise.all([
+         shippedStorage.archiveOldShipped(this.projectId).catch(() => 0),
+         ideasStorage.markDormantIdeas(this.projectId).catch(() => 0),
+         queueStorage.removeStaleCompleted(this.projectId).catch(() => 0),
+         stateStorage.archiveStalePausedTasks(this.projectId).catch(() => []),
+         memoryService.capEntries(this.projectId).catch(() => 0),
+       ])
+
+       const totalArchived =
+         shipped + dormant + staleQueue + (stalePaused as unknown[]).length + memoryCapped
+
+       if (totalArchived > 0) {
+         log.info('Archived stale data', {
+           shipped,
+           dormant,
+           staleQueue,
+           stalePaused: (stalePaused as unknown[]).length,
+           memoryCapped,
+           total: totalArchived,
+         })
+
+         // Record archive stats
+         const stats = archiveStorage.getStats(this.projectId)
+         log.debug('Archive stats', stats)
+       }
+     } catch (error) {
+       log.debug('Archival failed (non-critical)', { error: getErrorMessage(error) })
+     }
+   }
+
    // ==========================================================================
    // HELPERS
    // ==========================================================================
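
Finally, archival is reversible by design. Based on the `archiveStorage` surface the tests exercise (`getStats`, `getArchived`, `restore`, `pruneOldArchives`), inspection and rollback look roughly like this; the project id is a placeholder and re-insertion into active storage is omitted:

```ts
import { archiveStorage } from './core/storage/archive-storage'

const projectId = 'my-project' // placeholder

// Per-type counts plus a total, as asserted in the tests
const stats = archiveStorage.getStats(projectId)
console.log(stats) // e.g. { shipped: 2, idea: 1, total: 3 }

// restore() returns the original entity data and deletes the archive row
const [record] = archiveStorage.getArchived(projectId, 'shipped')
if (record) {
  const entity = archiveStorage.restore(projectId, record.id)
  // ...write `entity` back into active storage as needed
}

// The archives table itself is bounded: prune rows older than N days
archiveStorage.pruneOldArchives(projectId, 365)
```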