prjct-cli 1.18.0 → 1.20.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,127 @@
  # Changelog
 
+ ## [1.20.0] - 2026-02-10
+
+ ### Features
+
+ - add retry with exponential backoff for agent and tool operations (#162)
+
+
+ ## [1.20.0] - 2026-02-09
+
+ ### Features
+
+ - **Retry with exponential backoff for agent and tool operations** (PRJ-271): Comprehensive retry infrastructure with error classification and circuit breaker
+   - RetryPolicy utility with configurable attempts, delays, and exponential backoff (1s→2s→4s)
+   - Automatic error classification: transient (EBUSY, EAGAIN, ETIMEDOUT) vs permanent (ENOENT, EPERM)
+   - Circuit breaker protection: opens after 5 consecutive failures, auto-closes after 60s
+   - Agent initialization retries (3 attempts with 1s base delay)
+   - Tool operations retry (Read/Write/Bash with 2 attempts)
+   - Resilient parallel agent generation using Promise.allSettled()
+
+ ### Implementation Details
+
+ Built a RetryPolicy utility with exponential backoff, error classification, and a circuit breaker, and integrated it across agent initialization, tool operations, and parallel agent generation. The system now automatically retries transient failures while failing fast on permanent errors.
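A minimal sketch of the retry pattern this entry describes, under the assumption of hypothetical names (`withRetry`, `RetryOptions`, `isTransient`); the actual API of `core/utils/retry.ts` is not visible in this diff:

```typescript
// Sketch only: classify error codes, retry transient failures with
// 1s -> 2s -> 4s backoff, fail fast on permanent ones. Not the real
// core/utils/retry.ts API.
const TRANSIENT_CODES = new Set(['EBUSY', 'EAGAIN', 'ETIMEDOUT'])

function isTransient(error: unknown): boolean {
  const code = (error as NodeJS.ErrnoException | null)?.code
  return code !== undefined && TRANSIENT_CODES.has(code)
}

interface RetryOptions {
  attempts: number     // total attempts (3 for agents, 2 for tools per this entry)
  baseDelayMs: number  // first backoff delay (1000ms per this entry)
  maxDelayMs?: number  // optional cap on the backoff delay
}

async function withRetry<T>(operation: () => Promise<T>, opts: RetryOptions): Promise<T> {
  let lastError: unknown
  for (let attempt = 1; attempt <= opts.attempts; attempt++) {
    try {
      return await operation()
    } catch (error) {
      lastError = error
      // Permanent errors (ENOENT, EPERM, ...) are not in the transient set
      // and fail immediately; transient ones retry while attempts remain.
      if (!isTransient(error) || attempt === opts.attempts) throw error
      const delay = Math.min(
        opts.baseDelayMs * 2 ** (attempt - 1), // 1s, 2s, 4s, ...
        opts.maxDelayMs ?? Number.POSITIVE_INFINITY
      )
      await new Promise((resolve) => setTimeout(resolve, delay))
    }
  }
  throw lastError
}
```

Agent initialization would then be wrapped as, e.g., `await withRetry(() => initializeAgent(), { attempts: 3, baseDelayMs: 1000 })` (again with stand-in names).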
+
+ **New modules:**
+ - `core/utils/retry.ts` (320 lines) — Core retry infrastructure with the RetryPolicy class, error classification, and circuit breaker
+ - `core/__tests__/utils/retry.test.ts` (380 lines) — 21 comprehensive tests with 53 assertions
+ - `ACCEPTANCE-PRJ-271.md` — Full acceptance criteria verification (22 criteria verified)
+
+ **Modified modules:**
+ - `core/services/agent-service.ts` — Wrapped initialize() with retry policy (3 attempts)
+ - `core/agentic/tool-registry.ts` — Added retry to Read/Write/Bash tools (2 attempts each)
+ - `core/services/agent-generator.ts` — Changed to Promise.allSettled() with per-agent retry
+
+ **Key features:**
+ - Exponential backoff: 1s, 2s, 4s (configurable base/max)
+ - Error classification: automatic transient vs permanent detection
+ - Circuit breaker: per-operation tracking, 5-failure threshold, 60s cooldown (sketched after this list)
+ - Two default policies: defaultAgentRetryPolicy (3 attempts), defaultToolRetryPolicy (2 attempts)
+ - Zero breaking changes: all 968 existing tests pass
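The circuit-breaker behavior listed above (per-operation tracking, 5-failure threshold, 60s cooldown) can be pictured with a sketch like the following; the class and method names are illustrative, not the ones in `core/utils/retry.ts`:

```typescript
// Sketch of the described circuit-breaker semantics: open after 5
// consecutive failures on an operation, auto-close after a 60s cooldown.
class CircuitBreaker {
  private failures = new Map<string, { count: number; openedAt?: number }>()

  constructor(
    private readonly threshold = 5,       // consecutive failures before opening
    private readonly cooldownMs = 60_000  // auto-close after 60 seconds
  ) {}

  canExecute(operation: string): boolean {
    const state = this.failures.get(operation)
    if (!state?.openedAt) return true
    // Auto-close once the cooldown has elapsed.
    if (Date.now() - state.openedAt >= this.cooldownMs) {
      this.failures.delete(operation)
      return true
    }
    return false
  }

  recordSuccess(operation: string): void {
    this.failures.delete(operation)
  }

  recordFailure(operation: string): void {
    const state = this.failures.get(operation) ?? { count: 0 }
    state.count += 1
    if (state.count >= this.threshold) state.openedAt ??= Date.now()
    this.failures.set(operation, state)
  }
}
```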
+
+ ### Learnings
+
+ - **RetryPolicy pattern:** Wrapping operations in a retry executor cleanly separates retry logic from business logic
+ - **Error classification strategies:** Using error-code sets (EBUSY, EAGAIN) for transient vs (ENOENT, EPERM) for permanent errors enables automatic retry decisions
+ - **Promise.allSettled() for resilient parallel operations:** Prevents one failure from blocking the others, enabling partial success (sketched after this list)
+ - **Circuit breaker implementation:** Per-operation state tracking prevents cascading failures while still allowing recovery
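For the Promise.allSettled() learning, the shape of the resilient fan-out is roughly the following (reusing the hypothetical `withRetry` from the earlier sketch; `agentNames` and `generateAgent` are likewise stand-ins, not names from this diff):

```typescript
// Sketch: each generation retries independently, and one rejection no
// longer aborts the whole batch; failures surface as warnings instead.
declare const agentNames: string[]
declare function generateAgent(name: string): Promise<void>

const results = await Promise.allSettled(
  agentNames.map((name) =>
    withRetry(() => generateAgent(name), { attempts: 3, baseDelayMs: 1000 })
  )
)

results.forEach((result, i) => {
  if (result.status === 'rejected') {
    console.warn(`Agent generation failed for ${agentNames[i]}:`, result.reason)
  }
})
```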
+
+ ### Test Plan
+
+ #### For QA
+
+ 1. **Agent Initialization Retry**
+    - Temporarily make the file system busy during agent initialization
+    - Verify agent initialization retries up to 3 times
+    - Confirm permanent errors (unsupported agent) fail immediately
+
+ 2. **Tool Operations Retry**
+    - Test Read/Write/Bash with transient errors (EBUSY, ETIMEDOUT)
+    - Verify operations retry automatically (2 attempts)
+    - Confirm permanent errors (ENOENT, EPERM) return null/false without retrying
+
+ 3. **Circuit Breaker**
+    - Trigger 5 consecutive failures on the same operation
+    - Verify the circuit breaker opens and blocks further attempts
+    - Wait 60 seconds and verify the circuit closes automatically
+
+ 4. **Parallel Agent Generation**
+    - Simulate one agent generation failure during sync
+    - Verify the other agents generate successfully (Promise.allSettled behavior)
+    - Check logs for failure warnings
+
+ #### For Users
+
+ **What changed:** The system is now more resilient against transient failures. Operations like agent initialization, file reads/writes, and command execution automatically retry when they encounter temporary errors (disk busy, timeouts, etc.).
+
+ **How to use:** No action required — retry logic works automatically. Users should see fewer random failures during normal operations.
+
+ **Breaking changes:** None. All changes are backward compatible. All 968 existing tests pass.
+
+ ## [1.19.0] - 2026-02-09
+
+ ### Features
+
+ - **Aggressive archival of stale storage data** (PRJ-267): Automatic archival during `prjct sync` to keep the LLM context lean (age check sketched after this list)
+   - Shipped features >90 days old are archived to the SQLite `archives` table with a 1-line summary
+   - Pending ideas >180 days old are marked `dormant` and excluded from the LLM context
+   - Completed queue tasks >7 days old are auto-removed and archived
+   - Paused tasks >30 days old are archived with persistence (previously discarded)
+   - Memory log capped at 500 active entries, with overflow archived
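The retention rules above all reduce to the same age test. A sketch of that check, with invented helper names since the real ones live inside the storage modules listed below:

```typescript
// Sketch of the shared age-threshold test behind the retention policies
// (90d shipped, 180d ideas, 7d queue, 30d paused). Helper names are invented.
const DAY_MS = 24 * 60 * 60 * 1000

function isOlderThan(isoTimestamp: string, days: number): boolean {
  return Date.now() - new Date(isoTimestamp).getTime() > days * DAY_MS
}

// e.g. partitioning items against a retention window before archival:
function splitByAge<T>(items: T[], timestampOf: (item: T) => string, days: number) {
  const keep: T[] = []
  const archive: T[] = []
  for (const item of items) {
    if (isOlderThan(timestampOf(item), days)) {
      archive.push(item)
    } else {
      keep.push(item)
    }
  }
  return { keep, archive }
}

// splitByAge(data.shipped, (f) => f.shippedAt, 90)
```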
+
+ ### Implementation Details
+
+ New modules:
+ - `core/storage/archive-storage.ts` — Archive infrastructure: SQLite `archives` table, batch archival via transactions, restore, prune, stats
+ - `core/__tests__/storage/archive-storage.test.ts` — 13 tests covering all archival paths
+
+ Modified:
+ - `core/storage/database.ts` — Migration v2: `archives` table with entity_type, entity_id, entity_data, summary, and reason columns (shape sketched below)
+ - `core/storage/shipped-storage.ts` — `archiveOldShipped()` method with a 90-day retention policy
+ - `core/storage/ideas-storage.ts` — `markDormantIdeas()` method; `dormant` status excluded from markdown context
+ - `core/storage/queue-storage.ts` — `removeStaleCompleted()` method with 7-day retention
+ - `core/storage/state-storage.ts` — `archiveStalePausedTasks()` now persists to the archive table before removal
+ - `core/services/memory-service.ts` — `capEntries()` method with a 500-entry cap
+ - `core/services/sync-service.ts` — `archiveStaleData()` orchestrates all archival in parallel during sync
+ - `core/schemas/ideas.ts` + `core/types/storage.ts` — Added `dormant` to the IdeaStatus enum
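Given the columns named above, and the INSERT the new test file performs against them, the v2 `archives` table plausibly looks something like the following; the exact DDL in `core/storage/database.ts` is not shown in this diff:

```typescript
// Assumed shape only, inferred from the column names in this entry and the
// INSERT statement in archive-storage.test.ts; types and indexes may differ.
const CREATE_ARCHIVES_TABLE = `
  CREATE TABLE IF NOT EXISTS archives (
    id          TEXT PRIMARY KEY,
    entity_type TEXT NOT NULL,  -- 'shipped' | 'idea' | 'queue_task' | 'paused_task' | 'memory_entry'
    entity_id   TEXT NOT NULL,
    entity_data TEXT NOT NULL,  -- JSON snapshot of the archived entity
    summary     TEXT,           -- optional one-line summary kept for context
    archived_at TEXT NOT NULL,  -- ISO 8601 timestamp
    reason      TEXT NOT NULL   -- e.g. 'age' or 'dormant'
  )
`
```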
+
+ ### Test Plan
+
+ #### For QA
+ 1. Run `prjct sync` with >90d shipped features — verify archival and removal from context
+ 2. Run sync with >180d pending ideas — verify dormant status, excluded from `ideas.md`
+ 3. Run sync with >7d completed queue tasks — verify removal and archival
+ 4. Run sync with >30d paused tasks — verify archival to SQLite
+ 5. Create >500 memory entries, sync — verify cap at 500
+ 6. `bun test` — all 947+ tests pass
+ 7. Verify recent items are NOT archived
+
+ #### For Users
+ **What changed:** Storage data is automatically cleaned up during sync. Old data is archived, not deleted.
+ **How to use:** No action needed — runs automatically on every sync.
+ **Breaking changes:** Ideas can now have a `dormant` status (new enum value).
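The `dormant` addition implies a status union along these lines; `pending` and `converted` are the only other values visible in this diff, so the real IdeaStatus enum may have more:

```typescript
// Assumed shape: only the statuses visible in this diff are listed.
type IdeaStatus = 'pending' | 'converted' | 'dormant'

interface Idea {
  id: string
  text: string
  status: IdeaStatus
}

// Dormant ideas stay in storage (and in the archives table) but are
// excluded when the markdown context is generated.
function contextIdeas(ideas: Idea[]): Idea[] {
  return ideas.filter((idea) => idea.status !== 'dormant')
}
```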
+
  ## [1.18.0] - 2026-02-09
 
  ### Features
package/core/__tests__/storage/archive-storage.test.ts ADDED
@@ -0,0 +1,455 @@
+ /**
+  * Archive Storage Tests (PRJ-267)
+  *
+  * Tests for the archive infrastructure and archival policies:
+  * - Archive table operations (insert, query, restore)
+  * - Shipped features archival (>90 days)
+  * - Ideas dormancy (>180 days pending)
+  * - Queue cleanup (>7 days completed)
+  * - Paused task archival (>30 days)
+  * - Memory log capping (500 entries)
+  */
+
+ import { afterEach, beforeEach, describe, expect, it } from 'bun:test'
+ import fs from 'node:fs/promises'
+ import os from 'node:os'
+ import path from 'node:path'
+ import pathManager from '../../infrastructure/path-manager'
+ import { ARCHIVE_POLICIES, archiveStorage } from '../../storage/archive-storage'
+ import { prjctDb } from '../../storage/database'
+ import { ideasStorage } from '../../storage/ideas-storage'
+ import { queueStorage } from '../../storage/queue-storage'
+ import { shippedStorage } from '../../storage/shipped-storage'
+ import { stateStorage } from '../../storage/state-storage'
+ import { getTimestamp } from '../../utils/date-helper'
+
+ // =============================================================================
+ // Test Setup
+ // =============================================================================
+
+ let tmpRoot: string
+ let testProjectId: string
+
+ const originalGetGlobalProjectPath = pathManager.getGlobalProjectPath.bind(pathManager)
+ const originalGetFilePath = pathManager.getFilePath.bind(pathManager)
+
+ function daysAgoISO(days: number): string {
+   const d = new Date()
+   d.setDate(d.getDate() - days)
+   return d.toISOString()
+ }
+
+ describe('Archive Storage', () => {
+   beforeEach(async () => {
+     tmpRoot = await fs.mkdtemp(path.join(os.tmpdir(), 'prjct-archive-test-'))
+     testProjectId = 'test-archive-project'
+
+     pathManager.getGlobalProjectPath = (projectId: string) => path.join(tmpRoot, projectId)
+
+     pathManager.getFilePath = (projectId: string, layer: string, filename: string) =>
+       path.join(tmpRoot, projectId, layer, filename)
+
+     // Ensure all required dirs exist
+     const dirs = ['context', 'memory', 'core', 'progress', 'planning', 'sync']
+     await Promise.all(
+       dirs.map((d) => fs.mkdir(path.join(tmpRoot, testProjectId, d), { recursive: true }))
+     )
+
+     // Create empty pending.json for event bus
+     await fs.writeFile(path.join(tmpRoot, testProjectId, 'sync', 'pending.json'), '[]', 'utf-8')
+
+     // Initialize the database (triggers migrations including archives table)
+     prjctDb.getDb(testProjectId)
+   })
+
+   afterEach(async () => {
+     prjctDb.close()
+     pathManager.getGlobalProjectPath = originalGetGlobalProjectPath
+     pathManager.getFilePath = originalGetFilePath
+
+     if (tmpRoot) {
+       await fs.rm(tmpRoot, { recursive: true, force: true })
+     }
+   })
+
+   // ===========================================================================
+   // Archive Table Operations
+   // ===========================================================================
+
+   describe('archive table', () => {
+     it('should archive a single item', () => {
+       const id = archiveStorage.archive(testProjectId, {
+         entityType: 'shipped',
+         entityId: 'ship-1',
+         entityData: { name: 'Feature A', version: '1.0.0' },
+         summary: 'Feature A v1.0.0',
+         reason: 'age',
+       })
+
+       expect(id).toBeTruthy()
+
+       const records = archiveStorage.getArchived(testProjectId, 'shipped')
+       expect(records).toHaveLength(1)
+       expect(records[0].entity_id).toBe('ship-1')
+       expect(records[0].summary).toBe('Feature A v1.0.0')
+     })
+
+     it('should archive multiple items in a transaction', () => {
+       const count = archiveStorage.archiveMany(testProjectId, [
+         { entityType: 'shipped', entityId: 's1', entityData: { a: 1 }, reason: 'age' },
+         { entityType: 'shipped', entityId: 's2', entityData: { a: 2 }, reason: 'age' },
+         { entityType: 'idea', entityId: 'i1', entityData: { b: 1 }, reason: 'dormant' },
+       ])
+
+       expect(count).toBe(3)
+
+       const stats = archiveStorage.getStats(testProjectId)
+       expect(stats.shipped).toBe(2)
+       expect(stats.idea).toBe(1)
+       expect(stats.total).toBe(3)
+     })
+
+     it('should restore an archived item', () => {
+       archiveStorage.archive(testProjectId, {
+         entityType: 'shipped',
+         entityId: 'ship-1',
+         entityData: { name: 'restored' },
+         reason: 'age',
+       })
+
+       const records = archiveStorage.getArchived(testProjectId)
+       expect(records).toHaveLength(1)
+
+       const data = archiveStorage.restore(testProjectId, records[0].id)
+       expect(data).toEqual({ name: 'restored' })
+
+       // Should be removed from archive
+       const after = archiveStorage.getArchived(testProjectId)
+       expect(after).toHaveLength(0)
+     })
+
+     it('should prune old archives', () => {
+       // Insert an archive with old timestamp
+       const db = prjctDb.getDb(testProjectId)
+       const oldDate = daysAgoISO(400)
+       db.prepare(
+         'INSERT INTO archives (id, entity_type, entity_id, entity_data, archived_at, reason) VALUES (?, ?, ?, ?, ?, ?)'
+       ).run('old-1', 'shipped', 's1', '{}', oldDate, 'age')
+
+       archiveStorage.archive(testProjectId, {
+         entityType: 'shipped',
+         entityId: 's2',
+         entityData: {},
+         reason: 'age',
+       })
+
+       const pruned = archiveStorage.pruneOldArchives(testProjectId, 365)
+       expect(pruned).toBe(1)
+
+       const remaining = archiveStorage.getArchived(testProjectId)
+       expect(remaining).toHaveLength(1)
+       expect(remaining[0].entity_id).toBe('s2')
+     })
+   })
+
+   // ===========================================================================
+   // Shipped Features Archival
+   // ===========================================================================
+
+   describe('shipped archival', () => {
+     it('should archive shipped features older than 90 days', async () => {
+       // Write shipped data with old and recent items
+       await shippedStorage.write(testProjectId, {
+         shipped: [
+           { id: 'recent', name: 'Recent', version: '2.0.0', shippedAt: daysAgoISO(10) },
+           { id: 'old', name: 'Old', version: '1.0.0', shippedAt: daysAgoISO(100) },
+         ],
+         lastUpdated: getTimestamp(),
+       })
+
+       const archived = await shippedStorage.archiveOldShipped(testProjectId)
+       expect(archived).toBe(1)
+
+       // Verify active storage only has recent
+       const data = await shippedStorage.read(testProjectId)
+       expect(data.shipped).toHaveLength(1)
+       expect(data.shipped[0].id).toBe('recent')
+
+       // Verify archive table has old item
+       const records = archiveStorage.getArchived(testProjectId, 'shipped')
+       expect(records).toHaveLength(1)
+       expect(records[0].entity_id).toBe('old')
+       expect(records[0].summary).toBe('Old v1.0.0')
+     })
+
+     it('should not archive recent shipped features', async () => {
+       await shippedStorage.write(testProjectId, {
+         shipped: [
+           { id: 'r1', name: 'R1', version: '1.0.0', shippedAt: daysAgoISO(5) },
+           { id: 'r2', name: 'R2', version: '1.1.0', shippedAt: daysAgoISO(30) },
+         ],
+         lastUpdated: getTimestamp(),
+       })
+
+       const archived = await shippedStorage.archiveOldShipped(testProjectId)
+       expect(archived).toBe(0)
+
+       const data = await shippedStorage.read(testProjectId)
+       expect(data.shipped).toHaveLength(2)
+     })
+   })
+
+   // ===========================================================================
+   // Ideas Dormancy
+   // ===========================================================================
+
+   describe('ideas dormancy', () => {
+     it('should mark pending ideas older than 180 days as dormant', async () => {
+       await ideasStorage.write(testProjectId, {
+         ideas: [
+           {
+             id: 'new',
+             text: 'New idea',
+             status: 'pending',
+             priority: 'medium',
+             tags: [],
+             addedAt: daysAgoISO(10),
+           },
+           {
+             id: 'stale',
+             text: 'Stale idea',
+             status: 'pending',
+             priority: 'low',
+             tags: [],
+             addedAt: daysAgoISO(200),
+           },
+           {
+             id: 'converted',
+             text: 'Converted',
+             status: 'converted',
+             priority: 'high',
+             tags: [],
+             addedAt: daysAgoISO(300),
+           },
+         ],
+         lastUpdated: getTimestamp(),
+       })
+
+       const dormant = await ideasStorage.markDormantIdeas(testProjectId)
+       expect(dormant).toBe(1)
+
+       const data = await ideasStorage.read(testProjectId)
+       const stale = data.ideas.find((i) => i.id === 'stale')
+       expect(stale?.status).toBe('dormant')
+
+       // New idea should remain pending
+       const fresh = data.ideas.find((i) => i.id === 'new')
+       expect(fresh?.status).toBe('pending')
+
+       // Converted should remain converted
+       const conv = data.ideas.find((i) => i.id === 'converted')
+       expect(conv?.status).toBe('converted')
+
+       // Archive table should have the dormant idea
+       const records = archiveStorage.getArchived(testProjectId, 'idea')
+       expect(records).toHaveLength(1)
+     })
+
+     it('should exclude dormant ideas from markdown context', async () => {
+       await ideasStorage.write(testProjectId, {
+         ideas: [
+           {
+             id: 'active',
+             text: 'Active idea',
+             status: 'pending',
+             priority: 'medium',
+             tags: [],
+             addedAt: daysAgoISO(5),
+           },
+           {
+             id: 'dormant',
+             text: 'Dormant idea',
+             status: 'dormant',
+             priority: 'low',
+             tags: [],
+             addedAt: daysAgoISO(200),
+           },
+         ],
+         lastUpdated: getTimestamp(),
+       })
+
+       // Read the generated markdown context file
+       const contextPath = pathManager.getFilePath(testProjectId, 'planning', 'ideas.md')
+       const md = await fs.readFile(contextPath, 'utf-8')
+
+       expect(md).toContain('Active idea')
+       expect(md).not.toContain('Dormant idea')
+       expect(md).toContain('1 dormant idea(s) excluded from context')
+     })
+   })
+
+   // ===========================================================================
+   // Queue Cleanup
+   // ===========================================================================
+
+   describe('queue cleanup', () => {
+     it('should remove completed tasks older than 7 days', async () => {
+       await queueStorage.write(testProjectId, {
+         tasks: [
+           {
+             id: 'active',
+             description: 'Active',
+             type: 'feature',
+             priority: 'medium',
+             section: 'active',
+             createdAt: daysAgoISO(1),
+             completed: false,
+           },
+           {
+             id: 'recent-done',
+             description: 'Recent done',
+             type: 'feature',
+             priority: 'medium',
+             section: 'active',
+             createdAt: daysAgoISO(5),
+             completed: true,
+             completedAt: daysAgoISO(2),
+           },
+           {
+             id: 'old-done',
+             description: 'Old done',
+             type: 'feature',
+             priority: 'low',
+             section: 'active',
+             createdAt: daysAgoISO(30),
+             completed: true,
+             completedAt: daysAgoISO(10),
+           },
+         ],
+         lastUpdated: getTimestamp(),
+       })
+
+       const removed = await queueStorage.removeStaleCompleted(testProjectId)
+       expect(removed).toBe(1)
+
+       const data = await queueStorage.read(testProjectId)
+       expect(data.tasks).toHaveLength(2)
+       expect(data.tasks.map((t) => t.id).sort()).toEqual(['active', 'recent-done'])
+
+       // Archive should have the old completed task
+       const records = archiveStorage.getArchived(testProjectId, 'queue_task')
+       expect(records).toHaveLength(1)
+       expect(records[0].entity_id).toBe('old-done')
+     })
+   })
+
+   // ===========================================================================
+   // Paused Task Archival
+   // ===========================================================================
+
+   describe('paused task archival', () => {
+     it('should archive paused tasks older than 30 days', async () => {
+       await stateStorage.write(testProjectId, {
+         currentTask: null,
+         previousTask: null,
+         pausedTasks: [
+           {
+             id: 'recent',
+             description: 'Recent pause',
+             status: 'paused',
+             startedAt: daysAgoISO(35),
+             pausedAt: daysAgoISO(5),
+           },
+           {
+             id: 'stale',
+             description: 'Stale pause',
+             status: 'paused',
+             startedAt: daysAgoISO(60),
+             pausedAt: daysAgoISO(40),
+           },
+         ],
+         lastUpdated: getTimestamp(),
+       })
+
+       const archived = await stateStorage.archiveStalePausedTasks(testProjectId)
+       expect(archived).toHaveLength(1)
+       expect(archived[0].id).toBe('stale')
+
+       // Active state should only have recent
+       const state = await stateStorage.read(testProjectId)
+       expect(state.pausedTasks).toHaveLength(1)
+       expect(state.pausedTasks![0].id).toBe('recent')
+
+       // Archive table should have stale
+       const records = archiveStorage.getArchived(testProjectId, 'paused_task')
+       expect(records).toHaveLength(1)
+       expect(records[0].entity_id).toBe('stale')
+     })
+   })
+
+   // ===========================================================================
+   // Memory Log Capping
+   // ===========================================================================
+
+   describe('memory log capping', () => {
+     it('should cap memory entries at max limit', async () => {
+       const memoryPath = pathManager.getFilePath(testProjectId, 'memory', 'context.jsonl')
+
+       // Write more entries than the limit
+       const entries: string[] = []
+       const total = ARCHIVE_POLICIES.MEMORY_MAX_ENTRIES + 50
+       for (let i = 0; i < total; i++) {
+         entries.push(
+           JSON.stringify({
+             timestamp: new Date(Date.now() - (total - i) * 1000).toISOString(),
+             action: `action-${i}`,
+             data: { index: i },
+           })
+         )
+       }
+       await fs.writeFile(memoryPath, `${entries.join('\n')}\n`, 'utf-8')
+
+       // Import and use memoryService
+       const { memoryService } = await import('../../services/memory-service')
+       const capped = await memoryService.capEntries(testProjectId)
+       expect(capped).toBe(50)
+
+       // File should now have exactly max entries
+       const content = await fs.readFile(memoryPath, 'utf-8')
+       const remaining = content.trim().split('\n').filter(Boolean)
+       expect(remaining).toHaveLength(ARCHIVE_POLICIES.MEMORY_MAX_ENTRIES)
+
+       // Archive should have the overflow
+       const records = archiveStorage.getArchived(testProjectId, 'memory_entry')
+       expect(records).toHaveLength(50)
+     })
+
+     it('should not cap if under limit', async () => {
+       const memoryPath = pathManager.getFilePath(testProjectId, 'memory', 'context.jsonl')
+
+       const entries: string[] = []
+       for (let i = 0; i < 10; i++) {
+         entries.push(JSON.stringify({ timestamp: getTimestamp(), action: `a-${i}`, data: {} }))
+       }
+       await fs.writeFile(memoryPath, `${entries.join('\n')}\n`, 'utf-8')
+
+       const { memoryService } = await import('../../services/memory-service')
+       const capped = await memoryService.capEntries(testProjectId)
+       expect(capped).toBe(0)
+     })
+   })
+
+   // ===========================================================================
+   // Archive Policies Constants
+   // ===========================================================================
+
+   describe('archive policies', () => {
+     it('should have correct default policy values', () => {
+       expect(ARCHIVE_POLICIES.SHIPPED_RETENTION_DAYS).toBe(90)
+       expect(ARCHIVE_POLICIES.IDEA_DORMANT_DAYS).toBe(180)
+       expect(ARCHIVE_POLICIES.QUEUE_COMPLETED_DAYS).toBe(7)
+       expect(ARCHIVE_POLICIES.PAUSED_TASK_DAYS).toBe(30)
+       expect(ARCHIVE_POLICIES.MEMORY_MAX_ENTRIES).toBe(500)
+     })
+   })
+ })