prjct-cli 1.17.0 → 1.19.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,106 @@
1
1
  # Changelog
2
2
 
3
+ ## [1.19.0] - 2026-02-09
4
+
5
+ ### Features
6
+
7
+ - implement aggressive archival of stale storage data (PRJ-267) (#161)
8
+
9
+
10
+ ## [1.19.0] - 2026-02-09
11
+
12
+ ### Features
13
+
14
+ - **Aggressive archival of stale storage data** (PRJ-267): Automatic archival during `prjct sync` to keep LLM context lean
15
+ - Shipped features >90 days archived to SQLite `archives` table with 1-line summary
16
+ - Pending ideas >180 days marked `dormant` and excluded from LLM context
17
+ - Completed queue tasks >7 days auto-removed and archived
18
+ - Paused tasks >30 days archived with persistence (previously discarded)
19
+ - Memory log capped at 500 active entries, overflow archived
20
+
21
+ ### Implementation Details
22
+
23
+ New modules:
24
+ - `core/storage/archive-storage.ts` — Archive infrastructure: SQLite `archives` table, batch archival via transactions, restore, prune, stats
25
+ - `core/__tests__/storage/archive-storage.test.ts` — 13 tests covering all archival paths
26
+
27
+ Modified:
28
+ - `core/storage/database.ts` — Migration v2: `archives` table with entity_type, entity_id, entity_data, summary, reason columns
29
+ - `core/storage/shipped-storage.ts` — `archiveOldShipped()` method with 90-day retention policy
30
+ - `core/storage/ideas-storage.ts` — `markDormantIdeas()` method, `dormant` status excluded from markdown context
31
+ - `core/storage/queue-storage.ts` — `removeStaleCompleted()` method with 7-day retention
32
+ - `core/storage/state-storage.ts` — `archiveStalePausedTasks()` now persists to archive table before removal
33
+ - `core/services/memory-service.ts` — `capEntries()` method with 500-entry cap
34
+ - `core/services/sync-service.ts` — `archiveStaleData()` orchestrates all archival in parallel during sync
35
+ - `core/schemas/ideas.ts` + `core/types/storage.ts` — Added `dormant` to IdeaStatus enum
36
+
37
+ ### Test Plan
38
+
39
+ #### For QA
40
+ 1. Run `prjct sync` with >90d shipped features — verify archive and removal from context
41
+ 2. Run sync with >180d pending ideas — verify dormant status, excluded from `ideas.md`
42
+ 3. Run sync with >7d completed queue tasks — verify removal and archival
43
+ 4. Run sync with >30d paused tasks — verify archival to SQLite
44
+ 5. Create >500 memory entries, sync — verify cap at 500
45
+ 6. `bun test` — all 947+ tests pass
46
+ 7. Verify recent items are NOT archived
47
+
48
+ #### For Users
49
+ **What changed:** Storage data is automatically cleaned up during sync. Old data is archived, not deleted.
50
+ **How to use:** No action needed — runs automatically on every sync.
51
+ **Breaking changes:** Ideas can now have a `dormant` status (new `IdeaStatus` enum value); code that matches exhaustively on idea status must handle it.
52
+
53
+ ## [1.18.0] - 2026-02-09
54
+
55
+ ### Features
56
+
57
+ - implement incremental sync with file hashing (PRJ-305) (#160)
58
+
59
+
60
+ ## [1.18.0] - 2026-02-09
61
+
62
+ ### Features
63
+
64
+ - **Incremental sync**: `prjct sync` now only re-analyzes files that changed since last sync (PRJ-305)
65
+ - File hashing with Bun.hash (xxHash64) — <100ms for 500 files
66
+ - Change propagation through import graph (1-level reverse edges)
67
+ - Conditional index rebuilds: BM25, import graph, co-change only when source files change
68
+ - Conditional agent regeneration: only when config files (package.json, tsconfig.json) change
69
+ - `prjct sync --full` flag to force complete re-analysis
70
+
71
+ ### Implementation Details
72
+
73
+ New modules:
74
+ - `core/domain/file-hasher.ts` — Hash computation via Bun.hash, SQLite registry using `index_checksums` table, diff detection (added/modified/deleted/unchanged)
75
+ - `core/domain/change-propagator.ts` — Import graph reverse-edge lookup for 1-level change propagation, domain classification for affected files
76
+
77
+ Modified:
78
+ - `core/services/sync-service.ts` — Incremental decision logic: detect changes → propagate → conditionally rebuild indexes and agents
79
+ - `core/services/watch-service.ts` — Passes accumulated `changedFiles` to sync options
80
+ - `core/types/project-sync.ts` — Added `full`, `changedFiles` to `SyncOptions` + `IncrementalInfo` result type
81
+ - CLI chain (`core/index.ts` → `commands.ts` → `analysis.ts`) — Wired `--full` flag through
82
+
83
+ ### Learnings
84
+
85
+ - Bun's `fs.readdir` with `withFileTypes` returns `Dirent<NonSharedBuffer>` — a `String()` cast is needed for `.name`
86
+ - Existing `index_checksums` SQLite table was already set up (PRJ-303) — zero schema changes needed
87
+ - Import graph reverse edges (from PRJ-304) enable efficient 1-level propagation without rebuilding the graph
88
+
89
+ ### Test Plan
90
+
91
+ #### For QA
92
+ 1. Run `prjct sync` on fresh project (no hash cache) — should behave as full sync
93
+ 2. Run `prjct sync` again without changes — should skip index rebuilds and agent regeneration
94
+ 3. Modify a `.ts` file, run `prjct sync` — should detect change and rebuild indexes
95
+ 4. Modify `package.json`, run `prjct sync` — should regenerate agents
96
+ 5. Run `prjct sync --full` — should force complete re-analysis
97
+ 6. Run `prjct watch`, change a file — should pass changedFiles to sync
98
+
99
+ #### For Users
100
+ **What changed:** `prjct sync` is now incremental by default.
101
+ **How to use:** No changes needed. Use `prjct sync --full` to force complete re-analysis.
102
+ **Breaking changes:** None
103
+
3
104
  ## [1.17.0] - 2026-02-09
4
105
 
5
106
  ### Features
@@ -0,0 +1,100 @@
1
+ import { describe, expect, test } from 'bun:test'
2
+ import { affectedDomains, propagateChanges } from '../../domain/change-propagator'
3
+ import type { FileDiff } from '../../domain/file-hasher'
4
+
5
+ describe('change-propagator', () => {
6
+ // =========================================================================
7
+ // propagateChanges
8
+ // =========================================================================
9
+
10
+ describe('propagateChanges', () => {
11
+ test('returns direct changes when no import graph exists', () => {
12
+ const diff: FileDiff = {
13
+ added: ['src/new.ts'],
14
+ modified: ['src/changed.ts'],
15
+ deleted: ['src/removed.ts'],
16
+ unchanged: ['src/same.ts'],
17
+ }
18
+
19
+ // Use a fake projectId that won't have a graph
20
+ const result = propagateChanges(diff, 'nonexistent-project')
21
+
22
+ expect(result.directlyChanged).toEqual(['src/new.ts', 'src/changed.ts'])
23
+ expect(result.affectedByImports).toEqual([])
24
+ expect(result.allAffected).toEqual(['src/new.ts', 'src/changed.ts'])
25
+ expect(result.deleted).toEqual(['src/removed.ts'])
26
+ })
27
+
28
+ test('empty diff returns empty propagation', () => {
29
+ const diff: FileDiff = {
30
+ added: [],
31
+ modified: [],
32
+ deleted: [],
33
+ unchanged: ['src/a.ts', 'src/b.ts'],
34
+ }
35
+
36
+ const result = propagateChanges(diff, 'nonexistent-project')
37
+
38
+ expect(result.directlyChanged).toEqual([])
39
+ expect(result.affectedByImports).toEqual([])
40
+ expect(result.allAffected).toEqual([])
41
+ })
42
+ })
43
+
44
+ // =========================================================================
45
+ // affectedDomains
46
+ // =========================================================================
47
+
48
+ describe('affectedDomains', () => {
49
+ test('detects frontend files', () => {
50
+ const domains = affectedDomains([
51
+ 'src/components/Button.tsx',
52
+ 'src/pages/Home.jsx',
53
+ 'styles/main.css',
54
+ ])
55
+ expect(domains.has('frontend')).toBe(true)
56
+ expect(domains.has('uxui')).toBe(true)
57
+ })
58
+
59
+ test('detects backend files', () => {
60
+ const domains = affectedDomains(['core/services/auth.ts', 'core/domain/user.ts'])
61
+ expect(domains.has('backend')).toBe(true)
62
+ })
63
+
64
+ test('detects testing files', () => {
65
+ const domains = affectedDomains([
66
+ 'core/__tests__/auth.test.ts',
67
+ 'src/components/Button.spec.tsx',
68
+ ])
69
+ expect(domains.has('testing')).toBe(true)
70
+ })
71
+
72
+ test('detects devops files', () => {
73
+ const domains = affectedDomains(['Dockerfile', '.github/workflows/ci.yml'])
74
+ expect(domains.has('devops')).toBe(true)
75
+ })
76
+
77
+ test('detects database files', () => {
78
+ const domains = affectedDomains(['prisma/schema.prisma', 'db/migrations/001.sql'])
79
+ expect(domains.has('database')).toBe(true)
80
+ })
81
+
82
+ test('handles mixed domain files', () => {
83
+ const domains = affectedDomains([
84
+ 'src/components/Form.tsx', // frontend + uxui
85
+ 'core/services/api.ts', // backend
86
+ 'Dockerfile', // devops
87
+ 'core/__tests__/api.test.ts', // testing + backend
88
+ ])
89
+ expect(domains.has('frontend')).toBe(true)
90
+ expect(domains.has('backend')).toBe(true)
91
+ expect(domains.has('devops')).toBe(true)
92
+ expect(domains.has('testing')).toBe(true)
93
+ })
94
+
95
+ test('empty file list returns empty domains', () => {
96
+ const domains = affectedDomains([])
97
+ expect(domains.size).toBe(0)
98
+ })
99
+ })
100
+ })
@@ -0,0 +1,146 @@
1
+ import { describe, expect, test } from 'bun:test'
2
+ import path from 'node:path'
3
+ import { computeHashes, diffHashes, type FileHash } from '../../domain/file-hasher'
4
+
5
+ describe('file-hasher', () => {
6
+ // =========================================================================
7
+ // diffHashes
8
+ // =========================================================================
9
+
10
+ describe('diffHashes', () => {
11
+ const makeHash = (filePath: string, hash: string): FileHash => ({
12
+ path: filePath,
13
+ hash,
14
+ size: 100,
15
+ mtime: '2026-01-01T00:00:00.000Z',
16
+ })
17
+
18
+ test('detects added files', () => {
19
+ const current = new Map<string, FileHash>([
20
+ ['src/new-file.ts', makeHash('src/new-file.ts', 'xxh64:abc')],
21
+ ['src/existing.ts', makeHash('src/existing.ts', 'xxh64:def')],
22
+ ])
23
+ const stored = new Map<string, FileHash>([
24
+ ['src/existing.ts', makeHash('src/existing.ts', 'xxh64:def')],
25
+ ])
26
+
27
+ const diff = diffHashes(current, stored)
28
+ expect(diff.added).toEqual(['src/new-file.ts'])
29
+ expect(diff.modified).toEqual([])
30
+ expect(diff.unchanged).toEqual(['src/existing.ts'])
31
+ expect(diff.deleted).toEqual([])
32
+ })
33
+
34
+ test('detects modified files', () => {
35
+ const current = new Map<string, FileHash>([
36
+ ['src/changed.ts', makeHash('src/changed.ts', 'xxh64:new-hash')],
37
+ ])
38
+ const stored = new Map<string, FileHash>([
39
+ ['src/changed.ts', makeHash('src/changed.ts', 'xxh64:old-hash')],
40
+ ])
41
+
42
+ const diff = diffHashes(current, stored)
43
+ expect(diff.added).toEqual([])
44
+ expect(diff.modified).toEqual(['src/changed.ts'])
45
+ expect(diff.unchanged).toEqual([])
46
+ expect(diff.deleted).toEqual([])
47
+ })
48
+
49
+ test('detects deleted files', () => {
50
+ const current = new Map<string, FileHash>()
51
+ const stored = new Map<string, FileHash>([
52
+ ['src/removed.ts', makeHash('src/removed.ts', 'xxh64:abc')],
53
+ ])
54
+
55
+ const diff = diffHashes(current, stored)
56
+ expect(diff.added).toEqual([])
57
+ expect(diff.modified).toEqual([])
58
+ expect(diff.unchanged).toEqual([])
59
+ expect(diff.deleted).toEqual(['src/removed.ts'])
60
+ })
61
+
62
+ test('handles empty maps', () => {
63
+ const diff = diffHashes(new Map(), new Map())
64
+ expect(diff.added).toEqual([])
65
+ expect(diff.modified).toEqual([])
66
+ expect(diff.unchanged).toEqual([])
67
+ expect(diff.deleted).toEqual([])
68
+ })
69
+
70
+ test('handles first sync (no stored hashes)', () => {
71
+ const current = new Map<string, FileHash>([
72
+ ['src/a.ts', makeHash('src/a.ts', 'xxh64:1')],
73
+ ['src/b.ts', makeHash('src/b.ts', 'xxh64:2')],
74
+ ['src/c.ts', makeHash('src/c.ts', 'xxh64:3')],
75
+ ])
76
+ const stored = new Map<string, FileHash>()
77
+
78
+ const diff = diffHashes(current, stored)
79
+ expect(diff.added).toHaveLength(3)
80
+ expect(diff.modified).toEqual([])
81
+ expect(diff.unchanged).toEqual([])
82
+ expect(diff.deleted).toEqual([])
83
+ })
84
+
85
+ test('mixed changes: added + modified + deleted + unchanged', () => {
86
+ const current = new Map<string, FileHash>([
87
+ ['src/new.ts', makeHash('src/new.ts', 'xxh64:new')],
88
+ ['src/changed.ts', makeHash('src/changed.ts', 'xxh64:v2')],
89
+ ['src/same.ts', makeHash('src/same.ts', 'xxh64:same')],
90
+ ])
91
+ const stored = new Map<string, FileHash>([
92
+ ['src/changed.ts', makeHash('src/changed.ts', 'xxh64:v1')],
93
+ ['src/same.ts', makeHash('src/same.ts', 'xxh64:same')],
94
+ ['src/gone.ts', makeHash('src/gone.ts', 'xxh64:gone')],
95
+ ])
96
+
97
+ const diff = diffHashes(current, stored)
98
+ expect(diff.added).toEqual(['src/new.ts'])
99
+ expect(diff.modified).toEqual(['src/changed.ts'])
100
+ expect(diff.unchanged).toEqual(['src/same.ts'])
101
+ expect(diff.deleted).toEqual(['src/gone.ts'])
102
+ })
103
+ })
104
+
105
+ // =========================================================================
106
+ // computeHashes (integration — reads actual files)
107
+ // =========================================================================
108
+
109
+ describe('computeHashes', () => {
110
+ test('computes hashes for project files', async () => {
111
+ // Hash the prjct-cli project itself (small subset)
112
+ const projectPath = path.resolve(__dirname, '..', '..', '..')
113
+ const hashes = await computeHashes(projectPath)
114
+
115
+ // Should find many files
116
+ expect(hashes.size).toBeGreaterThan(50)
117
+
118
+ // Check a known file exists
119
+ const packageJson = hashes.get('package.json')
120
+ expect(packageJson).toBeDefined()
121
+ expect(packageJson!.hash).toMatch(/^(xxh64|fnv1a):/)
122
+ expect(packageJson!.size).toBeGreaterThan(0)
123
+ })
124
+
125
+ test('excludes node_modules and .git', async () => {
126
+ const projectPath = path.resolve(__dirname, '..', '..', '..')
127
+ const hashes = await computeHashes(projectPath)
128
+
129
+ for (const [filePath] of hashes) {
130
+ expect(filePath).not.toContain('node_modules')
131
+ expect(filePath).not.toContain('.git/')
132
+ }
133
+ })
134
+
135
+ test('hash is deterministic', async () => {
136
+ const projectPath = path.resolve(__dirname, '..', '..', '..')
137
+ const hashes1 = await computeHashes(projectPath)
138
+ const hashes2 = await computeHashes(projectPath)
139
+
140
+ // Same file should produce same hash
141
+ const pkg1 = hashes1.get('package.json')
142
+ const pkg2 = hashes2.get('package.json')
143
+ expect(pkg1?.hash).toBe(pkg2?.hash)
144
+ })
145
+ })
146
+ })