prjct-cli 1.17.0 → 1.18.0
This diff compares publicly available versions of the package as released to a supported registry. It is provided for informational purposes only and reflects the changes between those versions as they appear in the public registry.
- package/CHANGELOG.md +51 -0
- package/core/__tests__/domain/change-propagator.test.ts +100 -0
- package/core/__tests__/domain/file-hasher.test.ts +146 -0
- package/core/commands/analysis.ts +9 -2
- package/core/commands/command-data.ts +3 -1
- package/core/commands/commands.ts +1 -0
- package/core/domain/change-propagator.ts +162 -0
- package/core/domain/file-hasher.ts +296 -0
- package/core/index.ts +1 -0
- package/core/services/sync-service.ts +127 -13
- package/core/services/watch-service.ts +1 -1
- package/core/types/index.ts +1 -0
- package/core/types/project-sync.ts +20 -0
- package/dist/bin/prjct.mjs +811 -471
- package/package.json +1 -1
package/CHANGELOG.md
CHANGED
@@ -1,5 +1,56 @@
 # Changelog
 
+## [1.18.0] - 2026-02-09
+
+### Features
+
+- implement incremental sync with file hashing (PRJ-305) (#160)
+
+
+## [1.18.0] - 2026-02-09
+
+### Features
+
+- **Incremental sync**: `prjct sync` now only re-analyzes files that changed since last sync (PRJ-305)
+- File hashing with Bun.hash (xxHash64) — <100ms for 500 files
+- Change propagation through import graph (1-level reverse edges)
+- Conditional index rebuilds: BM25, import graph, co-change only when source files change
+- Conditional agent regeneration: only when config files (package.json, tsconfig.json) change
+- `prjct sync --full` flag to force complete re-analysis
+
+### Implementation Details
+
+New modules:
+- `core/domain/file-hasher.ts` — Hash computation via Bun.hash, SQLite registry using `index_checksums` table, diff detection (added/modified/deleted/unchanged)
+- `core/domain/change-propagator.ts` — Import graph reverse-edge lookup for 1-level change propagation, domain classification for affected files
+
+Modified:
+- `core/services/sync-service.ts` — Incremental decision logic: detect changes → propagate → conditionally rebuild indexes and agents
+- `core/services/watch-service.ts` — Passes accumulated `changedFiles` to sync options
+- `core/types/project-sync.ts` — Added `full`, `changedFiles` to `SyncOptions` + `IncrementalInfo` result type
+- CLI chain (`core/index.ts` → `commands.ts` → `analysis.ts`) — Wired `--full` flag through
+
+### Learnings
+
+- Bun's `fs.readdir` with `withFileTypes` returns `Dirent<NonSharedBuffer>` — need `String()` cast for `.name`
+- Existing `index_checksums` SQLite table was already set up (PRJ-303) — zero schema changes needed
+- Import graph reverse edges (from PRJ-304) enable efficient 1-level propagation without rebuilding the graph
+
+### Test Plan
+
+#### For QA
+1. Run `prjct sync` on fresh project (no hash cache) — should behave as full sync
+2. Run `prjct sync` again without changes — should skip index rebuilds and agent regeneration
+3. Modify a `.ts` file, run `prjct sync` — should detect change and rebuild indexes
+4. Modify `package.json`, run `prjct sync` — should regenerate agents
+5. Run `prjct sync --full` — should force complete re-analysis
+6. Run `prjct watch`, change a file — should pass changedFiles to sync
+
+#### For Users
+**What changed:** `prjct sync` is now incremental by default.
+**How to use:** No changes needed. Use `prjct sync --full` to force complete re-analysis.
+**Breaking changes:** None
+
 ## [1.17.0] - 2026-02-09
 
 ### Features
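
The changelog above describes per-file hashing with Bun.hash (xxHash64), but `core/domain/file-hasher.ts` itself is not reproduced in this diff. Below is a minimal sketch of how that step could look, assuming Bun's built-in `Bun.file` and `Bun.hash.xxHash64` APIs; the `FileHash` shape and the `xxh64:` prefix are taken from the tests further down, while the helper name `hashFile` is hypothetical:

```ts
// Sketch only, not the published file-hasher.ts. Assumes Bun.file and
// Bun.hash.xxHash64 (which returns a bigint) are available.
import { statSync } from 'node:fs'
import path from 'node:path'

interface FileHash {
  path: string   // project-relative path
  hash: string   // e.g. "xxh64:1a2b3c..." (prefix format asserted by the tests)
  size: number
  mtime: string
}

// Hypothetical helper: hash one file's content with xxHash64.
async function hashFile(projectPath: string, relPath: string): Promise<FileHash> {
  const absPath = path.join(projectPath, relPath)
  const bytes = await Bun.file(absPath).arrayBuffer()
  const digest = Bun.hash.xxHash64(bytes)
  const stat = statSync(absPath)
  return {
    path: relPath,
    hash: `xxh64:${digest.toString(16)}`,
    size: stat.size,
    mtime: stat.mtime.toISOString(),
  }
}
```

The test regex also accepts an `fnv1a:` prefix, which suggests a fallback hasher for non-Bun runtimes; the sketch omits that path.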

package/core/__tests__/domain/change-propagator.test.ts
ADDED
@@ -0,0 +1,100 @@
import { describe, expect, test } from 'bun:test'
import { affectedDomains, propagateChanges } from '../../domain/change-propagator'
import type { FileDiff } from '../../domain/file-hasher'

describe('change-propagator', () => {
  // =========================================================================
  // propagateChanges
  // =========================================================================

  describe('propagateChanges', () => {
    test('returns direct changes when no import graph exists', () => {
      const diff: FileDiff = {
        added: ['src/new.ts'],
        modified: ['src/changed.ts'],
        deleted: ['src/removed.ts'],
        unchanged: ['src/same.ts'],
      }

      // Use a fake projectId that won't have a graph
      const result = propagateChanges(diff, 'nonexistent-project')

      expect(result.directlyChanged).toEqual(['src/new.ts', 'src/changed.ts'])
      expect(result.affectedByImports).toEqual([])
      expect(result.allAffected).toEqual(['src/new.ts', 'src/changed.ts'])
      expect(result.deleted).toEqual(['src/removed.ts'])
    })

    test('empty diff returns empty propagation', () => {
      const diff: FileDiff = {
        added: [],
        modified: [],
        deleted: [],
        unchanged: ['src/a.ts', 'src/b.ts'],
      }

      const result = propagateChanges(diff, 'nonexistent-project')

      expect(result.directlyChanged).toEqual([])
      expect(result.affectedByImports).toEqual([])
      expect(result.allAffected).toEqual([])
    })
  })

  // =========================================================================
  // affectedDomains
  // =========================================================================

  describe('affectedDomains', () => {
    test('detects frontend files', () => {
      const domains = affectedDomains([
        'src/components/Button.tsx',
        'src/pages/Home.jsx',
        'styles/main.css',
      ])
      expect(domains.has('frontend')).toBe(true)
      expect(domains.has('uxui')).toBe(true)
    })

    test('detects backend files', () => {
      const domains = affectedDomains(['core/services/auth.ts', 'core/domain/user.ts'])
      expect(domains.has('backend')).toBe(true)
    })

    test('detects testing files', () => {
      const domains = affectedDomains([
        'core/__tests__/auth.test.ts',
        'src/components/Button.spec.tsx',
      ])
      expect(domains.has('testing')).toBe(true)
    })

    test('detects devops files', () => {
      const domains = affectedDomains(['Dockerfile', '.github/workflows/ci.yml'])
      expect(domains.has('devops')).toBe(true)
    })

    test('detects database files', () => {
      const domains = affectedDomains(['prisma/schema.prisma', 'db/migrations/001.sql'])
      expect(domains.has('database')).toBe(true)
    })

    test('handles mixed domain files', () => {
      const domains = affectedDomains([
        'src/components/Form.tsx', // frontend + uxui
        'core/services/api.ts', // backend
        'Dockerfile', // devops
        'core/__tests__/api.test.ts', // testing + backend
      ])
      expect(domains.has('frontend')).toBe(true)
      expect(domains.has('backend')).toBe(true)
      expect(domains.has('devops')).toBe(true)
      expect(domains.has('testing')).toBe(true)
    })

    test('empty file list returns empty domains', () => {
      const domains = affectedDomains([])
      expect(domains.size).toBe(0)
    })
  })
})

package/core/__tests__/domain/file-hasher.test.ts
ADDED
@@ -0,0 +1,146 @@
import { describe, expect, test } from 'bun:test'
import path from 'node:path'
import { computeHashes, diffHashes, type FileHash } from '../../domain/file-hasher'

describe('file-hasher', () => {
  // =========================================================================
  // diffHashes
  // =========================================================================

  describe('diffHashes', () => {
    const makeHash = (filePath: string, hash: string): FileHash => ({
      path: filePath,
      hash,
      size: 100,
      mtime: '2026-01-01T00:00:00.000Z',
    })

    test('detects added files', () => {
      const current = new Map<string, FileHash>([
        ['src/new-file.ts', makeHash('src/new-file.ts', 'xxh64:abc')],
        ['src/existing.ts', makeHash('src/existing.ts', 'xxh64:def')],
      ])
      const stored = new Map<string, FileHash>([
        ['src/existing.ts', makeHash('src/existing.ts', 'xxh64:def')],
      ])

      const diff = diffHashes(current, stored)
      expect(diff.added).toEqual(['src/new-file.ts'])
      expect(diff.modified).toEqual([])
      expect(diff.unchanged).toEqual(['src/existing.ts'])
      expect(diff.deleted).toEqual([])
    })

    test('detects modified files', () => {
      const current = new Map<string, FileHash>([
        ['src/changed.ts', makeHash('src/changed.ts', 'xxh64:new-hash')],
      ])
      const stored = new Map<string, FileHash>([
        ['src/changed.ts', makeHash('src/changed.ts', 'xxh64:old-hash')],
      ])

      const diff = diffHashes(current, stored)
      expect(diff.added).toEqual([])
      expect(diff.modified).toEqual(['src/changed.ts'])
      expect(diff.unchanged).toEqual([])
      expect(diff.deleted).toEqual([])
    })

    test('detects deleted files', () => {
      const current = new Map<string, FileHash>()
      const stored = new Map<string, FileHash>([
        ['src/removed.ts', makeHash('src/removed.ts', 'xxh64:abc')],
      ])

      const diff = diffHashes(current, stored)
      expect(diff.added).toEqual([])
      expect(diff.modified).toEqual([])
      expect(diff.unchanged).toEqual([])
      expect(diff.deleted).toEqual(['src/removed.ts'])
    })

    test('handles empty maps', () => {
      const diff = diffHashes(new Map(), new Map())
      expect(diff.added).toEqual([])
      expect(diff.modified).toEqual([])
      expect(diff.unchanged).toEqual([])
      expect(diff.deleted).toEqual([])
    })

    test('handles first sync (no stored hashes)', () => {
      const current = new Map<string, FileHash>([
        ['src/a.ts', makeHash('src/a.ts', 'xxh64:1')],
        ['src/b.ts', makeHash('src/b.ts', 'xxh64:2')],
        ['src/c.ts', makeHash('src/c.ts', 'xxh64:3')],
      ])
      const stored = new Map<string, FileHash>()

      const diff = diffHashes(current, stored)
      expect(diff.added).toHaveLength(3)
      expect(diff.modified).toEqual([])
      expect(diff.unchanged).toEqual([])
      expect(diff.deleted).toEqual([])
    })

    test('mixed changes: added + modified + deleted + unchanged', () => {
      const current = new Map<string, FileHash>([
        ['src/new.ts', makeHash('src/new.ts', 'xxh64:new')],
        ['src/changed.ts', makeHash('src/changed.ts', 'xxh64:v2')],
        ['src/same.ts', makeHash('src/same.ts', 'xxh64:same')],
      ])
      const stored = new Map<string, FileHash>([
        ['src/changed.ts', makeHash('src/changed.ts', 'xxh64:v1')],
        ['src/same.ts', makeHash('src/same.ts', 'xxh64:same')],
        ['src/gone.ts', makeHash('src/gone.ts', 'xxh64:gone')],
      ])

      const diff = diffHashes(current, stored)
      expect(diff.added).toEqual(['src/new.ts'])
      expect(diff.modified).toEqual(['src/changed.ts'])
      expect(diff.unchanged).toEqual(['src/same.ts'])
      expect(diff.deleted).toEqual(['src/gone.ts'])
    })
  })

  // =========================================================================
  // computeHashes (integration — reads actual files)
  // =========================================================================

  describe('computeHashes', () => {
    test('computes hashes for project files', async () => {
      // Hash the prjct-cli project itself (small subset)
      const projectPath = path.resolve(__dirname, '..', '..', '..')
      const hashes = await computeHashes(projectPath)

      // Should find many files
      expect(hashes.size).toBeGreaterThan(50)

      // Check a known file exists
      const packageJson = hashes.get('package.json')
      expect(packageJson).toBeDefined()
      expect(packageJson!.hash).toMatch(/^(xxh64|fnv1a):/)
      expect(packageJson!.size).toBeGreaterThan(0)
    })

    test('excludes node_modules and .git', async () => {
      const projectPath = path.resolve(__dirname, '..', '..', '..')
      const hashes = await computeHashes(projectPath)

      for (const [filePath] of hashes) {
        expect(filePath).not.toContain('node_modules')
        expect(filePath).not.toContain('.git/')
      }
    })

    test('hash is deterministic', async () => {
      const projectPath = path.resolve(__dirname, '..', '..', '..')
      const hashes1 = await computeHashes(projectPath)
      const hashes2 = await computeHashes(projectPath)

      // Same file should produce same hash
      const pkg1 = hashes1.get('package.json')
      const pkg2 = hashes2.get('package.json')
      expect(pkg1?.hash).toBe(pkg2?.hash)
    })
  })
})
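
`core/domain/file-hasher.ts` (+296 lines) is not shown in this diff, so the behaviour of `diffHashes` has to be read off the tests above. A sketch that satisfies exactly those assertions, with the `FileHash`/`FileDiff` shapes mirrored from the test file, follows; it illustrates the expected contract rather than the package's actual code:

```ts
// A diffHashes consistent with the tests above; not the published implementation.
interface FileHash {
  path: string
  hash: string
  size: number
  mtime: string
}

interface FileDiff {
  added: string[]
  modified: string[]
  deleted: string[]
  unchanged: string[]
}

function diffHashes(current: Map<string, FileHash>, stored: Map<string, FileHash>): FileDiff {
  const diff: FileDiff = { added: [], modified: [], deleted: [], unchanged: [] }

  // Classify every file seen in the current scan against the stored registry.
  for (const [filePath, file] of current) {
    const previous = stored.get(filePath)
    if (!previous) diff.added.push(filePath)
    else if (previous.hash !== file.hash) diff.modified.push(filePath)
    else diff.unchanged.push(filePath)
  }

  // Files only present in the stored registry were deleted since the last sync.
  for (const filePath of stored.keys()) {
    if (!current.has(filePath)) diff.deleted.push(filePath)
  }

  return diff
}
```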

package/core/commands/analysis.ts
CHANGED
@@ -235,6 +235,7 @@ export class AnalysisCommands extends PrjctCommandsBase {
       yes?: boolean
       json?: boolean
       package?: string
+      full?: boolean
     } = {}
   ): Promise<CommandResult> {
     try {
@@ -308,7 +309,10 @@
     }

     // Do a dry-run sync to see what would change
-    const result = await syncService.sync(projectPath, {
+    const result = await syncService.sync(projectPath, {
+      aiTools: options.aiTools,
+      full: options.full,
+    })

     if (!result.success) {
       if (isNonInteractive) {
@@ -453,7 +457,10 @@
     out.spin('Syncing project...')

     // Use syncService to do EVERYTHING in one call
-    const result = await syncService.sync(projectPath, {
+    const result = await syncService.sync(projectPath, {
+      aiTools: options.aiTools,
+      full: options.full,
+    })

     if (!result.success) {
       out.fail(result.error || 'Sync failed')
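
The `full: options.full` threaded through the sync calls above ends up in `SyncOptions`, but the `core/types/project-sync.ts` diff (+20 lines) is not included here. The changelog only names the new fields (`full`, `changedFiles`, and an `IncrementalInfo` result type), so the following is a hedged guess at their shape; every type annotation and every `IncrementalInfo` member is an assumption:

```ts
// Hypothetical reconstruction of the project-sync.ts additions; only the field
// names come from the changelog, everything else is assumed.
interface SyncOptions {
  aiTools?: string[]      // pre-existing option seen in analysis.ts, type assumed
  /** Force a complete re-analysis, bypassing the hash cache (--full). */
  full?: boolean
  /** Changed paths accumulated by watch-service and handed to sync. */
  changedFiles?: string[]
}

// One plausible shape for reporting what the incremental path decided to do.
interface IncrementalInfo {
  directlyChanged: number
  affectedByImports: number
  deleted: number
  indexesRebuilt: boolean
  agentsRegenerated: boolean
}
```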

package/core/commands/command-data.ts
CHANGED
@@ -166,11 +166,13 @@ export const COMMANDS: CommandMeta[] = [
     name: 'sync',
     group: 'core',
     description: 'Sync project state and update workflow agents',
-    usage: { claude: '/p:sync', terminal: 'prjct sync [--package=<name>]' },
+    usage: { claude: '/p:sync', terminal: 'prjct sync [--package=<name>] [--full]' },
     implemented: true,
     hasTemplate: true,
     requiresProject: true,
     features: [
+      'Incremental sync: only re-analyzes changed files (default)',
+      'Force full sync: --full bypasses incremental cache',
       'Monorepo support: --package=<name> for single package sync',
       'Nested PRJCT.md inheritance',
       'Per-package CLAUDE.md generation',

package/core/domain/change-propagator.ts
ADDED
@@ -0,0 +1,162 @@
/**
 * Change Propagator — Import-Based Change Detection
 *
 * When a file changes, files that import it may also need re-analysis.
 * Uses the import graph (PRJ-304) to propagate changes 1 level deep
 * through the reverse dependency chain.
 *
 * Example: If `auth.ts` changes, and `user-service.ts` imports `auth.ts`,
 * then `user-service.ts` is also marked as "affected" even though its
 * content hash didn't change.
 *
 * @module domain/change-propagator
 * @version 1.0.0
 */

import type { FileDiff } from './file-hasher'
import { loadGraph } from './import-graph'

// =============================================================================
// Types
// =============================================================================

export interface PropagatedChanges {
  /** Files that changed directly (added + modified from hash diff) */
  directlyChanged: string[]
  /** Files that import a directly changed file (1 level deep) */
  affectedByImports: string[]
  /** Union of directlyChanged + affectedByImports (deduplicated) */
  allAffected: string[]
  /** Files that were deleted */
  deleted: string[]
}

// =============================================================================
// Propagation
// =============================================================================

/**
 * Given a file diff, propagate changes through the import graph.
 *
 * For each changed file, find all files that import it (reverse edges)
 * at depth 1. These "affected" files should be re-analyzed because
 * their imports have changed behavior.
 *
 * @param diff - The raw file diff from hash comparison
 * @param projectId - Project ID for loading the import graph
 * @returns Propagated changes including affected importers
 */
export function propagateChanges(diff: FileDiff, projectId: string): PropagatedChanges {
  const directlyChanged = [...diff.added, ...diff.modified]
  const directSet = new Set(directlyChanged)
  const affected = new Set<string>()

  // Try to load import graph for reverse-edge lookup
  const graph = loadGraph(projectId)

  if (graph) {
    // For each directly changed file, find its reverse edges (files that import it)
    for (const changedFile of directlyChanged) {
      const importers = graph.reverse[changedFile]
      if (importers) {
        for (const importer of importers) {
          // Only add if not already directly changed
          if (!directSet.has(importer)) {
            affected.add(importer)
          }
        }
      }
    }
  }

  const affectedByImports = Array.from(affected)
  const allAffected = [...directlyChanged, ...affectedByImports]

  return {
    directlyChanged,
    affectedByImports,
    deleted: diff.deleted,
    allAffected,
  }
}

/**
 * Determine which domain agents need regeneration based on changed files.
 *
 * Maps file extensions and paths to domains:
 * - .tsx/.jsx/.css/.scss/.html/.vue/.svelte → frontend
 * - .ts/.js (non-test, non-config) → backend
 * - .test.ts/.spec.ts → testing
 * - Dockerfile/.dockerignore → devops
 * - .sql/prisma/drizzle → database
 *
 * Returns the set of domain names that have affected files.
 */
export function affectedDomains(changedFiles: string[]): Set<string> {
  const domains = new Set<string>()

  for (const file of changedFiles) {
    const lower = file.toLowerCase()

    // Frontend indicators
    if (
      lower.endsWith('.tsx') ||
      lower.endsWith('.jsx') ||
      lower.endsWith('.css') ||
      lower.endsWith('.scss') ||
      lower.endsWith('.vue') ||
      lower.endsWith('.svelte') ||
      lower.includes('/components/') ||
      lower.includes('/pages/') ||
      lower.includes('/app/')
    ) {
      domains.add('frontend')
      domains.add('uxui')
    }

    // Testing indicators
    if (
      lower.includes('.test.') ||
      lower.includes('.spec.') ||
      lower.includes('__tests__') ||
      lower.includes('/test/')
    ) {
      domains.add('testing')
    }

    // DevOps indicators
    if (
      lower.includes('dockerfile') ||
      lower.includes('docker-compose') ||
      lower.includes('.dockerignore') ||
      lower.includes('.github/') ||
      lower.includes('ci/') ||
      lower.includes('cd/')
    ) {
      domains.add('devops')
    }

    // Database indicators
    if (
      lower.endsWith('.sql') ||
      lower.includes('prisma') ||
      lower.includes('drizzle') ||
      lower.includes('migration') ||
      lower.includes('/db/')
    ) {
      domains.add('database')
    }

    // Backend indicators (TypeScript/JavaScript that isn't clearly frontend/test)
    if (
      (lower.endsWith('.ts') || lower.endsWith('.js')) &&
      !lower.includes('.test.') &&
      !lower.includes('.spec.') &&
      !lower.endsWith('.d.ts')
    ) {
      domains.add('backend')
    }
  }

  return domains
}
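
`core/services/sync-service.ts` (+127/-13) is also omitted from this diff; the changelog summarizes its incremental logic as detect changes, propagate, then conditionally rebuild indexes and agents. A rough sketch of how that flow could combine the modules shown above, assuming hypothetical `loadStoredHashes`/`saveHashes` helpers in front of the SQLite `index_checksums` registry:

```ts
// Sketch of the incremental decision flow described in the changelog; not the
// published sync-service.ts. computeHashes, diffHashes, propagateChanges and
// affectedDomains are the real exports shown in this diff, while the registry
// helpers and the CONFIG_FILES set are assumptions.
import { computeHashes, diffHashes, type FileHash } from './domain/file-hasher'
import { affectedDomains, propagateChanges } from './domain/change-propagator'

declare function loadStoredHashes(projectId: string): Map<string, FileHash> // hypothetical
declare function saveHashes(projectId: string, hashes: Map<string, FileHash>): void // hypothetical

const CONFIG_FILES = new Set(['package.json', 'tsconfig.json'])

async function incrementalSync(projectPath: string, projectId: string, full = false) {
  const current = await computeHashes(projectPath)

  // With an empty "stored" map every file counts as added, so --full (and a
  // first sync with no cache) naturally degrades to a complete re-analysis.
  const stored = full ? new Map<string, FileHash>() : loadStoredHashes(projectId)
  const diff = diffHashes(current, stored)
  const changes = propagateChanges(diff, projectId)

  // Conditional index rebuilds: only when source files actually changed.
  const rebuildIndexes = changes.allAffected.length > 0 || changes.deleted.length > 0

  // Conditional agent regeneration: only when config files changed.
  const regenerateAgents = changes.directlyChanged.some((file) => CONFIG_FILES.has(file))

  // Domain classification narrows which agents would be touched.
  const domains = affectedDomains(changes.allAffected)

  saveHashes(projectId, current)
  return { rebuildIndexes, regenerateAgents, domains: [...domains], changes }
}
```

The real service presumably also folds the `changedFiles` accumulated by watch-service into this decision, which the sketch leaves out.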