@getmikk/watcher 1.6.0 → 1.7.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +2 -2
- package/src/daemon.ts +10 -0
- package/src/file-watcher.ts +34 -14
- package/src/incremental-analyzer.ts +12 -7
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@getmikk/watcher",
|
|
3
|
-
"version": "1.6.0",
|
|
3
|
+
"version": "1.7.1",
|
|
4
4
|
"license": "Apache-2.0",
|
|
5
5
|
"repository": {
|
|
6
6
|
"type": "git",
|
|
@@ -21,7 +21,7 @@
|
|
|
21
21
|
"dev": "tsc --watch"
|
|
22
22
|
},
|
|
23
23
|
"dependencies": {
|
|
24
|
-
"@getmikk/core": "^1.6.0",
|
|
24
|
+
"@getmikk/core": "^1.7.1",
|
|
25
25
|
"chokidar": "^4.0.0"
|
|
26
26
|
},
|
|
27
27
|
"devDependencies": {
|
package/src/daemon.ts
CHANGED
|
@@ -70,6 +70,16 @@ export class WatcherDaemon {
|
|
|
70
70
|
this.analyzer.addParsedFile(file)
|
|
71
71
|
}
|
|
72
72
|
|
|
73
|
+
// Seed the file watcher's hash store with initial hashes so the first
|
|
74
|
+
// change to any file can be properly deduplicated by content.
|
|
75
|
+
const initialHashes = new Map<string, string>()
|
|
76
|
+
for (const file of parsedFiles) {
|
|
77
|
+
if (file.hash) {
|
|
78
|
+
initialHashes.set(file.path.replace(/\\/g, '/'), file.hash)
|
|
79
|
+
}
|
|
80
|
+
}
|
|
81
|
+
this.watcher.seedHashes(initialHashes)
|
|
82
|
+
|
|
73
83
|
// Subscribe to file changes with debouncing
|
|
74
84
|
this.watcher.on(async (event: WatcherEvent) => {
|
|
75
85
|
if (event.type === 'file:changed') {
|
package/src/file-watcher.ts
CHANGED
|
@@ -16,25 +16,37 @@ export class FileWatcher {
|
|
|
16
16
|
|
|
17
17
|
/** Start watching — non-blocking */
|
|
18
18
|
start(): void {
|
|
19
|
-
|
|
20
|
-
|
|
19
|
+
const excludesRegexes = this.config.exclude.map(
|
|
20
|
+
pattern => new RegExp(pattern.replace(/\*/g, '.*').replace(/\//g, '[\\\\/]'))
|
|
21
|
+
)
|
|
22
|
+
const includeExts = ['.ts', '.tsx']
|
|
23
|
+
|
|
24
|
+
this.watcher = watch(this.config.projectRoot, {
|
|
25
|
+
ignored: (testPath: string, stats?: import('fs').Stats) => {
|
|
26
|
+
// Ignore matching exclude patterns
|
|
27
|
+
if (excludesRegexes.some(r => r.test(testPath))) return true
|
|
28
|
+
// Keep directories so we can recurse
|
|
29
|
+
if (!stats || stats.isDirectory()) return false
|
|
30
|
+
// Ignore non-matching file extensions
|
|
31
|
+
return !includeExts.some(ext => testPath.endsWith(ext))
|
|
32
|
+
},
|
|
21
33
|
cwd: this.config.projectRoot,
|
|
22
34
|
ignoreInitial: true,
|
|
23
35
|
persistent: true,
|
|
24
36
|
awaitWriteFinish: {
|
|
25
|
-
stabilityThreshold:
|
|
26
|
-
pollInterval:
|
|
37
|
+
stabilityThreshold: 300,
|
|
38
|
+
pollInterval: 50,
|
|
27
39
|
},
|
|
28
40
|
})
|
|
29
41
|
|
|
30
|
-
this.watcher.on('
|
|
31
|
-
|
|
32
|
-
|
|
33
|
-
|
|
34
|
-
|
|
35
|
-
|
|
36
|
-
|
|
37
|
-
|
|
42
|
+
this.watcher.on('all', (event, relativePath) => {
|
|
43
|
+
if (event === 'change') {
|
|
44
|
+
this.handleChange(relativePath, 'changed')
|
|
45
|
+
} else if (event === 'add') {
|
|
46
|
+
this.handleChange(relativePath, 'added')
|
|
47
|
+
} else if (event === 'unlink') {
|
|
48
|
+
this.handleChange(relativePath, 'deleted')
|
|
49
|
+
}
|
|
38
50
|
})
|
|
39
51
|
}
|
|
40
52
|
|
|
@@ -49,11 +61,18 @@ export class FileWatcher {
|
|
|
49
61
|
this.handlers.push(handler)
|
|
50
62
|
}
|
|
51
63
|
|
|
52
|
-
/**
|
|
64
|
+
/** Seed the initial hash for a file (called at startup for all known files) */
|
|
53
65
|
setHash(filePath: string, hash: string): void {
|
|
54
66
|
this.hashStore.set(filePath, hash)
|
|
55
67
|
}
|
|
56
68
|
|
|
69
|
+
/** Bulk-seed hashes for all known files so first-change dedup works correctly */
|
|
70
|
+
seedHashes(entries: ReadonlyMap<string, string>): void {
|
|
71
|
+
for (const [p, h] of entries) {
|
|
72
|
+
this.hashStore.set(p.replace(/\\/g, '/'), h)
|
|
73
|
+
}
|
|
74
|
+
}
|
|
75
|
+
|
|
57
76
|
private async handleChange(relativePath: string, type: FileChangeEvent['type']): Promise<void> {
|
|
58
77
|
const fullPath = path.join(this.config.projectRoot, relativePath)
|
|
59
78
|
const normalizedPath = relativePath.replace(/\\/g, '/')
|
|
@@ -68,7 +87,8 @@ export class FileWatcher {
|
|
|
68
87
|
}
|
|
69
88
|
}
|
|
70
89
|
|
|
71
|
-
|
|
90
|
+
// Skip only when both hashes are known and identical (true no-op change)
|
|
91
|
+
if (oldHash !== null && newHash !== null && oldHash === newHash) return
|
|
72
92
|
|
|
73
93
|
if (newHash) this.hashStore.set(normalizedPath, newHash)
|
|
74
94
|
if (type === 'deleted') this.hashStore.delete(normalizedPath)
|
|
@@ -44,31 +44,36 @@ export class IncrementalAnalyzer {
|
|
|
44
44
|
return this.runFullAnalysis(events)
|
|
45
45
|
}
|
|
46
46
|
|
|
47
|
-
// Incremental: process each event
|
|
48
|
-
|
|
49
|
-
let combinedImpacted: string[] = []
|
|
47
|
+
// Incremental: process each event, collecting changed file paths
|
|
48
|
+
const changedFilePaths: string[] = []
|
|
50
49
|
|
|
51
50
|
for (const event of events) {
|
|
52
51
|
if (event.type === 'deleted') {
|
|
53
52
|
this.parsedFiles.delete(event.path)
|
|
54
|
-
|
|
53
|
+
changedFilePaths.push(event.path)
|
|
55
54
|
} else {
|
|
56
55
|
const parsed = await this.parseWithRaceCheck(event.path)
|
|
57
56
|
if (parsed) {
|
|
58
57
|
this.parsedFiles.set(event.path, parsed)
|
|
59
58
|
}
|
|
60
|
-
|
|
59
|
+
changedFilePaths.push(event.path)
|
|
61
60
|
}
|
|
62
61
|
}
|
|
63
62
|
|
|
64
|
-
// Rebuild graph from all parsed files
|
|
63
|
+
// Rebuild graph from all parsed files BEFORE deriving node IDs,
|
|
64
|
+
// so newly-added files are present in the graph when we look them up.
|
|
65
65
|
const allParsedFiles = [...this.parsedFiles.values()]
|
|
66
66
|
const builder = new GraphBuilder()
|
|
67
67
|
this.graph = builder.build(allParsedFiles)
|
|
68
68
|
|
|
69
|
+
// Map changed file paths → graph node IDs using the updated graph
|
|
70
|
+
const changedNodeIds = [...new Set(
|
|
71
|
+
changedFilePaths.flatMap(fp => this.findAffectedNodes(fp))
|
|
72
|
+
)]
|
|
73
|
+
|
|
69
74
|
// Run impact analysis on all changed nodes
|
|
70
75
|
const analyzer = new ImpactAnalyzer(this.graph)
|
|
71
|
-
const impactResult = analyzer.analyze(
|
|
76
|
+
const impactResult = analyzer.analyze(changedNodeIds)
|
|
72
77
|
|
|
73
78
|
// Recompile lock
|
|
74
79
|
const compiler = new LockCompiler()
|