@getmikk/watcher 1.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json ADDED
@@ -0,0 +1,27 @@
1
+ {
2
+ "name": "@getmikk/watcher",
3
+ "version": "1.2.0",
4
+ "type": "module",
5
+ "main": "./dist/index.js",
6
+ "types": "./dist/index.d.ts",
7
+ "exports": {
8
+ ".": {
9
+ "import": "./dist/index.js",
10
+ "types": "./dist/index.d.ts"
11
+ }
12
+ },
13
+ "scripts": {
14
+ "build": "tsc",
15
+ "test": "bun test",
16
+ "publish": "npm publish --access public",
17
+ "dev": "tsc --watch"
18
+ },
19
+ "dependencies": {
20
+ "@getmikk/core": "workspace:*",
21
+ "chokidar": "^4.0.0"
22
+ },
23
+ "devDependencies": {
24
+ "typescript": "^5.7.0",
25
+ "@types/node": "^22.0.0"
26
+ }
27
+ }
package/src/daemon.ts ADDED
@@ -0,0 +1,232 @@
1
+ import * as path from 'node:path'
2
+ import * as fs from 'node:fs/promises'
3
+ import {
4
+ GraphBuilder, LockCompiler, LockReader, ContractReader,
5
+ parseFiles, readFileContent, discoverFiles, logger,
6
+ type DependencyGraph, type MikkLock, type MikkContract
7
+ } from '@getmikk/core'
8
+ import { FileWatcher } from './file-watcher.js'
9
+ import { IncrementalAnalyzer } from './incremental-analyzer.js'
10
+ import type { WatcherConfig, WatcherEvent, FileChangeEvent } from './types.js'
11
+
12
/** Sync state persisted to .mikk/sync-state.json */
interface SyncState {
  // 'clean'    = last batch applied successfully
  // 'syncing'  = a batch is currently being analyzed
  // 'drifted'  = last analysis failed; lock may be stale
  // 'conflict' = NOTE(review): never written by WatcherDaemon — confirm other writers
  status: 'clean' | 'syncing' | 'drifted' | 'conflict'
  // Epoch milliseconds of the most recent state write
  lastUpdated: number
  // Deduplicated file count of the batch in flight (set while 'syncing')
  filesInFlight?: number
  // NOTE(review): never written by WatcherDaemon — presumably the graph root hash
  rootHash?: string
  // Error message recorded when status is 'drifted'
  error?: string
}
20
+
21
+ /**
22
+ * WatcherDaemon — long-running background process.
23
+ * Starts the FileWatcher, handles the IncrementalAnalyzer,
24
+ * writes updates to the lock file, and manages sync state.
25
+ *
26
+ * Features:
27
+ * - Debounces file changes (100ms window)
28
+ * - Batch threshold: if > 15 files in a batch, runs full analysis
29
+ * - PID file for single-instance enforcement
30
+ * - Atomic sync state writes
31
+ */
32
+ export class WatcherDaemon {
33
+ private watcher: FileWatcher
34
+ private analyzer: IncrementalAnalyzer | null = null
35
+ private lock: MikkLock | null = null
36
+ private contract: MikkContract | null = null
37
+ private handlers: ((event: WatcherEvent) => void)[] = []
38
+ private pendingEvents: FileChangeEvent[] = []
39
+ private debounceTimer: ReturnType<typeof setTimeout> | null = null
40
+ private processing = false
41
+
42
+ constructor(private config: WatcherConfig) {
43
+ this.watcher = new FileWatcher(config)
44
+ }
45
+
46
+ async start(): Promise<void> {
47
+ // Write PID file for single-instance enforcement
48
+ await this.writePidFile()
49
+
50
+ // Load existing contract and lock
51
+ const contractReader = new ContractReader()
52
+ const lockReader = new LockReader()
53
+ const contractPath = path.join(this.config.projectRoot, 'mikk.json')
54
+ const lockPath = path.join(this.config.projectRoot, 'mikk.lock.json')
55
+
56
+ this.contract = await contractReader.read(contractPath)
57
+ this.lock = await lockReader.read(lockPath)
58
+
59
+ // Parse all files to populate the analyzer
60
+ const filePaths = await discoverFiles(this.config.projectRoot)
61
+ const parsedFiles = await parseFiles(filePaths, this.config.projectRoot, (fp) =>
62
+ readFileContent(fp)
63
+ )
64
+ const graph = new GraphBuilder().build(parsedFiles)
65
+
66
+ this.analyzer = new IncrementalAnalyzer(graph, this.lock, this.contract, this.config.projectRoot)
67
+
68
+ // Add all parsed files to the analyzer
69
+ for (const file of parsedFiles) {
70
+ this.analyzer.addParsedFile(file)
71
+ }
72
+
73
+ // Subscribe to file changes with debouncing
74
+ this.watcher.on(async (event: WatcherEvent) => {
75
+ if (event.type === 'file:changed') {
76
+ this.enqueueChange(event.data)
77
+ }
78
+ // Forward events to external handlers
79
+ for (const handler of this.handlers) {
80
+ handler(event)
81
+ }
82
+ })
83
+
84
+ this.watcher.start()
85
+ await this.writeSyncState({ status: 'clean', lastUpdated: Date.now() })
86
+ logger.info('Mikk watcher started', { watching: this.config.include })
87
+ }
88
+
89
+ async stop(): Promise<void> {
90
+ await this.watcher.stop()
91
+ if (this.debounceTimer) clearTimeout(this.debounceTimer)
92
+ await this.removePidFile()
93
+ logger.info('Mikk watcher stopped')
94
+ }
95
+
96
+ on(handler: (event: WatcherEvent) => void): void {
97
+ this.handlers.push(handler)
98
+ }
99
+
100
+ // ─── Debounce & Batch Processing ──────────────────────────────
101
+
102
+ private enqueueChange(event: FileChangeEvent): void {
103
+ this.pendingEvents.push(event)
104
+
105
+ // Reset the debounce timer
106
+ if (this.debounceTimer) clearTimeout(this.debounceTimer)
107
+ this.debounceTimer = setTimeout(() => {
108
+ this.flushPendingEvents()
109
+ }, this.config.debounceMs || 100)
110
+ }
111
+
112
+ private async flushPendingEvents(): Promise<void> {
113
+ if (this.processing || this.pendingEvents.length === 0) return
114
+ this.processing = true
115
+
116
+ const events = [...this.pendingEvents]
117
+ this.pendingEvents = []
118
+
119
+ // Deduplicate by path (keep latest event per file)
120
+ const byPath = new Map<string, FileChangeEvent>()
121
+ for (const event of events) {
122
+ byPath.set(event.path, event)
123
+ }
124
+ const dedupedEvents = [...byPath.values()]
125
+
126
+ await this.writeSyncState({
127
+ status: 'syncing',
128
+ lastUpdated: Date.now(),
129
+ filesInFlight: dedupedEvents.length,
130
+ })
131
+
132
+ try {
133
+ await this.processBatch(dedupedEvents)
134
+ await this.writeSyncState({
135
+ status: 'clean',
136
+ lastUpdated: Date.now(),
137
+ })
138
+ } catch (err: any) {
139
+ await this.writeSyncState({
140
+ status: 'drifted',
141
+ lastUpdated: Date.now(),
142
+ error: err.message,
143
+ })
144
+ } finally {
145
+ this.processing = false
146
+
147
+ // If more events arrived during processing, flush again
148
+ if (this.pendingEvents.length > 0) {
149
+ this.flushPendingEvents()
150
+ }
151
+ }
152
+ }
153
+
154
+ private async processBatch(events: FileChangeEvent[]): Promise<void> {
155
+ if (!this.analyzer || !this.lock) return
156
+
157
+ try {
158
+ const result = await this.analyzer.analyzeBatch(events)
159
+ this.lock = result.lock
160
+
161
+ // Write updated lock
162
+ const lockPath = path.join(this.config.projectRoot, 'mikk.lock.json')
163
+ await fs.writeFile(lockPath, JSON.stringify(this.lock, null, 2), 'utf-8')
164
+
165
+ // Log batch info
166
+ if (result.mode === 'full') {
167
+ logger.info('Full re-analysis completed', {
168
+ filesChanged: events.length,
169
+ reason: 'Large batch detected (> 15 files)',
170
+ })
171
+ }
172
+
173
+ // Emit graph:updated event
174
+ for (const handler of this.handlers) {
175
+ handler({
176
+ type: 'graph:updated',
177
+ data: {
178
+ changedNodes: result.impactResult.changed,
179
+ impactedNodes: result.impactResult.impacted,
180
+ },
181
+ })
182
+ }
183
+
184
+ logger.info('Lock file updated', {
185
+ filesChanged: events.length,
186
+ mode: result.mode,
187
+ impactedNodes: result.impactResult.impacted.length,
188
+ })
189
+ } catch (err: any) {
190
+ logger.error('Failed to analyze file changes', {
191
+ files: events.map(e => e.path),
192
+ error: err.message,
193
+ })
194
+ for (const handler of this.handlers) {
195
+ handler({
196
+ type: 'sync:drifted',
197
+ data: {
198
+ reason: err.message,
199
+ affectedModules: events.flatMap(e => e.affectedModuleIds),
200
+ },
201
+ })
202
+ }
203
+ throw err
204
+ }
205
+ }
206
+
207
+ // ─── Sync State ───────────────────────────────────────────────
208
+
209
+ /** Write sync state atomically (write to temp, then rename) */
210
+ private async writeSyncState(state: SyncState): Promise<void> {
211
+ const mikkDir = path.join(this.config.projectRoot, '.mikk')
212
+ await fs.mkdir(mikkDir, { recursive: true })
213
+ const statePath = path.join(mikkDir, 'sync-state.json')
214
+ const tmpPath = statePath + '.tmp'
215
+ await fs.writeFile(tmpPath, JSON.stringify(state, null, 2), 'utf-8')
216
+ await fs.rename(tmpPath, statePath)
217
+ }
218
+
219
+ // ─── PID File ─────────────────────────────────────────────────
220
+
221
+ private async writePidFile(): Promise<void> {
222
+ const mikkDir = path.join(this.config.projectRoot, '.mikk')
223
+ await fs.mkdir(mikkDir, { recursive: true })
224
+ const pidPath = path.join(mikkDir, 'watcher.pid')
225
+ await fs.writeFile(pidPath, String(process.pid), 'utf-8')
226
+ }
227
+
228
+ private async removePidFile(): Promise<void> {
229
+ const pidPath = path.join(this.config.projectRoot, '.mikk', 'watcher.pid')
230
+ try { await fs.unlink(pidPath) } catch { /* ignore if missing */ }
231
+ }
232
+ }
@@ -0,0 +1,93 @@
1
+ import * as path from 'node:path'
2
+ import { watch } from 'chokidar'
3
+ import { hashFile } from '@getmikk/core'
4
+ import type { WatcherConfig, WatcherEvent, FileChangeEvent } from './types.js'
5
+
6
+ /**
7
+ * FileWatcher — wraps Chokidar to watch filesystem for changes.
8
+ * Computes hash of changed files and emits typed events.
9
+ */
10
+ export class FileWatcher {
11
+ private watcher: ReturnType<typeof watch> | null = null
12
+ private handlers: ((event: WatcherEvent) => void)[] = []
13
+ private hashStore = new Map<string, string>()
14
+
15
+ constructor(private config: WatcherConfig) { }
16
+
17
+ /** Start watching — non-blocking */
18
+ start(): void {
19
+ this.watcher = watch(this.config.include, {
20
+ ignored: this.config.exclude,
21
+ cwd: this.config.projectRoot,
22
+ ignoreInitial: true,
23
+ persistent: true,
24
+ awaitWriteFinish: {
25
+ stabilityThreshold: 50,
26
+ pollInterval: 10,
27
+ },
28
+ })
29
+
30
+ this.watcher.on('change', (relativePath: string) => {
31
+ this.handleChange(relativePath, 'changed')
32
+ })
33
+ this.watcher.on('add', (relativePath: string) => {
34
+ this.handleChange(relativePath, 'added')
35
+ })
36
+ this.watcher.on('unlink', (relativePath: string) => {
37
+ this.handleChange(relativePath, 'deleted')
38
+ })
39
+ }
40
+
41
+ /** Stop watching */
42
+ async stop(): Promise<void> {
43
+ await this.watcher?.close()
44
+ this.watcher = null
45
+ }
46
+
47
+ /** Register an event handler */
48
+ on(handler: (event: WatcherEvent) => void): void {
49
+ this.handlers.push(handler)
50
+ }
51
+
52
+ /** Set initial hash for a file */
53
+ setHash(filePath: string, hash: string): void {
54
+ this.hashStore.set(filePath, hash)
55
+ }
56
+
57
+ private async handleChange(relativePath: string, type: FileChangeEvent['type']): Promise<void> {
58
+ const fullPath = path.join(this.config.projectRoot, relativePath)
59
+ const normalizedPath = relativePath.replace(/\\/g, '/')
60
+ const oldHash = this.hashStore.get(normalizedPath) || null
61
+
62
+ let newHash: string | null = null
63
+ if (type !== 'deleted') {
64
+ try {
65
+ newHash = await hashFile(fullPath)
66
+ } catch {
67
+ return // File might have been deleted before we could read it
68
+ }
69
+ }
70
+
71
+ if (oldHash === newHash) return // Content unchanged
72
+
73
+ if (newHash) this.hashStore.set(normalizedPath, newHash)
74
+ if (type === 'deleted') this.hashStore.delete(normalizedPath)
75
+
76
+ const event: FileChangeEvent = {
77
+ type,
78
+ path: normalizedPath,
79
+ oldHash,
80
+ newHash,
81
+ timestamp: Date.now(),
82
+ affectedModuleIds: [], // filled by IncrementalAnalyzer
83
+ }
84
+
85
+ this.emit({ type: 'file:changed', data: event })
86
+ }
87
+
88
+ private emit(event: WatcherEvent): void {
89
+ for (const handler of this.handlers) {
90
+ handler(event)
91
+ }
92
+ }
93
+ }
@@ -0,0 +1,192 @@
1
+ import * as fs from 'node:fs/promises'
2
+ import * as path from 'node:path'
3
+ import {
4
+ getParser, GraphBuilder, ImpactAnalyzer, LockCompiler, hashFile,
5
+ type ParsedFile, type DependencyGraph, type MikkLock, type MikkContract, type ImpactResult
6
+ } from '@getmikk/core'
7
+ import type { FileChangeEvent } from './types.js'
8
+
9
+ /** Threshold: if batch size exceeds this, run full re-analysis */
10
+ const FULL_ANALYSIS_THRESHOLD = 15
11
+
12
+ /** Max retries for race-condition re-hash check */
13
+ const MAX_RETRIES = 3
14
+
15
+ /**
16
+ * IncrementalAnalyzer — re-parses only changed files, updates graph nodes,
17
+ * and recomputes affected module hashes. O(changed files) not O(whole repo).
18
+ *
19
+ * Supports batch analysis: if > 15 files change at once (e.g. git checkout),
20
+ * runs a full re-analysis instead of incremental.
21
+ *
22
+ * Race condition handling: after parsing, re-hashes the file and re-parses
23
+ * if the content changed during parsing (up to 3 retries).
24
+ */
25
+ export class IncrementalAnalyzer {
26
+ private parsedFiles: Map<string, ParsedFile> = new Map()
27
+
28
+ constructor(
29
+ private graph: DependencyGraph,
30
+ private lock: MikkLock,
31
+ private contract: MikkContract,
32
+ private projectRoot: string
33
+ ) { }
34
+
35
+ /** Handle a batch of file change events (debounced by daemon) */
36
+ async analyzeBatch(events: FileChangeEvent[]): Promise<{
37
+ graph: DependencyGraph
38
+ lock: MikkLock
39
+ impactResult: ImpactResult
40
+ mode: 'incremental' | 'full'
41
+ }> {
42
+ // If too many changes at once, run full analysis
43
+ if (events.length > FULL_ANALYSIS_THRESHOLD) {
44
+ return this.runFullAnalysis(events)
45
+ }
46
+
47
+ // Incremental: process each event
48
+ let combinedChanged: string[] = []
49
+ let combinedImpacted: string[] = []
50
+
51
+ for (const event of events) {
52
+ if (event.type === 'deleted') {
53
+ this.parsedFiles.delete(event.path)
54
+ combinedChanged.push(event.path)
55
+ } else {
56
+ const parsed = await this.parseWithRaceCheck(event.path)
57
+ if (parsed) {
58
+ this.parsedFiles.set(event.path, parsed)
59
+ }
60
+ combinedChanged.push(...this.findAffectedNodes(event.path))
61
+ }
62
+ }
63
+
64
+ // Rebuild graph from all parsed files
65
+ const allParsedFiles = [...this.parsedFiles.values()]
66
+ const builder = new GraphBuilder()
67
+ this.graph = builder.build(allParsedFiles)
68
+
69
+ // Run impact analysis on all changed nodes
70
+ const analyzer = new ImpactAnalyzer(this.graph)
71
+ const impactResult = analyzer.analyze([...new Set(combinedChanged)])
72
+
73
+ // Recompile lock
74
+ const compiler = new LockCompiler()
75
+ this.lock = compiler.compile(this.graph, this.contract, allParsedFiles)
76
+
77
+ return { graph: this.graph, lock: this.lock, impactResult, mode: 'incremental' }
78
+ }
79
+
80
+ /** Handle a single file change event */
81
+ async analyze(event: FileChangeEvent): Promise<{
82
+ graph: DependencyGraph
83
+ lock: MikkLock
84
+ impactResult: ImpactResult
85
+ }> {
86
+ const result = await this.analyzeBatch([event])
87
+ return { graph: result.graph, lock: result.lock, impactResult: result.impactResult }
88
+ }
89
+
90
+ /** Add a parsed file to the tracker */
91
+ addParsedFile(file: ParsedFile): void {
92
+ this.parsedFiles.set(file.path, file)
93
+ }
94
+
95
+ /** Get the current parsed file count */
96
+ get fileCount(): number {
97
+ return this.parsedFiles.size
98
+ }
99
+
100
+ // ─── Private ──────────────────────────────────────────────────
101
+
102
+ /**
103
+ * Parse a file with race-condition detection.
104
+ * After parsing, re-hash the file. If the hash differs from what we started with,
105
+ * the file changed during parsing — re-parse (up to MAX_RETRIES).
106
+ */
107
+ private async parseWithRaceCheck(changedFile: string): Promise<ParsedFile | null> {
108
+ const fullPath = path.join(this.projectRoot, changedFile)
109
+
110
+ for (let attempt = 0; attempt < MAX_RETRIES; attempt++) {
111
+ try {
112
+ const content = await fs.readFile(fullPath, 'utf-8')
113
+ const parser = getParser(changedFile)
114
+ const parsedFile = parser.parse(changedFile, content)
115
+
116
+ // Race condition check: re-hash after parse
117
+ try {
118
+ const postParseHash = await hashFile(fullPath)
119
+ if (postParseHash === parsedFile.hash) {
120
+ return parsedFile // Content stable
121
+ }
122
+ // Content changed during parse — retry
123
+ } catch {
124
+ return parsedFile // File may have been deleted, return what we have
125
+ }
126
+ } catch {
127
+ return null // File unreadable
128
+ }
129
+ }
130
+
131
+ // Exhausted retries — parse one final time and accept
132
+ try {
133
+ const content = await fs.readFile(fullPath, 'utf-8')
134
+ const parser = getParser(changedFile)
135
+ return parser.parse(changedFile, content)
136
+ } catch {
137
+ return null
138
+ }
139
+ }
140
+
141
+ /** Run a full re-analysis (for large batches like git checkout) */
142
+ private async runFullAnalysis(events: FileChangeEvent[]): Promise<{
143
+ graph: DependencyGraph
144
+ lock: MikkLock
145
+ impactResult: ImpactResult
146
+ mode: 'full'
147
+ }> {
148
+ // Remove deleted files
149
+ for (const event of events) {
150
+ if (event.type === 'deleted') {
151
+ this.parsedFiles.delete(event.path)
152
+ }
153
+ }
154
+
155
+ // Re-parse all non-deleted changed files
156
+ const nonDeleted = events.filter(e => e.type !== 'deleted')
157
+ await Promise.all(nonDeleted.map(async (event) => {
158
+ const parsed = await this.parseWithRaceCheck(event.path)
159
+ if (parsed) {
160
+ this.parsedFiles.set(event.path, parsed)
161
+ }
162
+ }))
163
+
164
+ // Full rebuild
165
+ const allParsedFiles = [...this.parsedFiles.values()]
166
+ const builder = new GraphBuilder()
167
+ this.graph = builder.build(allParsedFiles)
168
+
169
+ const compiler = new LockCompiler()
170
+ this.lock = compiler.compile(this.graph, this.contract, allParsedFiles)
171
+
172
+ const changedPaths = events.map(e => e.path)
173
+
174
+ return {
175
+ graph: this.graph,
176
+ lock: this.lock,
177
+ impactResult: {
178
+ changed: changedPaths,
179
+ impacted: [],
180
+ depth: 0,
181
+ confidence: 'low', // Full rebuild = can't determine precise impact
182
+ },
183
+ mode: 'full',
184
+ }
185
+ }
186
+
187
+ private findAffectedNodes(filePath: string): string[] {
188
+ return [...this.graph.nodes.values()]
189
+ .filter(n => n.file === filePath)
190
+ .map(n => n.id)
191
+ }
192
+ }
package/src/index.ts ADDED
@@ -0,0 +1,4 @@
1
// Public entry point for @getmikk/watcher: re-exports the watcher building
// blocks and their event/config types.
export { FileWatcher } from './file-watcher.js'
export { IncrementalAnalyzer } from './incremental-analyzer.js'
export { WatcherDaemon } from './daemon.js'
export type { FileChangeEvent, WatcherConfig, WatcherEvent } from './types.js'
package/src/types.ts ADDED
@@ -0,0 +1,25 @@
1
/** File change event emitted when a source file is added, changed, or deleted */
export interface FileChangeEvent {
  // Kind of filesystem change
  type: 'added' | 'changed' | 'deleted'
  // Project-relative path, normalized to forward slashes
  path: string
  // Content hash before the change; null when the file was not previously tracked
  oldHash: string | null
  // Content hash after the change; null for deletions
  newHash: string | null
  // Epoch milliseconds when the event was observed
  timestamp: number
  // Module ids affected by this change; emitted empty by FileWatcher,
  // filled in by IncrementalAnalyzer
  affectedModuleIds: string[]
}
10
+
11
/** Configuration for the watcher */
export interface WatcherConfig {
  // Project root directory; relative paths and watch globs resolve against it
  projectRoot: string
  include: string[] // glob patterns to watch, e.g. ["src/**/*.ts"]
  exclude: string[] // paths/patterns to ignore, e.g. ["node_modules", ".mikk", "dist"]
  debounceMs: number // debounce window in ms; the daemon falls back to 100 when falsy
}
18
+
19
/** Typed watcher events */
export type WatcherEvent =
  // Raw filesystem change (emitted by FileWatcher)
  | { type: 'file:changed'; data: FileChangeEvent }
  // NOTE(review): not emitted anywhere in this package — confirm producer
  | { type: 'module:updated'; data: { moduleId: string; newHash: string } }
  // Graph re-analysis finished (emitted by WatcherDaemon after a batch)
  | { type: 'graph:updated'; data: { changedNodes: string[]; impactedNodes: string[] } }
  // NOTE(review): not emitted anywhere in this package — confirm producer
  | { type: 'sync:clean'; data: { rootHash: string } }
  // Analysis failed; lock may be stale for the listed modules
  | { type: 'sync:drifted'; data: { reason: string; affectedModules: string[] } }
@@ -0,0 +1,5 @@
1
+ import { expect, test } from "bun:test";
2
+
3
+ test("smoke test - watcher", () => {
4
+ expect(true).toBe(true);
5
+ });
package/tsconfig.json ADDED
@@ -0,0 +1,15 @@
1
+ {
2
+ "extends": "../../tsconfig.base.json",
3
+ "compilerOptions": {
4
+ "outDir": "dist",
5
+ "rootDir": "src"
6
+ },
7
+ "include": [
8
+ "src/**/*"
9
+ ],
10
+ "exclude": [
11
+ "node_modules",
12
+ "dist",
13
+ "tests"
14
+ ]
15
+ }