@getmikk/watcher 1.2.0 → 1.3.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +265 -0
- package/package.json +31 -27
- package/src/daemon.ts +232 -232
- package/src/file-watcher.ts +93 -93
- package/src/incremental-analyzer.ts +192 -192
- package/src/index.ts +4 -4
- package/src/types.ts +25 -25
- package/tests/smoke.test.ts +5 -5
- package/tsconfig.json +14 -14
|
@@ -1,192 +1,192 @@
|
|
|
1
|
-
import * as fs from 'node:fs/promises'
|
|
2
|
-
import * as path from 'node:path'
|
|
3
|
-
import {
|
|
4
|
-
getParser, GraphBuilder, ImpactAnalyzer, LockCompiler, hashFile,
|
|
5
|
-
type ParsedFile, type DependencyGraph, type MikkLock, type MikkContract, type ImpactResult
|
|
6
|
-
} from '@getmikk/core'
|
|
7
|
-
import type { FileChangeEvent } from './types.js'
|
|
8
|
-
|
|
9
|
-
/** Threshold: if batch size exceeds this, run full re-analysis */
|
|
10
|
-
const FULL_ANALYSIS_THRESHOLD = 15
|
|
11
|
-
|
|
12
|
-
/** Max retries for race-condition re-hash check */
|
|
13
|
-
const MAX_RETRIES = 3
|
|
14
|
-
|
|
15
|
-
/**
|
|
16
|
-
* IncrementalAnalyzer — re-parses only changed files, updates graph nodes,
|
|
17
|
-
* and recomputes affected module hashes. O(changed files) not O(whole repo).
|
|
18
|
-
*
|
|
19
|
-
* Supports batch analysis: if > 15 files change at once (e.g. git checkout),
|
|
20
|
-
* runs a full re-analysis instead of incremental.
|
|
21
|
-
*
|
|
22
|
-
* Race condition handling: after parsing, re-hashes the file and re-parses
|
|
23
|
-
* if the content changed during parsing (up to 3 retries).
|
|
24
|
-
*/
|
|
25
|
-
export class IncrementalAnalyzer {
|
|
26
|
-
private parsedFiles: Map<string, ParsedFile> = new Map()
|
|
27
|
-
|
|
28
|
-
constructor(
|
|
29
|
-
private graph: DependencyGraph,
|
|
30
|
-
private lock: MikkLock,
|
|
31
|
-
private contract: MikkContract,
|
|
32
|
-
private projectRoot: string
|
|
33
|
-
) { }
|
|
34
|
-
|
|
35
|
-
/** Handle a batch of file change events (debounced by daemon) */
|
|
36
|
-
async analyzeBatch(events: FileChangeEvent[]): Promise<{
|
|
37
|
-
graph: DependencyGraph
|
|
38
|
-
lock: MikkLock
|
|
39
|
-
impactResult: ImpactResult
|
|
40
|
-
mode: 'incremental' | 'full'
|
|
41
|
-
}> {
|
|
42
|
-
// If too many changes at once, run full analysis
|
|
43
|
-
if (events.length > FULL_ANALYSIS_THRESHOLD) {
|
|
44
|
-
return this.runFullAnalysis(events)
|
|
45
|
-
}
|
|
46
|
-
|
|
47
|
-
// Incremental: process each event
|
|
48
|
-
let combinedChanged: string[] = []
|
|
49
|
-
let combinedImpacted: string[] = []
|
|
50
|
-
|
|
51
|
-
for (const event of events) {
|
|
52
|
-
if (event.type === 'deleted') {
|
|
53
|
-
this.parsedFiles.delete(event.path)
|
|
54
|
-
combinedChanged.push(event.path)
|
|
55
|
-
} else {
|
|
56
|
-
const parsed = await this.parseWithRaceCheck(event.path)
|
|
57
|
-
if (parsed) {
|
|
58
|
-
this.parsedFiles.set(event.path, parsed)
|
|
59
|
-
}
|
|
60
|
-
combinedChanged.push(...this.findAffectedNodes(event.path))
|
|
61
|
-
}
|
|
62
|
-
}
|
|
63
|
-
|
|
64
|
-
// Rebuild graph from all parsed files
|
|
65
|
-
const allParsedFiles = [...this.parsedFiles.values()]
|
|
66
|
-
const builder = new GraphBuilder()
|
|
67
|
-
this.graph = builder.build(allParsedFiles)
|
|
68
|
-
|
|
69
|
-
// Run impact analysis on all changed nodes
|
|
70
|
-
const analyzer = new ImpactAnalyzer(this.graph)
|
|
71
|
-
const impactResult = analyzer.analyze([...new Set(combinedChanged)])
|
|
72
|
-
|
|
73
|
-
// Recompile lock
|
|
74
|
-
const compiler = new LockCompiler()
|
|
75
|
-
this.lock = compiler.compile(this.graph, this.contract, allParsedFiles)
|
|
76
|
-
|
|
77
|
-
return { graph: this.graph, lock: this.lock, impactResult, mode: 'incremental' }
|
|
78
|
-
}
|
|
79
|
-
|
|
80
|
-
/** Handle a single file change event */
|
|
81
|
-
async analyze(event: FileChangeEvent): Promise<{
|
|
82
|
-
graph: DependencyGraph
|
|
83
|
-
lock: MikkLock
|
|
84
|
-
impactResult: ImpactResult
|
|
85
|
-
}> {
|
|
86
|
-
const result = await this.analyzeBatch([event])
|
|
87
|
-
return { graph: result.graph, lock: result.lock, impactResult: result.impactResult }
|
|
88
|
-
}
|
|
89
|
-
|
|
90
|
-
/** Add a parsed file to the tracker */
|
|
91
|
-
addParsedFile(file: ParsedFile): void {
|
|
92
|
-
this.parsedFiles.set(file.path, file)
|
|
93
|
-
}
|
|
94
|
-
|
|
95
|
-
/** Get the current parsed file count */
|
|
96
|
-
get fileCount(): number {
|
|
97
|
-
return this.parsedFiles.size
|
|
98
|
-
}
|
|
99
|
-
|
|
100
|
-
// ─── Private ──────────────────────────────────────────────────
|
|
101
|
-
|
|
102
|
-
/**
|
|
103
|
-
* Parse a file with race-condition detection.
|
|
104
|
-
* After parsing, re-hash the file. If the hash differs from what we started with,
|
|
105
|
-
* the file changed during parsing — re-parse (up to MAX_RETRIES).
|
|
106
|
-
*/
|
|
107
|
-
private async parseWithRaceCheck(changedFile: string): Promise<ParsedFile | null> {
|
|
108
|
-
const fullPath = path.join(this.projectRoot, changedFile)
|
|
109
|
-
|
|
110
|
-
for (let attempt = 0; attempt < MAX_RETRIES; attempt++) {
|
|
111
|
-
try {
|
|
112
|
-
const content = await fs.readFile(fullPath, 'utf-8')
|
|
113
|
-
const parser = getParser(changedFile)
|
|
114
|
-
const parsedFile = parser.parse(changedFile, content)
|
|
115
|
-
|
|
116
|
-
// Race condition check: re-hash after parse
|
|
117
|
-
try {
|
|
118
|
-
const postParseHash = await hashFile(fullPath)
|
|
119
|
-
if (postParseHash === parsedFile.hash) {
|
|
120
|
-
return parsedFile // Content stable
|
|
121
|
-
}
|
|
122
|
-
// Content changed during parse — retry
|
|
123
|
-
} catch {
|
|
124
|
-
return parsedFile // File may have been deleted, return what we have
|
|
125
|
-
}
|
|
126
|
-
} catch {
|
|
127
|
-
return null // File unreadable
|
|
128
|
-
}
|
|
129
|
-
}
|
|
130
|
-
|
|
131
|
-
// Exhausted retries — parse one final time and accept
|
|
132
|
-
try {
|
|
133
|
-
const content = await fs.readFile(fullPath, 'utf-8')
|
|
134
|
-
const parser = getParser(changedFile)
|
|
135
|
-
return parser.parse(changedFile, content)
|
|
136
|
-
} catch {
|
|
137
|
-
return null
|
|
138
|
-
}
|
|
139
|
-
}
|
|
140
|
-
|
|
141
|
-
/** Run a full re-analysis (for large batches like git checkout) */
|
|
142
|
-
private async runFullAnalysis(events: FileChangeEvent[]): Promise<{
|
|
143
|
-
graph: DependencyGraph
|
|
144
|
-
lock: MikkLock
|
|
145
|
-
impactResult: ImpactResult
|
|
146
|
-
mode: 'full'
|
|
147
|
-
}> {
|
|
148
|
-
// Remove deleted files
|
|
149
|
-
for (const event of events) {
|
|
150
|
-
if (event.type === 'deleted') {
|
|
151
|
-
this.parsedFiles.delete(event.path)
|
|
152
|
-
}
|
|
153
|
-
}
|
|
154
|
-
|
|
155
|
-
// Re-parse all non-deleted changed files
|
|
156
|
-
const nonDeleted = events.filter(e => e.type !== 'deleted')
|
|
157
|
-
await Promise.all(nonDeleted.map(async (event) => {
|
|
158
|
-
const parsed = await this.parseWithRaceCheck(event.path)
|
|
159
|
-
if (parsed) {
|
|
160
|
-
this.parsedFiles.set(event.path, parsed)
|
|
161
|
-
}
|
|
162
|
-
}))
|
|
163
|
-
|
|
164
|
-
// Full rebuild
|
|
165
|
-
const allParsedFiles = [...this.parsedFiles.values()]
|
|
166
|
-
const builder = new GraphBuilder()
|
|
167
|
-
this.graph = builder.build(allParsedFiles)
|
|
168
|
-
|
|
169
|
-
const compiler = new LockCompiler()
|
|
170
|
-
this.lock = compiler.compile(this.graph, this.contract, allParsedFiles)
|
|
171
|
-
|
|
172
|
-
const changedPaths = events.map(e => e.path)
|
|
173
|
-
|
|
174
|
-
return {
|
|
175
|
-
graph: this.graph,
|
|
176
|
-
lock: this.lock,
|
|
177
|
-
impactResult: {
|
|
178
|
-
changed: changedPaths,
|
|
179
|
-
impacted: [],
|
|
180
|
-
depth: 0,
|
|
181
|
-
confidence: 'low', // Full rebuild = can't determine precise impact
|
|
182
|
-
},
|
|
183
|
-
mode: 'full',
|
|
184
|
-
}
|
|
185
|
-
}
|
|
186
|
-
|
|
187
|
-
private findAffectedNodes(filePath: string): string[] {
|
|
188
|
-
return [...this.graph.nodes.values()]
|
|
189
|
-
.filter(n => n.file === filePath)
|
|
190
|
-
.map(n => n.id)
|
|
191
|
-
}
|
|
192
|
-
}
|
|
1
|
+
import * as fs from 'node:fs/promises'
|
|
2
|
+
import * as path from 'node:path'
|
|
3
|
+
import {
|
|
4
|
+
getParser, GraphBuilder, ImpactAnalyzer, LockCompiler, hashFile,
|
|
5
|
+
type ParsedFile, type DependencyGraph, type MikkLock, type MikkContract, type ImpactResult
|
|
6
|
+
} from '@getmikk/core'
|
|
7
|
+
import type { FileChangeEvent } from './types.js'
|
|
8
|
+
|
|
9
|
+
/** Threshold: if batch size exceeds this, run full re-analysis */
|
|
10
|
+
const FULL_ANALYSIS_THRESHOLD = 15
|
|
11
|
+
|
|
12
|
+
/** Max retries for race-condition re-hash check */
|
|
13
|
+
const MAX_RETRIES = 3
|
|
14
|
+
|
|
15
|
+
/**
|
|
16
|
+
* IncrementalAnalyzer — re-parses only changed files, updates graph nodes,
|
|
17
|
+
* and recomputes affected module hashes. O(changed files) not O(whole repo).
|
|
18
|
+
*
|
|
19
|
+
* Supports batch analysis: if > 15 files change at once (e.g. git checkout),
|
|
20
|
+
* runs a full re-analysis instead of incremental.
|
|
21
|
+
*
|
|
22
|
+
* Race condition handling: after parsing, re-hashes the file and re-parses
|
|
23
|
+
* if the content changed during parsing (up to 3 retries).
|
|
24
|
+
*/
|
|
25
|
+
export class IncrementalAnalyzer {
|
|
26
|
+
private parsedFiles: Map<string, ParsedFile> = new Map()
|
|
27
|
+
|
|
28
|
+
constructor(
|
|
29
|
+
private graph: DependencyGraph,
|
|
30
|
+
private lock: MikkLock,
|
|
31
|
+
private contract: MikkContract,
|
|
32
|
+
private projectRoot: string
|
|
33
|
+
) { }
|
|
34
|
+
|
|
35
|
+
/** Handle a batch of file change events (debounced by daemon) */
|
|
36
|
+
async analyzeBatch(events: FileChangeEvent[]): Promise<{
|
|
37
|
+
graph: DependencyGraph
|
|
38
|
+
lock: MikkLock
|
|
39
|
+
impactResult: ImpactResult
|
|
40
|
+
mode: 'incremental' | 'full'
|
|
41
|
+
}> {
|
|
42
|
+
// If too many changes at once, run full analysis
|
|
43
|
+
if (events.length > FULL_ANALYSIS_THRESHOLD) {
|
|
44
|
+
return this.runFullAnalysis(events)
|
|
45
|
+
}
|
|
46
|
+
|
|
47
|
+
// Incremental: process each event
|
|
48
|
+
let combinedChanged: string[] = []
|
|
49
|
+
let combinedImpacted: string[] = []
|
|
50
|
+
|
|
51
|
+
for (const event of events) {
|
|
52
|
+
if (event.type === 'deleted') {
|
|
53
|
+
this.parsedFiles.delete(event.path)
|
|
54
|
+
combinedChanged.push(event.path)
|
|
55
|
+
} else {
|
|
56
|
+
const parsed = await this.parseWithRaceCheck(event.path)
|
|
57
|
+
if (parsed) {
|
|
58
|
+
this.parsedFiles.set(event.path, parsed)
|
|
59
|
+
}
|
|
60
|
+
combinedChanged.push(...this.findAffectedNodes(event.path))
|
|
61
|
+
}
|
|
62
|
+
}
|
|
63
|
+
|
|
64
|
+
// Rebuild graph from all parsed files
|
|
65
|
+
const allParsedFiles = [...this.parsedFiles.values()]
|
|
66
|
+
const builder = new GraphBuilder()
|
|
67
|
+
this.graph = builder.build(allParsedFiles)
|
|
68
|
+
|
|
69
|
+
// Run impact analysis on all changed nodes
|
|
70
|
+
const analyzer = new ImpactAnalyzer(this.graph)
|
|
71
|
+
const impactResult = analyzer.analyze([...new Set(combinedChanged)])
|
|
72
|
+
|
|
73
|
+
// Recompile lock
|
|
74
|
+
const compiler = new LockCompiler()
|
|
75
|
+
this.lock = compiler.compile(this.graph, this.contract, allParsedFiles)
|
|
76
|
+
|
|
77
|
+
return { graph: this.graph, lock: this.lock, impactResult, mode: 'incremental' }
|
|
78
|
+
}
|
|
79
|
+
|
|
80
|
+
/** Handle a single file change event */
|
|
81
|
+
async analyze(event: FileChangeEvent): Promise<{
|
|
82
|
+
graph: DependencyGraph
|
|
83
|
+
lock: MikkLock
|
|
84
|
+
impactResult: ImpactResult
|
|
85
|
+
}> {
|
|
86
|
+
const result = await this.analyzeBatch([event])
|
|
87
|
+
return { graph: result.graph, lock: result.lock, impactResult: result.impactResult }
|
|
88
|
+
}
|
|
89
|
+
|
|
90
|
+
/** Add a parsed file to the tracker */
|
|
91
|
+
addParsedFile(file: ParsedFile): void {
|
|
92
|
+
this.parsedFiles.set(file.path, file)
|
|
93
|
+
}
|
|
94
|
+
|
|
95
|
+
/** Get the current parsed file count */
|
|
96
|
+
get fileCount(): number {
|
|
97
|
+
return this.parsedFiles.size
|
|
98
|
+
}
|
|
99
|
+
|
|
100
|
+
// ─── Private ──────────────────────────────────────────────────
|
|
101
|
+
|
|
102
|
+
/**
|
|
103
|
+
* Parse a file with race-condition detection.
|
|
104
|
+
* After parsing, re-hash the file. If the hash differs from what we started with,
|
|
105
|
+
* the file changed during parsing — re-parse (up to MAX_RETRIES).
|
|
106
|
+
*/
|
|
107
|
+
private async parseWithRaceCheck(changedFile: string): Promise<ParsedFile | null> {
|
|
108
|
+
const fullPath = path.join(this.projectRoot, changedFile)
|
|
109
|
+
|
|
110
|
+
for (let attempt = 0; attempt < MAX_RETRIES; attempt++) {
|
|
111
|
+
try {
|
|
112
|
+
const content = await fs.readFile(fullPath, 'utf-8')
|
|
113
|
+
const parser = getParser(changedFile)
|
|
114
|
+
const parsedFile = parser.parse(changedFile, content)
|
|
115
|
+
|
|
116
|
+
// Race condition check: re-hash after parse
|
|
117
|
+
try {
|
|
118
|
+
const postParseHash = await hashFile(fullPath)
|
|
119
|
+
if (postParseHash === parsedFile.hash) {
|
|
120
|
+
return parsedFile // Content stable
|
|
121
|
+
}
|
|
122
|
+
// Content changed during parse — retry
|
|
123
|
+
} catch {
|
|
124
|
+
return parsedFile // File may have been deleted, return what we have
|
|
125
|
+
}
|
|
126
|
+
} catch {
|
|
127
|
+
return null // File unreadable
|
|
128
|
+
}
|
|
129
|
+
}
|
|
130
|
+
|
|
131
|
+
// Exhausted retries — parse one final time and accept
|
|
132
|
+
try {
|
|
133
|
+
const content = await fs.readFile(fullPath, 'utf-8')
|
|
134
|
+
const parser = getParser(changedFile)
|
|
135
|
+
return parser.parse(changedFile, content)
|
|
136
|
+
} catch {
|
|
137
|
+
return null
|
|
138
|
+
}
|
|
139
|
+
}
|
|
140
|
+
|
|
141
|
+
/** Run a full re-analysis (for large batches like git checkout) */
|
|
142
|
+
private async runFullAnalysis(events: FileChangeEvent[]): Promise<{
|
|
143
|
+
graph: DependencyGraph
|
|
144
|
+
lock: MikkLock
|
|
145
|
+
impactResult: ImpactResult
|
|
146
|
+
mode: 'full'
|
|
147
|
+
}> {
|
|
148
|
+
// Remove deleted files
|
|
149
|
+
for (const event of events) {
|
|
150
|
+
if (event.type === 'deleted') {
|
|
151
|
+
this.parsedFiles.delete(event.path)
|
|
152
|
+
}
|
|
153
|
+
}
|
|
154
|
+
|
|
155
|
+
// Re-parse all non-deleted changed files
|
|
156
|
+
const nonDeleted = events.filter(e => e.type !== 'deleted')
|
|
157
|
+
await Promise.all(nonDeleted.map(async (event) => {
|
|
158
|
+
const parsed = await this.parseWithRaceCheck(event.path)
|
|
159
|
+
if (parsed) {
|
|
160
|
+
this.parsedFiles.set(event.path, parsed)
|
|
161
|
+
}
|
|
162
|
+
}))
|
|
163
|
+
|
|
164
|
+
// Full rebuild
|
|
165
|
+
const allParsedFiles = [...this.parsedFiles.values()]
|
|
166
|
+
const builder = new GraphBuilder()
|
|
167
|
+
this.graph = builder.build(allParsedFiles)
|
|
168
|
+
|
|
169
|
+
const compiler = new LockCompiler()
|
|
170
|
+
this.lock = compiler.compile(this.graph, this.contract, allParsedFiles)
|
|
171
|
+
|
|
172
|
+
const changedPaths = events.map(e => e.path)
|
|
173
|
+
|
|
174
|
+
return {
|
|
175
|
+
graph: this.graph,
|
|
176
|
+
lock: this.lock,
|
|
177
|
+
impactResult: {
|
|
178
|
+
changed: changedPaths,
|
|
179
|
+
impacted: [],
|
|
180
|
+
depth: 0,
|
|
181
|
+
confidence: 'low', // Full rebuild = can't determine precise impact
|
|
182
|
+
},
|
|
183
|
+
mode: 'full',
|
|
184
|
+
}
|
|
185
|
+
}
|
|
186
|
+
|
|
187
|
+
private findAffectedNodes(filePath: string): string[] {
|
|
188
|
+
return [...this.graph.nodes.values()]
|
|
189
|
+
.filter(n => n.file === filePath)
|
|
190
|
+
.map(n => n.id)
|
|
191
|
+
}
|
|
192
|
+
}
|
package/src/index.ts
CHANGED
|
@@ -1,4 +1,4 @@
|
|
|
1
|
-
export { FileWatcher } from './file-watcher.js'
|
|
2
|
-
export { IncrementalAnalyzer } from './incremental-analyzer.js'
|
|
3
|
-
export { WatcherDaemon } from './daemon.js'
|
|
4
|
-
export type { FileChangeEvent, WatcherConfig, WatcherEvent } from './types.js'
|
|
1
|
+
// Public entry point for @getmikk/watcher: re-exports the watcher pieces
// (filesystem watcher, incremental analyzer, daemon) and their shared types.
export { FileWatcher } from './file-watcher.js'
export { IncrementalAnalyzer } from './incremental-analyzer.js'
export { WatcherDaemon } from './daemon.js'
export type { FileChangeEvent, WatcherConfig, WatcherEvent } from './types.js'
|
package/src/types.ts
CHANGED
|
@@ -1,25 +1,25 @@
|
|
|
1
|
-
/** File change event emitted when a source file is added, changed, or deleted */
|
|
2
|
-
export interface FileChangeEvent {
|
|
3
|
-
type: 'added' | 'changed' | 'deleted'
|
|
4
|
-
path: string
|
|
5
|
-
oldHash: string | null
|
|
6
|
-
newHash: string | null
|
|
7
|
-
timestamp: number
|
|
8
|
-
affectedModuleIds: string[]
|
|
9
|
-
}
|
|
10
|
-
|
|
11
|
-
/** Configuration for the watcher */
|
|
12
|
-
export interface WatcherConfig {
|
|
13
|
-
projectRoot: string
|
|
14
|
-
include: string[] // ["src/**/*.ts"]
|
|
15
|
-
exclude: string[] // ["node_modules", ".mikk", "dist"]
|
|
16
|
-
debounceMs: number // 100
|
|
17
|
-
}
|
|
18
|
-
|
|
19
|
-
/** Typed watcher events */
|
|
20
|
-
export type WatcherEvent =
|
|
21
|
-
| { type: 'file:changed'; data: FileChangeEvent }
|
|
22
|
-
| { type: 'module:updated'; data: { moduleId: string; newHash: string } }
|
|
23
|
-
| { type: 'graph:updated'; data: { changedNodes: string[]; impactedNodes: string[] } }
|
|
24
|
-
| { type: 'sync:clean'; data: { rootHash: string } }
|
|
25
|
-
| { type: 'sync:drifted'; data: { reason: string; affectedModules: string[] } }
|
|
1
|
+
/** File change event emitted when a source file is added, changed, or deleted */
export interface FileChangeEvent {
  // Kind of filesystem change observed.
  type: 'added' | 'changed' | 'deleted'
  // Path of the file that changed — presumably project-relative; confirm with FileWatcher.
  path: string
  // Content hash before the change — presumably null for 'added' events; confirm with emitter.
  oldHash: string | null
  // Content hash after the change — presumably null for 'deleted' events; confirm with emitter.
  newHash: string | null
  // When the event was observed — units (epoch ms?) not shown here; confirm with emitter.
  timestamp: number
  // Ids of modules whose definitions are affected by this file.
  affectedModuleIds: string[]
}

/** Configuration for the watcher */
export interface WatcherConfig {
  // Absolute root of the watched project.
  projectRoot: string
  include: string[] // ["src/**/*.ts"]
  exclude: string[] // ["node_modules", ".mikk", "dist"]
  debounceMs: number // 100
}

/** Typed watcher events — discriminated union on `type` */
export type WatcherEvent =
  | { type: 'file:changed'; data: FileChangeEvent }
  | { type: 'module:updated'; data: { moduleId: string; newHash: string } }
  | { type: 'graph:updated'; data: { changedNodes: string[]; impactedNodes: string[] } }
  | { type: 'sync:clean'; data: { rootHash: string } }
  | { type: 'sync:drifted'; data: { reason: string; affectedModules: string[] } }
|
package/tests/smoke.test.ts
CHANGED
|
@@ -1,5 +1,5 @@
|
|
|
1
|
-
import { expect, test } from "bun:test";
|
|
2
|
-
|
|
3
|
-
test("smoke test - watcher", () => {
|
|
4
|
-
expect(true).toBe(true);
|
|
5
|
-
});
|
|
1
|
+
import { expect, test } from "bun:test";

// Minimal smoke test: confirms the bun test runner is wired up for this package.
test("smoke test - watcher", () => {
  expect(true).toBe(true);
});
|
package/tsconfig.json
CHANGED
|
@@ -1,15 +1,15 @@
|
|
|
1
|
-
{
|
|
2
|
-
"extends": "../../tsconfig.base.json",
|
|
3
|
-
"compilerOptions": {
|
|
4
|
-
"outDir": "dist",
|
|
5
|
-
"rootDir": "src"
|
|
6
|
-
},
|
|
7
|
-
"include": [
|
|
8
|
-
"src/**/*"
|
|
9
|
-
],
|
|
10
|
-
"exclude": [
|
|
11
|
-
"node_modules",
|
|
12
|
-
"dist",
|
|
13
|
-
"tests"
|
|
14
|
-
]
|
|
1
|
+
{
|
|
2
|
+
"extends": "../../tsconfig.base.json",
|
|
3
|
+
"compilerOptions": {
|
|
4
|
+
"outDir": "dist",
|
|
5
|
+
"rootDir": "src"
|
|
6
|
+
},
|
|
7
|
+
"include": [
|
|
8
|
+
"src/**/*"
|
|
9
|
+
],
|
|
10
|
+
"exclude": [
|
|
11
|
+
"node_modules",
|
|
12
|
+
"dist",
|
|
13
|
+
"tests"
|
|
14
|
+
]
|
|
15
15
|
}
|