@bfra.me/doc-sync 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +288 -0
- package/lib/chunk-6NKAJT2M.js +1233 -0
- package/lib/chunk-DR6UG237.js +1027 -0
- package/lib/chunk-G5KKGJYO.js +1560 -0
- package/lib/chunk-ROLA7SBB.js +12 -0
- package/lib/cli/index.d.ts +1 -0
- package/lib/cli/index.js +397 -0
- package/lib/generators/index.d.ts +170 -0
- package/lib/generators/index.js +76 -0
- package/lib/index.d.ts +141 -0
- package/lib/index.js +118 -0
- package/lib/parsers/index.d.ts +264 -0
- package/lib/parsers/index.js +113 -0
- package/lib/types.d.ts +388 -0
- package/lib/types.js +7 -0
- package/package.json +99 -0
- package/src/cli/commands/index.ts +3 -0
- package/src/cli/commands/sync.ts +146 -0
- package/src/cli/commands/validate.ts +151 -0
- package/src/cli/commands/watch.ts +74 -0
- package/src/cli/index.ts +71 -0
- package/src/cli/types.ts +19 -0
- package/src/cli/ui.ts +123 -0
- package/src/generators/api-reference-generator.ts +268 -0
- package/src/generators/code-example-formatter.ts +313 -0
- package/src/generators/component-mapper.ts +383 -0
- package/src/generators/content-merger.ts +295 -0
- package/src/generators/frontmatter-generator.ts +277 -0
- package/src/generators/index.ts +56 -0
- package/src/generators/mdx-generator.ts +289 -0
- package/src/index.ts +131 -0
- package/src/orchestrator/index.ts +21 -0
- package/src/orchestrator/package-scanner.ts +276 -0
- package/src/orchestrator/sync-orchestrator.ts +382 -0
- package/src/orchestrator/validation-pipeline.ts +328 -0
- package/src/parsers/export-analyzer.ts +335 -0
- package/src/parsers/guards.ts +350 -0
- package/src/parsers/index.ts +82 -0
- package/src/parsers/jsdoc-extractor.ts +313 -0
- package/src/parsers/package-info.ts +267 -0
- package/src/parsers/readme-parser.ts +334 -0
- package/src/parsers/typescript-parser.ts +299 -0
- package/src/types.ts +423 -0
- package/src/utils/index.ts +13 -0
- package/src/utils/safe-patterns.ts +280 -0
- package/src/utils/sanitization.ts +164 -0
- package/src/watcher/change-detector.ts +138 -0
- package/src/watcher/debouncer.ts +168 -0
- package/src/watcher/file-watcher.ts +164 -0
- package/src/watcher/index.ts +27 -0
|
@@ -0,0 +1,164 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* @bfra.me/doc-sync/utils/sanitization - Sanitization utilities for MDX content
|
|
3
|
+
* Provides comprehensive XSS prevention for user-generated content
|
|
4
|
+
*/
|
|
5
|
+
|
|
6
|
+
import {sanitizeInput} from '@bfra.me/es/validation'
|
|
7
|
+
import escapeHtml from 'escape-html'
|
|
8
|
+
|
|
9
|
+
/**
|
|
10
|
+
* Sanitize HTML content for MDX context
|
|
11
|
+
* Escapes all HTML entities and JSX curly braces to prevent XSS
|
|
12
|
+
*
|
|
13
|
+
* @param content - The content to sanitize
|
|
14
|
+
* @returns Sanitized content safe for MDX rendering
|
|
15
|
+
*
|
|
16
|
+
* @example
|
|
17
|
+
* ```ts
|
|
18
|
+
* const safe = sanitizeForMDX('<script>alert("xss")</script>')
|
|
19
|
+
* // Returns: '<script>alert("xss")</script>'
|
|
20
|
+
* ```
|
|
21
|
+
*/
|
|
22
|
+
export function sanitizeForMDX(content: string): string {
|
|
23
|
+
// Use existing sanitizeInput from @bfra.me/es/validation
|
|
24
|
+
// This escapes: & < > " ' /
|
|
25
|
+
const escaped = sanitizeInput(content, {trim: false})
|
|
26
|
+
|
|
27
|
+
// Additionally escape JSX curly braces for MDX safety
|
|
28
|
+
return escaped.replaceAll('{', '{').replaceAll('}', '}')
|
|
29
|
+
}
|
|
30
|
+
|
|
31
|
+
/**
|
|
32
|
+
* Sanitize value for use in HTML/JSX attribute
|
|
33
|
+
* Uses escape-html library for proper attribute encoding
|
|
34
|
+
*
|
|
35
|
+
* @param value - The attribute value to sanitize
|
|
36
|
+
* @returns Sanitized value safe for attribute context
|
|
37
|
+
*
|
|
38
|
+
* @example
|
|
39
|
+
* ```ts
|
|
40
|
+
* const safe = sanitizeAttribute('value" onload="alert(1)')
|
|
41
|
+
* // Returns: 'value" onload="alert(1)'
|
|
42
|
+
* ```
|
|
43
|
+
*/
|
|
44
|
+
export function sanitizeAttribute(value: string): string {
|
|
45
|
+
return escapeHtml(value)
|
|
46
|
+
}
|
|
47
|
+
|
|
48
|
+
/**
 * JSX attribute parsed from a tag
 */
interface JSXAttribute {
  // Attribute name exactly as written in the tag (e.g. `title`)
  readonly name: string
  // Attribute value with surrounding quotes stripped; null for bare
  // boolean-style attributes written without an `=` (e.g. `disabled`)
  readonly value: string | null
}
|
|
55
|
+
|
|
56
|
+
/**
|
|
57
|
+
* Parse JSX tag attributes safely without using complex regex
|
|
58
|
+
* Uses a simple state machine approach to avoid ReDoS vulnerabilities
|
|
59
|
+
*
|
|
60
|
+
* @param tag - The complete JSX tag string (e.g., '<Badge text="hello" />')
|
|
61
|
+
* @returns Array of parsed attributes
|
|
62
|
+
*
|
|
63
|
+
* @example
|
|
64
|
+
* ```ts
|
|
65
|
+
* const attrs = parseJSXAttributes('<Card title="Hello" icon="star" />')
|
|
66
|
+
* // Returns: [{name: 'title', value: 'Hello'}, {name: 'icon', value: 'star'}]
|
|
67
|
+
* ```
|
|
68
|
+
*/
|
|
69
|
+
export function parseJSXAttributes(tag: string): readonly JSXAttribute[] {
|
|
70
|
+
const attrs: JSXAttribute[] = []
|
|
71
|
+
|
|
72
|
+
const spaceIndex = tag.indexOf(' ')
|
|
73
|
+
if (spaceIndex === -1) return attrs
|
|
74
|
+
|
|
75
|
+
const closeIndex = tag.lastIndexOf('>')
|
|
76
|
+
if (closeIndex === -1) return attrs
|
|
77
|
+
|
|
78
|
+
const attrRegion = tag.slice(spaceIndex + 1, closeIndex).trim()
|
|
79
|
+
let i = 0
|
|
80
|
+
|
|
81
|
+
while (i < attrRegion.length) {
|
|
82
|
+
while (i < attrRegion.length && /\s/.test(attrRegion.charAt(i))) i++
|
|
83
|
+
if (i >= attrRegion.length) break
|
|
84
|
+
|
|
85
|
+
let name = ''
|
|
86
|
+
while (i < attrRegion.length && /[\w-]/.test(attrRegion.charAt(i))) {
|
|
87
|
+
name += attrRegion[i]
|
|
88
|
+
i++
|
|
89
|
+
}
|
|
90
|
+
|
|
91
|
+
if (!name) break
|
|
92
|
+
|
|
93
|
+
while (i < attrRegion.length && /\s/.test(attrRegion.charAt(i))) i++
|
|
94
|
+
|
|
95
|
+
if (i >= attrRegion.length || attrRegion[i] !== '=') {
|
|
96
|
+
attrs.push({name, value: null})
|
|
97
|
+
continue
|
|
98
|
+
}
|
|
99
|
+
|
|
100
|
+
i++
|
|
101
|
+
while (i < attrRegion.length && /\s/.test(attrRegion.charAt(i))) i++
|
|
102
|
+
|
|
103
|
+
if (i >= attrRegion.length) {
|
|
104
|
+
attrs.push({name, value: ''})
|
|
105
|
+
break
|
|
106
|
+
}
|
|
107
|
+
|
|
108
|
+
let value = ''
|
|
109
|
+
const quote = attrRegion[i]
|
|
110
|
+
if (quote === '"' || quote === "'") {
|
|
111
|
+
i++
|
|
112
|
+
while (i < attrRegion.length && attrRegion[i] !== quote) {
|
|
113
|
+
value += attrRegion[i]
|
|
114
|
+
i++
|
|
115
|
+
}
|
|
116
|
+
if (i < attrRegion.length) i++
|
|
117
|
+
} else {
|
|
118
|
+
while (i < attrRegion.length && !/[\s/>]/.test(attrRegion.charAt(i))) {
|
|
119
|
+
value += attrRegion[i]
|
|
120
|
+
i++
|
|
121
|
+
}
|
|
122
|
+
}
|
|
123
|
+
|
|
124
|
+
attrs.push({name, value})
|
|
125
|
+
}
|
|
126
|
+
|
|
127
|
+
return attrs
|
|
128
|
+
}
|
|
129
|
+
|
|
130
|
+
/**
|
|
131
|
+
* Sanitize a complete JSX tag including all attributes
|
|
132
|
+
* Parses the tag and escapes all attribute values to prevent XSS
|
|
133
|
+
*
|
|
134
|
+
* @param tag - The complete JSX tag string
|
|
135
|
+
* @returns Sanitized JSX tag safe for rendering
|
|
136
|
+
*
|
|
137
|
+
* @example
|
|
138
|
+
* ```ts
|
|
139
|
+
* const safe = sanitizeJSXTag('<Badge text="v1.0.0" onclick="alert(1)" />')
|
|
140
|
+
* // Returns: '<Badge text="v1.0.0" onclick="alert(1)" />' (with escaped values)
|
|
141
|
+
* ```
|
|
142
|
+
*/
|
|
143
|
+
export function sanitizeJSXTag(tag: string): string {
|
|
144
|
+
// Extract tag name
|
|
145
|
+
const tagMatch = tag.match(/^<([A-Z][a-zA-Z0-9]*)/)
|
|
146
|
+
if (!tagMatch || typeof tagMatch[1] !== 'string' || tagMatch[1].length === 0) {
|
|
147
|
+
// Not a valid JSX tag, escape everything
|
|
148
|
+
return escapeHtml(tag)
|
|
149
|
+
}
|
|
150
|
+
|
|
151
|
+
const tagName = tagMatch[1]
|
|
152
|
+
const selfClosing = tag.endsWith('/>')
|
|
153
|
+
const attributes = parseJSXAttributes(tag)
|
|
154
|
+
|
|
155
|
+
// Sanitize each attribute value
|
|
156
|
+
const sanitizedAttrs = attributes.map(({name, value}) => {
|
|
157
|
+
if (value === null) return name
|
|
158
|
+
const escaped = escapeHtml(value)
|
|
159
|
+
return `${name}="${escaped}"`
|
|
160
|
+
})
|
|
161
|
+
|
|
162
|
+
const attrString = sanitizedAttrs.length > 0 ? ` ${sanitizedAttrs.join(' ')}` : ''
|
|
163
|
+
return `<${tagName}${attrString}${selfClosing ? ' />' : '>'}`
|
|
164
|
+
}
|
|
@@ -0,0 +1,138 @@
|
|
|
1
|
+
import type {ChangeDetector} from '@bfra.me/es/watcher'
|
|
2
|
+
import type {FileChangeEvent, PackageInfo} from '../types'
|
|
3
|
+
|
|
4
|
+
import {createChangeDetector as createBaseDetector} from '@bfra.me/es/watcher'
|
|
5
|
+
|
|
6
|
+
import {categorizeFile, type FileCategory} from './file-watcher'
|
|
7
|
+
|
|
8
|
+
/** Options for creating a document change detector. */
export interface DocChangeDetectorOptions {
  // Algorithm forwarded to the base detector from @bfra.me/es/watcher
  // (presumably used for content fingerprinting — see that package).
  readonly algorithm?: 'sha256' | 'md5'
}

/** Result of analyzing a batch of file change events for one package. */
export interface PackageChangeAnalysis {
  // Package the changes belong to; '__unknown__' when not attributable
  readonly packageName: string
  // True when readme, package.json, or source files changed
  readonly needsRegeneration: boolean
  // Distinct categories seen in the batch (the 'unknown' category is excluded)
  readonly changedCategories: readonly FileCategory[]
  // Every changed path in the batch, including uncategorized ones
  readonly changedFiles: readonly string[]
}

/** Change detector with package-level bookkeeping on top of the base detector. */
export interface DocChangeDetector {
  // True when the file differs from its last recorded state
  readonly hasChanged: (filePath: string) => Promise<boolean>
  // Record the file's current state as the baseline
  readonly record: (filePath: string) => Promise<void>
  // Associate files with a package and record all of them
  readonly recordPackage: (pkg: PackageInfo, files: readonly string[]) => Promise<void>
  // Forget a single file (baseline and package association)
  readonly clear: (filePath: string) => void
  // Reset all baselines and package associations
  readonly clearAll: () => void
  // Group events by package and decide which packages need regeneration
  readonly analyzeChanges: (events: readonly FileChangeEvent[]) => Promise<PackageChangeAnalysis[]>
}
|
|
27
|
+
|
|
28
|
+
/**
 * Create a change detector that wraps the shared @bfra.me/es/watcher detector
 * and additionally tracks which files belong to which package.
 *
 * @param options - Optional algorithm selection forwarded to the base detector
 * @returns A DocChangeDetector with package-aware bookkeeping
 */
export function createDocChangeDetector(options: DocChangeDetectorOptions = {}): DocChangeDetector {
  // The base detector owns the actual per-file change decision.
  const baseDetector: ChangeDetector = createBaseDetector({algorithm: options.algorithm})
  // package name -> set of file paths last recorded for that package
  const packageFiles = new Map<string, Set<string>>()

  return {
    // Straight delegation to the base detector.
    async hasChanged(filePath: string): Promise<boolean> {
      return baseDetector.hasChanged(filePath)
    },

    async record(filePath: string): Promise<void> {
      await baseDetector.record(filePath)
    },

    // Remember the package's file set, then record every file concurrently.
    async recordPackage(pkg: PackageInfo, files: readonly string[]): Promise<void> {
      const fileSet = new Set(files)
      packageFiles.set(pkg.name, fileSet)

      await Promise.all(files.map(async file => baseDetector.record(file)))
    },

    // Forget the file both in the base detector and in every package's set.
    clear(filePath: string): void {
      baseDetector.clear(filePath)

      for (const fileSet of packageFiles.values()) {
        fileSet.delete(filePath)
      }
    },

    clearAll(): void {
      baseDetector.clearAll()
      packageFiles.clear()
    },

    // Group events by package, collect the categories touched, and flag
    // packages with documentation-relevant changes for regeneration.
    async analyzeChanges(events: readonly FileChangeEvent[]): Promise<PackageChangeAnalysis[]> {
      const packageChanges = new Map<string, {categories: Set<FileCategory>; files: string[]}>()

      for (const event of events) {
        // Events that cannot be attributed to a package fall under '__unknown__'.
        const packageName = event.packageName ?? '__unknown__'
        const category = categorizeFile(event.path)

        let analysis = packageChanges.get(packageName)
        if (analysis === undefined) {
          analysis = {categories: new Set(), files: []}
          packageChanges.set(packageName, analysis)
        }

        // 'unknown' files are still listed in changedFiles below, but do not
        // contribute a category.
        if (category !== 'unknown') {
          analysis.categories.add(category)
        }
        analysis.files.push(event.path)
      }

      const results: PackageChangeAnalysis[] = []

      for (const [packageName, analysis] of packageChanges) {
        const changedCategories = [...analysis.categories]

        // Package needs regeneration if any documentation-relevant files changed
        const needsRegeneration =
          changedCategories.includes('readme') ||
          changedCategories.includes('package-json') ||
          changedCategories.includes('source')

        results.push({
          packageName,
          needsRegeneration,
          changedCategories,
          changedFiles: analysis.files,
        })
      }

      return results
    },
  }
}
|
|
103
|
+
|
|
104
|
+
export type RegenerationScope = 'full' | 'api-only' | 'readme-only' | 'metadata-only' | 'none'
|
|
105
|
+
|
|
106
|
+
export function determineRegenerationScope(
|
|
107
|
+
changedCategories: readonly FileCategory[],
|
|
108
|
+
): RegenerationScope {
|
|
109
|
+
const hasReadme = changedCategories.includes('readme')
|
|
110
|
+
const hasSource = changedCategories.includes('source')
|
|
111
|
+
const hasPackageJson = changedCategories.includes('package-json')
|
|
112
|
+
|
|
113
|
+
if (hasReadme && hasSource) {
|
|
114
|
+
return 'full'
|
|
115
|
+
}
|
|
116
|
+
|
|
117
|
+
if (hasSource) {
|
|
118
|
+
return 'api-only'
|
|
119
|
+
}
|
|
120
|
+
|
|
121
|
+
if (hasReadme) {
|
|
122
|
+
return 'readme-only'
|
|
123
|
+
}
|
|
124
|
+
|
|
125
|
+
if (hasPackageJson) {
|
|
126
|
+
return 'metadata-only'
|
|
127
|
+
}
|
|
128
|
+
|
|
129
|
+
return 'none'
|
|
130
|
+
}
|
|
131
|
+
|
|
132
|
+
export async function hasAnyFileChanged(
|
|
133
|
+
detector: DocChangeDetector,
|
|
134
|
+
files: readonly string[],
|
|
135
|
+
): Promise<boolean> {
|
|
136
|
+
const results = await Promise.all(files.map(async file => detector.hasChanged(file)))
|
|
137
|
+
return results.some(changed => changed)
|
|
138
|
+
}
|
|
@@ -0,0 +1,168 @@
|
|
|
1
|
+
import type {Debouncer} from '@bfra.me/es/watcher'
|
|
2
|
+
import type {FileChangeEvent} from '../types'
|
|
3
|
+
|
|
4
|
+
import {createDebouncer as createBaseDebouncer} from '@bfra.me/es/watcher'
|
|
5
|
+
|
|
6
|
+
/** Options for the documentation change debouncer. */
export interface DocDebouncerOptions {
  // Quiet period before a batch is delivered (default 300ms)
  readonly debounceMs?: number
  // Upper bound on how long events may wait before a forced flush (default 5000ms)
  readonly maxWaitMs?: number
}

/** Receives each deduplicated batch of change events. */
export type BatchChangeHandler = (events: readonly FileChangeEvent[]) => void | Promise<void>

/** Debouncer that batches file change events before handing them to a handler. */
export interface DocDebouncer {
  // Queue one event
  readonly add: (event: FileChangeEvent) => void
  // Queue several events at once
  readonly addAll: (events: readonly FileChangeEvent[]) => void
  // Deliver queued events immediately
  readonly flush: () => void
  // Discard queued events without delivering them
  readonly cancel: () => void
  // Number of events currently queued
  readonly getPendingCount: () => number
}
|
|
20
|
+
|
|
21
|
+
/**
 * Create a debouncer that batches file change events and delivers them,
 * deduplicated, to the handler. On top of the base debouncer's quiet-period
 * behavior, a max-wait timer guarantees a flush even under a constant stream
 * of events.
 *
 * @param handler - Called with each deduplicated, non-empty batch
 * @param options - Debounce window and max-wait bound
 * @returns A DocDebouncer wrapping the base debouncer from @bfra.me/es/watcher
 */
export function createDocDebouncer(
  handler: BatchChangeHandler,
  options: DocDebouncerOptions = {},
): DocDebouncer {
  const {debounceMs = 300, maxWaitMs = 5000} = options

  // Mirror of the base debouncer's queue, kept so getPendingCount can answer
  // synchronously; reset whenever the base debouncer fires.
  let pendingEvents: FileChangeEvent[] = []
  let maxWaitTimeout: ReturnType<typeof setTimeout> | undefined

  // Deliver a batch: cancel the max-wait timer, dedupe, and invoke the
  // handler. Handler errors are logged rather than propagated, so one bad
  // batch cannot kill the debouncer.
  function processEvents(events: FileChangeEvent[]): void {
    if (maxWaitTimeout !== undefined) {
      clearTimeout(maxWaitTimeout)
      maxWaitTimeout = undefined
    }

    const deduplicated = deduplicateEvents(events)
    if (deduplicated.length > 0) {
      Promise.resolve(handler(deduplicated)).catch(error => {
        console.error('[doc-sync] Error in batch handler:', error)
      })
    }
  }

  // The base debouncer owns the quiet-period timing; its callback delivers
  // the batch and clears our mirror queue.
  const baseDebouncer: Debouncer<FileChangeEvent> = createBaseDebouncer(events => {
    processEvents(events)
    pendingEvents = []
  }, debounceMs)

  // Arm the max-wait timer once per batch; processEvents disarms it.
  function startMaxWaitTimer(): void {
    if (maxWaitTimeout === undefined) {
      maxWaitTimeout = setTimeout(() => {
        baseDebouncer.flush()
      }, maxWaitMs)
    }
  }

  return {
    add(event: FileChangeEvent): void {
      pendingEvents.push(event)
      startMaxWaitTimer()
      baseDebouncer.add(event)
    },

    addAll(events: readonly FileChangeEvent[]): void {
      for (const event of events) {
        pendingEvents.push(event)
        baseDebouncer.add(event)
      }
      // Only arm the timer when something was actually queued.
      if (events.length > 0) {
        startMaxWaitTimer()
      }
    },

    // Force immediate delivery of whatever is queued.
    flush(): void {
      if (maxWaitTimeout !== undefined) {
        clearTimeout(maxWaitTimeout)
        maxWaitTimeout = undefined
      }
      baseDebouncer.flush()
    },

    // Drop queued events without delivering them.
    cancel(): void {
      if (maxWaitTimeout !== undefined) {
        clearTimeout(maxWaitTimeout)
        maxWaitTimeout = undefined
      }
      pendingEvents = []
      baseDebouncer.cancel()
    },

    getPendingCount(): number {
      return pendingEvents.length
    },
  }
}
|
|
96
|
+
|
|
97
|
+
export function deduplicateEvents(events: readonly FileChangeEvent[]): FileChangeEvent[] {
|
|
98
|
+
const latestByPath = new Map<string, FileChangeEvent>()
|
|
99
|
+
|
|
100
|
+
for (const event of events) {
|
|
101
|
+
const existing = latestByPath.get(event.path)
|
|
102
|
+
|
|
103
|
+
// Keep the most recent event for each path
|
|
104
|
+
if (existing === undefined || event.timestamp > existing.timestamp) {
|
|
105
|
+
latestByPath.set(event.path, event)
|
|
106
|
+
}
|
|
107
|
+
}
|
|
108
|
+
|
|
109
|
+
return [...latestByPath.values()]
|
|
110
|
+
}
|
|
111
|
+
|
|
112
|
+
export function consolidateEvents(events: readonly FileChangeEvent[]): FileChangeEvent[] {
|
|
113
|
+
const byPath = new Map<string, FileChangeEvent[]>()
|
|
114
|
+
|
|
115
|
+
for (const event of events) {
|
|
116
|
+
const existing = byPath.get(event.path)
|
|
117
|
+
if (existing === undefined) {
|
|
118
|
+
byPath.set(event.path, [event])
|
|
119
|
+
} else {
|
|
120
|
+
existing.push(event)
|
|
121
|
+
}
|
|
122
|
+
}
|
|
123
|
+
|
|
124
|
+
const consolidated: FileChangeEvent[] = []
|
|
125
|
+
|
|
126
|
+
for (const [, pathEvents] of byPath) {
|
|
127
|
+
const firstEvent = pathEvents[0]
|
|
128
|
+
if (firstEvent === undefined) {
|
|
129
|
+
continue
|
|
130
|
+
}
|
|
131
|
+
|
|
132
|
+
if (pathEvents.length === 1) {
|
|
133
|
+
consolidated.push(firstEvent)
|
|
134
|
+
continue
|
|
135
|
+
}
|
|
136
|
+
|
|
137
|
+
// Sort by timestamp to get event sequence
|
|
138
|
+
pathEvents.sort((a, b) => a.timestamp.getTime() - b.timestamp.getTime())
|
|
139
|
+
|
|
140
|
+
const first = pathEvents[0]
|
|
141
|
+
const last = pathEvents.at(-1)
|
|
142
|
+
|
|
143
|
+
if (first === undefined || last === undefined) {
|
|
144
|
+
continue
|
|
145
|
+
}
|
|
146
|
+
|
|
147
|
+
// If file was added then removed, skip entirely
|
|
148
|
+
if (first.type === 'add' && last.type === 'unlink') {
|
|
149
|
+
continue
|
|
150
|
+
}
|
|
151
|
+
|
|
152
|
+
// If file was removed then added, treat as change
|
|
153
|
+
if (first.type === 'unlink' && last.type === 'add') {
|
|
154
|
+
consolidated.push({
|
|
155
|
+
type: 'change',
|
|
156
|
+
path: last.path,
|
|
157
|
+
packageName: last.packageName,
|
|
158
|
+
timestamp: last.timestamp,
|
|
159
|
+
})
|
|
160
|
+
continue
|
|
161
|
+
}
|
|
162
|
+
|
|
163
|
+
// Otherwise use the latest event
|
|
164
|
+
consolidated.push(last)
|
|
165
|
+
}
|
|
166
|
+
|
|
167
|
+
return consolidated
|
|
168
|
+
}
|
|
@@ -0,0 +1,164 @@
|
|
|
1
|
+
import type {WatcherOptions} from '@bfra.me/es/watcher'
|
|
2
|
+
import type {FileChangeEvent} from '../types'
|
|
3
|
+
|
|
4
|
+
import path from 'node:path'
|
|
5
|
+
import process from 'node:process'
|
|
6
|
+
|
|
7
|
+
import {createFileWatcher as createBaseWatcher} from '@bfra.me/es/watcher'
|
|
8
|
+
|
|
9
|
+
/** Options for creating the documentation file watcher. */
export interface DocWatcherOptions {
  // Monorepo root the watch patterns are resolved against; defaults to process.cwd()
  readonly rootDir?: string
  // Debounce window forwarded to the base watcher (default 300)
  readonly debounceMs?: number
  // Extra glob patterns to ignore on top of the defaults
  readonly additionalIgnore?: readonly string[]
  // Use polling instead of native FS events (default false)
  readonly usePolling?: boolean
}

// Globs (relative to rootDir) for the files that feed documentation
// generation: package readmes, package.json manifests, and TS sources.
const DEFAULT_WATCH_PATTERNS = [
  'packages/*/README.md',
  'packages/*/readme.md',
  'packages/*/package.json',
  'packages/*/src/**/*.ts',
  'packages/*/src/**/*.tsx',
] as const

// Build output, VCS data, coverage, and test files are never documentation inputs.
const DEFAULT_IGNORE_PATTERNS = [
  '**/node_modules/**',
  '**/lib/**',
  '**/dist/**',
  '**/.git/**',
  '**/coverage/**',
  '**/*.test.ts',
  '**/*.spec.ts',
  '**/__tests__/**',
  '**/__mocks__/**',
] as const
|
|
35
|
+
|
|
36
|
+
/** Callback invoked with each batch of documentation file change events. */
export type DocChangeHandler = (events: readonly FileChangeEvent[]) => void | Promise<void>

/** File watcher over documentation-relevant files in the monorepo. */
export interface DocFileWatcher {
  // Attach the change listener and start the underlying watcher
  readonly start: () => Promise<void>
  // Stop watching and drop all registered handlers
  readonly close: () => Promise<void>
  // Register a change handler; returns an unsubscribe function
  readonly onChanges: (handler: DocChangeHandler) => () => void
  // Absolute glob paths being watched
  readonly getWatchedPaths: () => readonly string[]
}
|
|
44
|
+
|
|
45
|
+
function extractPackageName(filePath: string, root: string): string | undefined {
|
|
46
|
+
const relativePath = path.relative(root, filePath)
|
|
47
|
+
const parts = relativePath.split(path.sep)
|
|
48
|
+
|
|
49
|
+
if (parts[0] === 'packages' && parts.length > 1) {
|
|
50
|
+
return parts[1]
|
|
51
|
+
}
|
|
52
|
+
|
|
53
|
+
return undefined
|
|
54
|
+
}
|
|
55
|
+
|
|
56
|
+
/**
 * Create a file watcher over the monorepo's documentation-relevant files
 * (package readmes, package.json, TS sources), wrapping the base watcher
 * from @bfra.me/es/watcher and enriching events with the owning package name.
 *
 * @param options - Root directory, debounce, ignore additions, polling flag
 * @returns A DocFileWatcher
 */
export function createDocWatcher(options: DocWatcherOptions = {}): DocFileWatcher {
  const {
    rootDir = process.cwd(),
    debounceMs = 300,
    additionalIgnore = [],
    usePolling = false,
  } = options

  // Resolve the default globs against the chosen root.
  const watchPaths = DEFAULT_WATCH_PATTERNS.map(pattern => path.join(rootDir, pattern))
  const ignoredPatterns: string[] = [...DEFAULT_IGNORE_PATTERNS, ...additionalIgnore]

  const watcherOptions: WatcherOptions = {
    debounceMs,
    ignored: ignoredPatterns,
    usePolling,
  }

  const baseWatcher = createBaseWatcher(watchPaths, watcherOptions)
  const handlers = new Set<DocChangeHandler>()

  // Convert base watcher changes (numeric timestamps) into FileChangeEvents
  // with Date timestamps and the owning package name attached.
  function transformToDocEvents(
    changes: readonly {path: string; type: 'add' | 'change' | 'unlink'; timestamp: number}[],
  ): FileChangeEvent[] {
    return changes.map(change => ({
      type: change.type,
      path: change.path,
      packageName: extractPackageName(change.path, rootDir),
      timestamp: new Date(change.timestamp),
    }))
  }

  return {
    // NOTE(review): each call to start() attaches another 'change' listener
    // to the base watcher — looks intended to be called once; confirm.
    async start(): Promise<void> {
      baseWatcher.on('change', event => {
        const docEvents = transformToDocEvents(event.changes)
        // Handler failures are logged, never propagated to the watcher.
        for (const handler of handlers) {
          Promise.resolve(handler(docEvents)).catch(error => {
            console.error('[doc-sync] Error in change handler:', error)
          })
        }
      })

      await baseWatcher.start()
    },

    async close(): Promise<void> {
      await baseWatcher.close()
      handlers.clear()
    },

    // Register a handler; the returned function unsubscribes it.
    onChanges(handler: DocChangeHandler): () => void {
      handlers.add(handler)
      return () => {
        handlers.delete(handler)
      }
    },

    getWatchedPaths(): readonly string[] {
      return watchPaths
    },
  }
}
|
|
118
|
+
|
|
119
|
+
export type FileCategory = 'readme' | 'source' | 'package-json' | 'unknown'
|
|
120
|
+
|
|
121
|
+
export function categorizeFile(filePath: string): FileCategory {
|
|
122
|
+
const basename = path.basename(filePath).toLowerCase()
|
|
123
|
+
|
|
124
|
+
if (basename === 'readme.md' || basename === 'readme') {
|
|
125
|
+
return 'readme'
|
|
126
|
+
}
|
|
127
|
+
|
|
128
|
+
if (basename === 'package.json') {
|
|
129
|
+
return 'package-json'
|
|
130
|
+
}
|
|
131
|
+
|
|
132
|
+
const ext = path.extname(filePath).toLowerCase()
|
|
133
|
+
if (ext === '.ts' || ext === '.tsx') {
|
|
134
|
+
return 'source'
|
|
135
|
+
}
|
|
136
|
+
|
|
137
|
+
return 'unknown'
|
|
138
|
+
}
|
|
139
|
+
|
|
140
|
+
export function groupChangesByPackage(
|
|
141
|
+
events: readonly FileChangeEvent[],
|
|
142
|
+
): Map<string, FileChangeEvent[]> {
|
|
143
|
+
const grouped = new Map<string, FileChangeEvent[]>()
|
|
144
|
+
|
|
145
|
+
for (const event of events) {
|
|
146
|
+
const pkg = event.packageName ?? '__unknown__'
|
|
147
|
+
const existing = grouped.get(pkg)
|
|
148
|
+
|
|
149
|
+
if (existing === undefined) {
|
|
150
|
+
grouped.set(pkg, [event])
|
|
151
|
+
} else {
|
|
152
|
+
existing.push(event)
|
|
153
|
+
}
|
|
154
|
+
}
|
|
155
|
+
|
|
156
|
+
return grouped
|
|
157
|
+
}
|
|
158
|
+
|
|
159
|
+
export function filterDocumentationChanges(events: readonly FileChangeEvent[]): FileChangeEvent[] {
|
|
160
|
+
return events.filter(event => {
|
|
161
|
+
const category = categorizeFile(event.path)
|
|
162
|
+
return category !== 'unknown'
|
|
163
|
+
})
|
|
164
|
+
}
|
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
/**
 * Public surface of the watcher subsystem: change detection,
 * debouncing, and the documentation file watcher.
 */
export {
  createDocChangeDetector,
  determineRegenerationScope,
  hasAnyFileChanged,
} from './change-detector'
export type {
  DocChangeDetector,
  DocChangeDetectorOptions,
  PackageChangeAnalysis,
  RegenerationScope,
} from './change-detector'

export {consolidateEvents, createDocDebouncer, deduplicateEvents} from './debouncer'
export type {BatchChangeHandler, DocDebouncer, DocDebouncerOptions} from './debouncer'

export {
  categorizeFile,
  createDocWatcher,
  filterDocumentationChanges,
  groupChangesByPackage,
} from './file-watcher'
export type {
  DocChangeHandler,
  DocFileWatcher,
  DocWatcherOptions,
  FileCategory,
} from './file-watcher'
|