@ncukondo/reference-manager 0.1.0 → 0.3.0
This diff shows the content of publicly released versions of the package as published to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in the public registries.
- package/README.md +40 -0
- package/dist/chunks/detector-DHztTaFY.js +619 -0
- package/dist/chunks/detector-DHztTaFY.js.map +1 -0
- package/dist/chunks/{detector-BF8Mcc72.js → loader-mQ25o6cV.js} +303 -664
- package/dist/chunks/loader-mQ25o6cV.js.map +1 -0
- package/dist/chunks/search-Be9vzUIH.js +29541 -0
- package/dist/chunks/search-Be9vzUIH.js.map +1 -0
- package/dist/cli/commands/add.d.ts +44 -16
- package/dist/cli/commands/add.d.ts.map +1 -1
- package/dist/cli/commands/cite.d.ts +49 -0
- package/dist/cli/commands/cite.d.ts.map +1 -0
- package/dist/cli/commands/fulltext.d.ts +101 -0
- package/dist/cli/commands/fulltext.d.ts.map +1 -0
- package/dist/cli/commands/index.d.ts +14 -10
- package/dist/cli/commands/index.d.ts.map +1 -1
- package/dist/cli/commands/list.d.ts +23 -6
- package/dist/cli/commands/list.d.ts.map +1 -1
- package/dist/cli/commands/remove.d.ts +47 -12
- package/dist/cli/commands/remove.d.ts.map +1 -1
- package/dist/cli/commands/search.d.ts +24 -7
- package/dist/cli/commands/search.d.ts.map +1 -1
- package/dist/cli/commands/update.d.ts +26 -13
- package/dist/cli/commands/update.d.ts.map +1 -1
- package/dist/cli/execution-context.d.ts +60 -0
- package/dist/cli/execution-context.d.ts.map +1 -0
- package/dist/cli/helpers.d.ts +18 -0
- package/dist/cli/helpers.d.ts.map +1 -1
- package/dist/cli/index.d.ts.map +1 -1
- package/dist/cli/server-client.d.ts +73 -10
- package/dist/cli/server-client.d.ts.map +1 -1
- package/dist/cli.js +1200 -528
- package/dist/cli.js.map +1 -1
- package/dist/config/csl-styles.d.ts +83 -0
- package/dist/config/csl-styles.d.ts.map +1 -0
- package/dist/config/defaults.d.ts +10 -0
- package/dist/config/defaults.d.ts.map +1 -1
- package/dist/config/loader.d.ts.map +1 -1
- package/dist/config/schema.d.ts +84 -0
- package/dist/config/schema.d.ts.map +1 -1
- package/dist/core/csl-json/types.d.ts +15 -3
- package/dist/core/csl-json/types.d.ts.map +1 -1
- package/dist/core/library.d.ts +60 -0
- package/dist/core/library.d.ts.map +1 -1
- package/dist/features/format/bibtex.d.ts +6 -0
- package/dist/features/format/bibtex.d.ts.map +1 -0
- package/dist/features/format/citation-csl.d.ts +41 -0
- package/dist/features/format/citation-csl.d.ts.map +1 -0
- package/dist/features/format/citation-fallback.d.ts +24 -0
- package/dist/features/format/citation-fallback.d.ts.map +1 -0
- package/dist/features/format/index.d.ts +10 -0
- package/dist/features/format/index.d.ts.map +1 -0
- package/dist/features/format/json.d.ts +6 -0
- package/dist/features/format/json.d.ts.map +1 -0
- package/dist/features/format/pretty.d.ts +6 -0
- package/dist/features/format/pretty.d.ts.map +1 -0
- package/dist/features/fulltext/filename.d.ts +17 -0
- package/dist/features/fulltext/filename.d.ts.map +1 -0
- package/dist/features/fulltext/index.d.ts +7 -0
- package/dist/features/fulltext/index.d.ts.map +1 -0
- package/dist/features/fulltext/manager.d.ts +109 -0
- package/dist/features/fulltext/manager.d.ts.map +1 -0
- package/dist/features/fulltext/types.d.ts +12 -0
- package/dist/features/fulltext/types.d.ts.map +1 -0
- package/dist/features/import/cache.d.ts +37 -0
- package/dist/features/import/cache.d.ts.map +1 -0
- package/dist/features/import/detector.d.ts +42 -0
- package/dist/features/import/detector.d.ts.map +1 -0
- package/dist/features/import/fetcher.d.ts +49 -0
- package/dist/features/import/fetcher.d.ts.map +1 -0
- package/dist/features/import/importer.d.ts +61 -0
- package/dist/features/import/importer.d.ts.map +1 -0
- package/dist/features/import/index.d.ts +8 -0
- package/dist/features/import/index.d.ts.map +1 -0
- package/dist/features/import/normalizer.d.ts +15 -0
- package/dist/features/import/normalizer.d.ts.map +1 -0
- package/dist/features/import/parser.d.ts +33 -0
- package/dist/features/import/parser.d.ts.map +1 -0
- package/dist/features/import/rate-limiter.d.ts +45 -0
- package/dist/features/import/rate-limiter.d.ts.map +1 -0
- package/dist/features/operations/add.d.ts +65 -0
- package/dist/features/operations/add.d.ts.map +1 -0
- package/dist/features/operations/cite.d.ts +48 -0
- package/dist/features/operations/cite.d.ts.map +1 -0
- package/dist/features/operations/list.d.ts +28 -0
- package/dist/features/operations/list.d.ts.map +1 -0
- package/dist/features/operations/remove.d.ts +29 -0
- package/dist/features/operations/remove.d.ts.map +1 -0
- package/dist/features/operations/search.d.ts +30 -0
- package/dist/features/operations/search.d.ts.map +1 -0
- package/dist/features/operations/update.d.ts +39 -0
- package/dist/features/operations/update.d.ts.map +1 -0
- package/dist/index.js +18 -16
- package/dist/index.js.map +1 -1
- package/dist/server/index.d.ts +3 -1
- package/dist/server/index.d.ts.map +1 -1
- package/dist/server/routes/add.d.ts +11 -0
- package/dist/server/routes/add.d.ts.map +1 -0
- package/dist/server/routes/cite.d.ts +9 -0
- package/dist/server/routes/cite.d.ts.map +1 -0
- package/dist/server/routes/list.d.ts +25 -0
- package/dist/server/routes/list.d.ts.map +1 -0
- package/dist/server/routes/references.d.ts.map +1 -1
- package/dist/server/routes/search.d.ts +26 -0
- package/dist/server/routes/search.d.ts.map +1 -0
- package/dist/server.js +215 -32
- package/dist/server.js.map +1 -1
- package/package.json +15 -4
- package/dist/chunks/detector-BF8Mcc72.js.map +0 -1
- package/dist/cli/output/bibtex.d.ts +0 -6
- package/dist/cli/output/bibtex.d.ts.map +0 -1
- package/dist/cli/output/index.d.ts +0 -7
- package/dist/cli/output/index.d.ts.map +0 -1
- package/dist/cli/output/json.d.ts +0 -6
- package/dist/cli/output/json.d.ts.map +0 -1
- package/dist/cli/output/pretty.d.ts +0 -6
- package/dist/cli/output/pretty.d.ts.map +0 -1
package/dist/index.js.map
CHANGED
@@ -1 +1 @@
- {"version":3,"file":"index.js","sources":["../src/core/csl-json/validator.ts","../src/utils/logger.ts","../src/utils/file.ts","../src/utils/backup.ts","../src/features/merge/three-way.ts","../src/features/file-watcher/file-watcher.ts"],"sourcesContent":[…],"names":[],"mappings":"…"}
+ {"version":3,"file":"index.js","sources":["../src/core/csl-json/validator.ts","../src/utils/logger.ts","../src/utils/file.ts","../src/utils/backup.ts","../src/features/merge/three-way.ts","../src/features/file-watcher/file-watcher.ts"],"sourcesContent":[…],"names":["b"],"mappings":"…"}

The regenerated source map for dist/index.js covers the same six sources: the CSL-JSON validator, the logger, the atomic file helpers, backup management, the 3-way Last-Write-Wins merge, and the FileWatcher. The embedded sourcesContent is unchanged between 0.1.0 and 0.3.0; the visible differences are the "names" array gaining the renamed identifier "b", references to it in "mappings", and minor offset adjustments at the start of the mappings consistent with the +18/-16 change to dist/index.js.
OAAO,WAAW,UAAU,KAAA;AAAA,EACvC;AAEA,MAAI,gBAAgB,CAAC,eAAe;AAClC,WAAO,EAAE,OAAO,YAAY,UAAU,KAAA;AAAA,EACxC;AAEA,MAAI,CAAC,gBAAgB,eAAe;AAClC,WAAO,EAAE,OAAO,aAAa,UAAU,KAAA;AAAA,EACzC;AAGA,MAAI,UAAU,YAAY,WAAW,GAAG;AACtC,WAAO,EAAE,OAAO,YAAY,UAAU,KAAA;AAAA,EACxC;AAGA,QAAM,WAAW;AAAA,IACf;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EAAA;AAIF,MAAI,QAAQ,UAAU;AACpB,WAAO,EAAE,OAAO,UAAU,UAAU,KAAA;AAAA,EACtC;AAEA,SAAO;AAAA,IACL,OAAO;AAAA,IACP,UAAU;AAAA,MACR,OAAO;AAAA,MACP,MAAM;AAAA,MACN,OAAO;AAAA,MACP,QAAQ;AAAA,MACR;AAAA,IAAA;AAAA,EACF;AAEJ;AAKA,SAAS,UACP,MACA,OACA,QACA,SACoD;AACpD,QAAM,OAAO,YAAY,IAAI;AAC7B,QAAM,iBAAiB,aAAa,KAAK;AACzC,QAAM,kBAAkB,aAAa,MAAM;AAE3C,QAAM,SAAkB,EAAE,GAAG,KAAA;AAC7B,QAAM,iBAAkC,CAAA;AAGxC,QAAM,8BAAc,IAAY;AAAA,IAC9B,GAAG,OAAO,KAAK,IAAI;AAAA,IACnB,GAAG,OAAO,KAAK,KAAK;AAAA,IACpB,GAAG,OAAO,KAAK,MAAM;AAAA,EAAA,CACtB;AAED,aAAW,OAAO,SAAS;AACzB,UAAM,YAAa,KAAiC,GAAG;AACvD,UAAM,aAAc,MAAkC,GAAG;AACzD,UAAM,cAAe,OAAmC,GAAG;AAE3D,UAAM,EAAE,OAAO,SAAA,IAAa;AAAA,MAC1B;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IAAA;AAGD,WAAmC,GAAG,IAAI;AAE3C,QAAI,UAAU;AACZ,qBAAe,KAAK,QAAQ;AAAA,IAC9B;AAAA,EACF;AAGA,MAAI,eAAe,SAAS,GAAG;AAC7B,UAAM,aAAa;AAAA,MACjB;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IAAA;AAGF,WAAO;AAAA,MACL;AAAA,MACA,UAAU;AAAA,QACR;AAAA,QACA,IAAI,KAAK,MAAM;AAAA,QACf,QAAQ;AAAA,QACR;AAAA,QACA;AAAA,QACA;AAAA,MAAA;AAAA,IACF;AAAA,EAEJ;AAEA,SAAO,EAAE,QAAQ,UAAU,KAAA;AAC7B;AAKA,SAAS,cAAc,MAAiB,OAAkB,QAAmB;AAC3E,QAAM,8BAAc,IAAA;AACpB,QAAM,+BAAe,IAAA;AACrB,QAAM,gCAAgB,IAAA;AAEtB,aAAW,QAAQ,MAAM;AACvB,YAAQ,IAAI,YAAY,IAAI,GAAG,IAAI;AAAA,EACrC;AACA,aAAW,QAAQ,OAAO;AACxB,aAAS,IAAI,YAAY,IAAI,GAAG,IAAI;AAAA,EACtC;AACA,aAAW,QAAQ,QAAQ;AACzB,cAAU,IAAI,YAAY,IAAI,GAAG,IAAI;AAAA,EACvC;AAEA,SAAO,EAAE,SAAS,UAAU,UAAA;AAC9B;AAKA,SAAS,kBACP,UACA,WACA,YACA,SACA,QACA,WACM;AACN,QAAM,EAAE,QAAQ,YAAY,SAAA,IAAa,UAAU,UAAU,WAAW,YAAY,OAAO;AAC3F,SAAO,KAAK,UAAU;AACtB,MAAI,UAAU;AACZ,cAAU,KAAK,QAAQ;AAAA,EACzB;AACF;AAKA,SAAS,mBACP,MACA,WACA,YACA,SACA,QACA,WACM;AACN,MAAI,UAAU,WAAW,UAAU,GAAG;AACpC,WAAO,KAAK,SAAS;AAAA,EACvB,OAAO;AACL,UAAM,gBAAyB;AAAA,MAC7B,IAAI;AAAA,MACJ,MAAM;AAAA,MACN,QAAQ;AAAA,QACN;AAAA,QACA,YAAY;AAAA,QACZ,WAAW;AAAA,MAAA;AAAA,IACb;AAEF,UAAM,EAAE,QAAQ,YAAY,SAAA,IAAa;AAAA,MACvC;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IAAA;AAEF,WAAO,KAAK,UAAU;AACtB,QAAI,UAAU;AACZ,gBAAU,KAAK,QAAQ;AAAA,IACzB;AAAA,EACF;AACF;AAKA,SAAS,YACP,MACA,SACA,UACA,WACA,SACA,QAQM;AACN,QAAM,WAAW,QAAQ,IAAI,IAAI;AACjC,QAAM,YAAY,SAAS,IAAI,IAAI;AACnC,QAAM,aAAa,UAAU,IAAI,IAAI;AAErC,MAAI,YAAY,aAAa,YAAY;AACvC,sBAAkB,UAAU,WAAW,YAAY,SAAS,OAAO,QAAQ,OAAO,SAAS;AAAA,EAC7F,WAAW,CAAC,YAAY,aAAa,YAAY;AAC/C,uBAAmB,MAAM,WAAW,YAAY,SAAS,OAAO,QAAQ,OAAO,SAAS;AAAA,EAC1F,WAAW,CAAC,YAAY,aAAa,CAAC,YAAY;AAChD,WAAO,OAAO,KAAK,SAAS;AAC5B,WAAO,UAAU,KAAK,SAAS;AAAA,EACjC,WAAW,CAAC,YAAY,CAAC,aAAa,YAAY;AAChD,WAAO,OAAO,KAAK,UAAU;AAC7B,WAAO,WAAW,KAAK,UAAU;AAAA,EACnC,WAAW,YAAY,CAAC,aAAa,YAAY;AAC/C,WAAO,eAAe,KAAK,QAAQ;AAAA,EACrC,WAAW,YAAY,aAAa,CAAC,YAAY;AAC/C,WAAO,gBAAgB,KAAK,QAAQ;AAAA,EACtC,WAAW,YAAY,CAAC,aAAa,CAAC,YAAY;AAChD,WAAO,eAAe,KAAK,QAAQ;AACnC,WAAO,gBAAgB,KAAK,QAAQ;AAAA,EACtC;AACF;AAWO,SAAS,cACd,MACA,OACA,QACA,SACa;AACb,QAAM,EAAE,SAAS,UAAU,UAAA,IAAc,cAAc,MAAM,OAAO,MAAM;AAE1E,QAAM,SAAS;AAAA,IACb,QAAQ,CAAA;AAAA,IACR,WAAW,CAAA;AAAA,IACX,WAAW,CAAA;AAAA,IACX,YAAY,CAAA;AAAA,IACZ,gBAAgB,CAAA;AAAA,IAChB,iBAAiB,CAAA;AAAA,EAAC;AAGpB,QAAM,WAAW,oBAAI,IAAY,CAAC,GAAG,QAAQ,KAAA,GAAQ,GAAG,SAAS,QAAQ,GAAG,UAAU,KAAA,CAAM,CAAC;AAE7F,aAAW,QAAQ,UAAU;AAC3B,gBAAY,MAAM,SAAS,UAAU,WAAW,SAAS,MAAM;AAAA,EACjE;AAGA,MAAI,SAAsB;AAC1B,MAAI,OAAO,UAAU,SAAS,GAAG;AAC/B,UAAM,gBAAgB,OAAO,UAAU,KAAK,CAAC,MAAM,EAAE,e
AAe,YAAY;AAChF,aAAS,gBAAgB,aAAa;AAAA,EACxC;AAEA,SAAO;AAAA,IACL;AAAA,IACA,GAAG;AAAA,EAAA;AAEP;;;;;AC3VA,MAAM,sBAAsB;AAC5B,MAAM,2BAA2B;AACjC,MAAM,yBAAyB;AAC/B,MAAM,sBAAsB;AAW5B,SAAS,aAAa,UAA2B;AAC/C,QAAM,WAAW,KAAK,SAAS,QAAQ;AAGvC,MAAI,SAAS,SAAS,MAAM,EAAG,QAAO;AAGtC,MAAI,SAAS,SAAS,MAAM,EAAG,QAAO;AAGtC,MAAI,SAAS,SAAS,YAAY,EAAG,QAAO;AAG5C,MAAI,SAAS,SAAS,OAAO,EAAG,QAAO;AAGvC,MAAI,SAAS,WAAW,GAAG,KAAK,SAAS,SAAS,MAAM,EAAG,QAAO;AAGlE,MAAI,SAAS,SAAS,GAAG,EAAG,QAAO;AAEnC,SAAO;AACT;AAYO,MAAM,oBAAoB,aAAa;AAAA,EAC3B;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAET,UAA4B;AAAA,EAC5B,WAAW;AAAA,EACX,qCAAkD,IAAA;AAAA,EAE1D,YAAY,WAAmB,SAA8B;AAC3D,UAAA;AACA,SAAK,YAAY;AACjB,SAAK,aAAa,SAAS,cAAc;AACzC,SAAK,iBAAiB,SAAS,kBAAkB;AACjD,SAAK,aAAa,SAAS,cAAc;AACzC,SAAK,eAAe,SAAS,gBAAgB;AAC7C,SAAK,aAAa,SAAS,cAAc;AAAA,EAC3C;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,QAAuB;AAC3B,QAAI,KAAK,UAAU;AACjB;AAAA,IACF;AAEA,WAAO,IAAI,QAAQ,CAAC,SAAS,WAAW;AACtC,WAAK,UAAU,SAAS,MAAM,KAAK,WAAW;AAAA,QAC5C,SAAS;AAAA,QACT,YAAY;AAAA,QACZ,YAAY,KAAK;AAAA,QACjB,UAAU,KAAK;AAAA,QACf,eAAe;AAAA,QACf,kBAAkB;AAAA,MAAA,CACnB;AAED,WAAK,QAAQ,GAAG,SAAS,MAAM;AAC7B,aAAK,WAAW;AAChB,aAAK,KAAK,OAAO;AACjB,gBAAA;AAAA,MACF,CAAC;AAED,WAAK,QAAQ,GAAG,SAAS,CAAC,UAAmB;AAC3C,aAAK,KAAK,SAAS,KAAK;AACxB,YAAI,CAAC,KAAK,UAAU;AAClB,iBAAO,KAAK;AAAA,QACd;AAAA,MACF,CAAC;AAED,WAAK,QAAQ,GAAG,UAAU,CAAC,aAAqB;AAC9C,aAAK,iBAAiB,QAAQ;AAAA,MAChC,CAAC;AAED,WAAK,QAAQ,GAAG,OAAO,CAAC,aAAqB;AAC3C,aAAK,iBAAiB,QAAQ;AAAA,MAChC,CAAC;AAAA,IACH,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA,EAKQ,iBAAiB,UAAwB;AAE/C,UAAM,gBAAgB,KAAK,eAAe,IAAI,QAAQ;AACtD,QAAI,eAAe;AACjB,mBAAa,aAAa;AAAA,IAC5B;AAGA,UAAM,QAAQ,WAAW,MAAM;AAC7B,WAAK,eAAe,OAAO,QAAQ;AACnC,WAAK,KAAK,UAAU,QAAQ;AAC5B,WAAK,iBAAiB,QAAQ;AAAA,IAChC,GAAG,KAAK,UAAU;AAElB,SAAK,eAAe,IAAI,UAAU,KAAK;AAAA,EACzC;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,iBAAiB,UAAiC;AAE9D,QAAI,KAAK,QAAQ,QAAQ,EAAE,YAAA,MAAkB,SAAS;AACpD;AAAA,IACF;AAEA,QAAI,YAA0B;AAE9B,aAAS,UAAU,GAAG,WAAW,KAAK,YAAY,WAAW;AAC3D,UAAI;AACF,cAAM,UAAU,MAAM,GAAG,SAAS,UAAU,OAAO;AACnD,cAAM,SAAS,KAAK,MAAM,OAAO;AACjC,aAAK,KAAK,UAAU,UAAU,MAAM;AACpC;AAAA,MACF,SAAS,OAAO;AACd,oBAAY;AACZ,YAAI,UAAU,KAAK,YAAY;AAC7B,gBAAM,KAAK,MAAM,KAAK,YAAY;AAAA,QACpC;AAAA,MACF;AAAA,IACF;AAEA,SAAK,KAAK,cAAc,UAAU,SAAS;AAAA,EAC7C;AAAA;AAAA;AAAA;AAAA,EAKQ,MAAM,IAA2B;AACvC,WAAO,IAAI,QAAQ,CAAC,YAAY,WAAW,SAAS,EAAE,CAAC;AAAA,EACzD;AAAA;AAAA;AAAA;AAAA,EAKA,QAAc;AACZ,QAAI,KAAK,SAAS;AAChB,WAAK,QAAQ,MAAA;AACb,WAAK,UAAU;AAAA,IACjB;AAGA,eAAW,SAAS,KAAK,eAAe,OAAA,GAAU;AAChD,mBAAa,KAAK;AAAA,IACpB;AACA,SAAK,eAAe,MAAA;AAEpB,SAAK,WAAW;AAAA,EAClB;AAAA;AAAA;AAAA;AAAA,EAKA,UAAkB;AAChB,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAKA,aAAsB;AACpB,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAKA,gBAAwB;AACtB,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAKA,oBAA4B;AAC1B,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAKA,kBAA0B;AACxB,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAKA,gBAAwB;AACtB,WAAO,KAAK;AAAA,EACd;AACF;"}
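The map above embeds the source of the package's FileWatcher (chokidar-based, with debounced change events and retried JSON parsing). As a reading aid only, a minimal usage sketch; the import specifier and the watched path are assumptions, since the class ships inside a hashed chunk and this diff does not show a public entry point for it:

import { FileWatcher } from "@ncukondo/reference-manager"; // hypothetical import, not confirmed by this diff

const watcher = new FileWatcher("./references.json", { debounceMs: 500, maxRetries: 10 });

watcher.on("change", (filePath: string) => console.log("changed:", filePath));
watcher.on("parsed", (filePath: string, data: unknown) => console.log("parsed:", filePath, data));
watcher.on("parseError", (filePath: string, error: Error | null) => console.error("gave up on:", filePath, error));

await watcher.start(); // resolves once chokidar reports "ready"
// ... later
watcher.close();       // clears pending debounce timers and stops watching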
package/dist/server/index.d.ts
CHANGED
@@ -1,9 +1,11 @@
 import { Hono } from "hono";
+import type { Config } from "../config/schema.js";
 import type { Library } from "../core/library.js";
 /**
  * Create the main Hono server application.
  * @param library - Library instance for the references API
+ * @param config - Configuration for the server
  * @returns Hono application
  */
-export declare function createServer(library: Library): Hono<import("hono/types").BlankEnv, import("hono/types").BlankSchema, "/">;
+export declare function createServer(library: Library, config: Config): Hono<import("hono/types").BlankEnv, import("hono/types").BlankSchema, "/">;
 //# sourceMappingURL=index.d.ts.map

package/dist/server/index.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/server/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,IAAI,EAAE,MAAM,MAAM,CAAC;AAC5B,OAAO,KAAK,EAAE,OAAO,EAAE,MAAM,oBAAoB,CAAC;
+{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/server/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,IAAI,EAAE,MAAM,MAAM,CAAC;AAC5B,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,qBAAqB,CAAC;AAClD,OAAO,KAAK,EAAE,OAAO,EAAE,MAAM,oBAAoB,CAAC;AAQlD;;;;;GAKG;AACH,wBAAgB,YAAY,CAAC,OAAO,EAAE,OAAO,EAAE,MAAM,EAAE,MAAM,8EA2B5D"}
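createServer now requires the parsed Config as a second argument; it is threaded through to the add route's PubMed settings, as the server.js diff later in this listing shows. A sketch of an adapted call site, assuming hypothetical import specifiers and placeholder library/config values (only the signature and the /health response come from this diff):

import { createServer } from "@ncukondo/reference-manager/server"; // assumed subpath export
import type { Config } from "@ncukondo/reference-manager/config";  // assumed
import type { Library } from "@ncukondo/reference-manager";        // assumed

declare const library: Library; // however the application already builds its Library
declare const config: Config;   // newly required in 0.3.0

const app = createServer(library, config);

// Hono's built-in request helper doubles as a smoke test:
const health = await app.request("/health");
console.log(await health.json()); // { "status": "ok" }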
package/dist/server/routes/add.d.ts
ADDED
@@ -0,0 +1,11 @@
+import { Hono } from "hono";
+import type { Config } from "../../config/schema.js";
+import type { Library } from "../../core/library.js";
+/**
+ * Create add route for importing references.
+ * @param library - Library instance to use for operations
+ * @param config - Configuration with PubMed settings
+ * @returns Hono app with add route
+ */
+export declare function createAddRoute(library: Library, config: Config): Hono<import("hono/types").BlankEnv, import("hono/types").BlankSchema, "/">;
+//# sourceMappingURL=add.d.ts.map

package/dist/server/routes/add.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"add.d.ts","sourceRoot":"","sources":["../../../src/server/routes/add.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,IAAI,EAAE,MAAM,MAAM,CAAC;AAC5B,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,wBAAwB,CAAC;AACrD,OAAO,KAAK,EAAE,OAAO,EAAE,MAAM,uBAAuB,CAAC;AAoBrD;;;;;GAKG;AACH,wBAAgB,cAAc,CAAC,OAAO,EAAE,OAAO,EAAE,MAAM,EAAE,MAAM,8EAiD9D"}
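For orientation, the request shape accepted by the new add route, inferred from the createAddRoute handler in the server.js diff below; the DOI-like input value is only an example:

// `app` is the Hono instance returned by createServer(library, config).
const res = await app.request("/api/add", {
  method: "POST",
  headers: { "content-type": "application/json" },
  body: JSON.stringify({
    inputs: ["10.1000/xyz123"],   // required: non-empty string[]; anything else is rejected with 400
    options: { force: false },    // optional; force defaults to false, format is forwarded when present
  }),
});
console.log(res.status, await res.json()); // the result object returned by addReferences(...)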
package/dist/server/routes/cite.d.ts
ADDED
@@ -0,0 +1,9 @@
+import { Hono } from "hono";
+import type { Library } from "../../core/library.js";
+/**
+ * Create cite route for generating citations.
+ * @param library - Library instance to use for operations
+ * @returns Hono app with cite route
+ */
+export declare function createCiteRoute(library: Library): Hono<import("hono/types").BlankEnv, import("hono/types").BlankSchema, "/">;
+//# sourceMappingURL=cite.d.ts.map

package/dist/server/routes/cite.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"cite.d.ts","sourceRoot":"","sources":["../../../src/server/routes/cite.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,IAAI,EAAE,MAAM,MAAM,CAAC;AAE5B,OAAO,KAAK,EAAE,OAAO,EAAE,MAAM,uBAAuB,CAAC;AAqCrD;;;;GAIG;AACH,wBAAgB,eAAe,CAAC,OAAO,EAAE,OAAO,8EA2B/C"}
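The cite route validates its body with CiteRequestSchema (see the server.js diff below). A sketch with illustrative identifier and style values:

// `app` is the Hono instance returned by createServer(library, config).
const res = await app.request("/api/cite", {
  method: "POST",
  headers: { "content-type": "application/json" },
  body: JSON.stringify({
    identifiers: ["smith2020"], // required: non-empty string[]
    inText: true,               // optional
    style: "apa",               // optional CSL style name (illustrative value)
    format: "text",             // optional: "text" | "html"
  }),
});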
package/dist/server/routes/list.d.ts
ADDED
@@ -0,0 +1,25 @@
+import { Hono } from "hono";
+import { z } from "zod";
+import type { Library } from "../../core/library.js";
+/**
+ * Request body schema for list endpoint
+ */
+declare const listRequestBodySchema: z.ZodObject<{
+    format: z.ZodOptional<z.ZodEnum<{
+        uuid: "uuid";
+        json: "json";
+        bibtex: "bibtex";
+        pretty: "pretty";
+        "ids-only": "ids-only";
+    }>>;
+}, z.core.$strip>;
+/**
+ * Request body type for list endpoint
+ */
+export type ListRequestBody = z.infer<typeof listRequestBodySchema>;
+/**
+ * Creates list route for HTTP server
+ */
+export declare function createListRoute(library: Library): Hono<import("hono/types").BlankEnv, import("hono/types").BlankSchema, "/">;
+export {};
+//# sourceMappingURL=list.d.ts.map

package/dist/server/routes/list.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"list.d.ts","sourceRoot":"","sources":["../../../src/server/routes/list.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,IAAI,EAAE,MAAM,MAAM,CAAC;AAC5B,OAAO,EAAE,CAAC,EAAE,MAAM,KAAK,CAAC;AACxB,OAAO,KAAK,EAAE,OAAO,EAAE,MAAM,uBAAuB,CAAC;AAGrD;;GAEG;AACH,QAAA,MAAM,qBAAqB;;;;;;;;iBAEzB,CAAC;AAEH;;GAEG;AACH,MAAM,MAAM,eAAe,GAAG,CAAC,CAAC,KAAK,CAAC,OAAO,qBAAqB,CAAC,CAAC;AAEpE;;GAEG;AACH,wBAAgB,eAAe,CAAC,OAAO,EAAE,OAAO,8EAkC/C"}
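ListRequestBody narrows format to the enum in the declaration above. A typed sketch; the type-only import path is assumed from the .d.ts location:

import type { ListRequestBody } from "@ncukondo/reference-manager/server/routes/list"; // assumed path

// `app` is the Hono instance returned by createServer(library, config).
const body: ListRequestBody = { format: "bibtex" }; // or "pretty" | "json" | "ids-only" | "uuid", or omit
const res = await app.request("/api/list", {
  method: "POST",
  headers: { "content-type": "application/json" },
  body: JSON.stringify(body),
});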
package/dist/server/routes/references.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"references.d.ts","sourceRoot":"","sources":["../../../src/server/routes/references.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,IAAI,EAAE,MAAM,MAAM,CAAC;AAC5B,OAAO,KAAK,EAAE,OAAO,EAAE,MAAM,uBAAuB,CAAC;
+{"version":3,"file":"references.d.ts","sourceRoot":"","sources":["../../../src/server/routes/references.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,IAAI,EAAE,MAAM,MAAM,CAAC;AAC5B,OAAO,KAAK,EAAE,OAAO,EAAE,MAAM,uBAAuB,CAAC;AAIrD;;;;GAIG;AACH,wBAAgB,qBAAqB,CAAC,OAAO,EAAE,OAAO,8EAmKrD"}
package/dist/server/routes/search.d.ts
ADDED
@@ -0,0 +1,26 @@
+import { Hono } from "hono";
+import { z } from "zod";
+import type { Library } from "../../core/library.js";
+/**
+ * Request body schema for search endpoint
+ */
+declare const searchRequestBodySchema: z.ZodObject<{
+    query: z.ZodString;
+    format: z.ZodOptional<z.ZodEnum<{
+        uuid: "uuid";
+        json: "json";
+        bibtex: "bibtex";
+        pretty: "pretty";
+        "ids-only": "ids-only";
+    }>>;
+}, z.core.$strip>;
+/**
+ * Request body type for search endpoint
+ */
+export type SearchRequestBody = z.infer<typeof searchRequestBodySchema>;
+/**
+ * Creates search route for HTTP server
+ */
+export declare function createSearchRoute(library: Library): Hono<import("hono/types").BlankEnv, import("hono/types").BlankSchema, "/">;
+export {};
+//# sourceMappingURL=search.d.ts.map

package/dist/server/routes/search.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"search.d.ts","sourceRoot":"","sources":["../../../src/server/routes/search.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,IAAI,EAAE,MAAM,MAAM,CAAC;AAC5B,OAAO,EAAE,CAAC,EAAE,MAAM,KAAK,CAAC;AACxB,OAAO,KAAK,EAAE,OAAO,EAAE,MAAM,uBAAuB,CAAC;AAGrD;;GAEG;AACH,QAAA,MAAM,uBAAuB;;;;;;;;;iBAG3B,CAAC;AAEH;;GAEG;AACH,MAAM,MAAM,iBAAiB,GAAG,CAAC,CAAC,KAAK,CAAC,OAAO,uBAAuB,CAAC,CAAC;AAExE;;GAEG;AACH,wBAAgB,iBAAiB,CAAC,OAAO,EAAE,OAAO,8EAoCjD"}
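The search route mirrors list but additionally requires a query string. A sketch against the /api/search mount point added in createServer below; the query text is made up:

// `app` is the Hono instance returned by createServer(library, config).
const res = await app.request("/api/search", {
  method: "POST",
  headers: { "content-type": "application/json" },
  body: JSON.stringify({
    query: "deep learning",  // required
    format: "pretty",        // optional, same enum as the list route
  }),
});
console.log(await res.json());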
package/dist/server.js
CHANGED
@@ -1,8 +1,116 @@
 import { Hono } from "hono";
+import { a as addReferences, c as citeReferences, l as listReferences, u as updateReference, r as removeReference, s as searchReferences } from "./chunks/search-Be9vzUIH.js";
+import { z } from "zod";
+function buildPubmedConfig(config) {
+  const pubmedConfig = {};
+  if (config.pubmed.email !== void 0) {
+    pubmedConfig.email = config.pubmed.email;
+  }
+  if (config.pubmed.apiKey !== void 0) {
+    pubmedConfig.apiKey = config.pubmed.apiKey;
+  }
+  return pubmedConfig;
+}
+function createAddRoute(library, config) {
+  const route = new Hono();
+  route.post("/", async (c) => {
+    let body;
+    try {
+      body = await c.req.json();
+    } catch {
+      return c.json({ error: "Invalid JSON" }, 400);
+    }
+    if (!body || typeof body !== "object") {
+      return c.json({ error: "Request body must be an object" }, 400);
+    }
+    const { inputs, options } = body;
+    if (!inputs || !Array.isArray(inputs) || inputs.length === 0) {
+      return c.json({ error: "inputs must be a non-empty array of strings" }, 400);
+    }
+    if (!inputs.every((input) => typeof input === "string")) {
+      return c.json({ error: "All inputs must be strings" }, 400);
+    }
+    const addOptions = {
+      force: options?.force ?? false,
+      pubmedConfig: buildPubmedConfig(config)
+    };
+    if (options?.format) {
+      addOptions.format = options.format;
+    }
+    const result = await addReferences(inputs, library, addOptions);
+    return c.json(result);
+  });
+  return route;
+}
+const CiteRequestSchema = z.object({
+  identifiers: z.array(z.string()).min(1, "identifiers must be a non-empty array"),
+  byUuid: z.boolean().optional(),
+  inText: z.boolean().optional(),
+  style: z.string().optional(),
+  cslFile: z.string().optional(),
+  locale: z.string().optional(),
+  format: z.enum(["text", "html"]).optional()
+});
+function buildCiteOptions(body) {
+  return {
+    identifiers: body.identifiers,
+    ...body.byUuid !== void 0 && { byUuid: body.byUuid },
+    ...body.inText !== void 0 && { inText: body.inText },
+    ...body.style !== void 0 && { style: body.style },
+    ...body.cslFile !== void 0 && { cslFile: body.cslFile },
+    ...body.locale !== void 0 && { locale: body.locale },
+    ...body.format !== void 0 && { format: body.format }
+  };
+}
+function createCiteRoute(library) {
+  const route = new Hono();
+  route.post("/", async (c) => {
+    let rawBody;
+    try {
+      rawBody = await c.req.json();
+    } catch {
+      return c.json({ error: "Invalid JSON" }, 400);
+    }
+    const parseResult = CiteRequestSchema.safeParse(rawBody);
+    if (!parseResult.success) {
+      const errorMessage = parseResult.error.issues[0]?.message ?? "Invalid request body";
+      return c.json({ error: errorMessage }, 400);
+    }
+    const result = await citeReferences(library, buildCiteOptions(parseResult.data));
+    return c.json(result);
+  });
+  return route;
+}
 const healthRoute = new Hono();
 healthRoute.get("/", (c) => {
   return c.json({ status: "ok" });
 });
+const listRequestBodySchema = z.object({
+  format: z.enum(["pretty", "json", "bibtex", "ids-only", "uuid"]).optional()
+});
+function createListRoute(library) {
+  const route = new Hono();
+  route.post("/", async (c) => {
+    let body;
+    try {
+      body = await c.req.json();
+    } catch {
+      return c.json({ error: "Invalid JSON" }, 400);
+    }
+    const parseResult = listRequestBodySchema.safeParse(body);
+    if (!parseResult.success) {
+      return c.json({ error: "Request body must be an object" }, 400);
+    }
+    const requestBody = parseResult.data;
+    const options = {};
+    if (requestBody.format !== void 0) {
+      options.format = requestBody.format;
+    }
+    const result = listReferences(library, options);
+    return c.json(result);
+  });
+  return route;
+}
 function createReferencesRoute(library) {
   const route = new Hono();
   route.get("/", (c) => {
@@ -10,7 +118,7 @@ function createReferencesRoute(library) {
     const items = references.map((ref) => ref.getItem());
     return c.json(items);
   });
-  route.get("/:uuid", (c) => {
+  route.get("/uuid/:uuid", (c) => {
     const uuid = c.req.param("uuid");
     const ref = library.findByUuid(uuid);
     if (!ref) {
@@ -18,6 +126,14 @@ function createReferencesRoute(library) {
     }
     return c.json(ref.getItem());
   });
+  route.get("/id/:id", (c) => {
+    const id = c.req.param("id");
+    const ref = library.findById(id);
+    if (!ref) {
+      return c.json({ error: "Reference not found" }, 404);
+    }
+    return c.json(ref.getItem());
+  });
   route.post("/", async (c) => {
     try {
       const body = await c.req.json();
@@ -38,51 +154,118 @@ function createReferencesRoute(library) {
       );
     }
   });
-  route.put("/:uuid", async (c) => {
+  route.put("/uuid/:uuid", async (c) => {
     const uuid = c.req.param("uuid");
-
-
-
+    let body;
+    try {
+      body = await c.req.json();
+    } catch {
+      return c.json({ error: "Invalid JSON" }, 400);
+    }
+    if (!body || typeof body !== "object") {
+      return c.json({ error: "Request body must be an object" }, 400);
     }
+    const result = await updateReference(library, {
+      identifier: uuid,
+      byUuid: true,
+      updates: body,
+      onIdCollision: "suffix"
+    });
+    if (!result.updated) {
+      const status = result.idCollision ? 409 : 404;
+      return c.json(result, status);
+    }
+    return c.json(result);
+  });
+  route.put("/id/:id", async (c) => {
+    const id = c.req.param("id");
+    let body;
     try {
-
-
-
-
-
-
-      library.add(body);
-      const updatedRef = library.findByUuid(uuid);
-      if (!updatedRef) {
-        return c.json({ error: "Failed to update reference" }, 500);
-      }
-      return c.json(updatedRef.getItem());
-    } catch (error) {
-      return c.json(
-        {
-          error: "Invalid request body",
-          details: error instanceof Error ? error.message : String(error)
-        },
-        400
-      );
+      body = await c.req.json();
+    } catch {
+      return c.json({ error: "Invalid JSON" }, 400);
+    }
+    if (!body || typeof body !== "object") {
+      return c.json({ error: "Request body must be an object" }, 400);
     }
+    const result = await updateReference(library, {
+      identifier: id,
+      byUuid: false,
+      updates: body,
+      onIdCollision: "suffix"
+    });
+    if (!result.updated) {
+      const status = result.idCollision ? 409 : 404;
+      return c.json(result, status);
+    }
+    return c.json(result);
   });
-  route.delete("/:uuid", (c) => {
+  route.delete("/uuid/:uuid", async (c) => {
     const uuid = c.req.param("uuid");
-    const
-
-
+    const result = await removeReference(library, {
+      identifier: uuid,
+      byUuid: true
+    });
+    if (!result.removed) {
+      return c.json(result, 404);
+    }
+    return c.json(result);
+  });
+  route.delete("/id/:id", async (c) => {
+    const id = c.req.param("id");
+    const result = await removeReference(library, {
+      identifier: id,
+      byUuid: false
+    });
+    if (!result.removed) {
+      return c.json(result, 404);
+    }
+    return c.json(result);
+  });
+  return route;
+}
+const searchRequestBodySchema = z.object({
+  query: z.string(),
+  format: z.enum(["pretty", "json", "bibtex", "ids-only", "uuid"]).optional()
+});
+function createSearchRoute(library) {
+  const route = new Hono();
+  route.post("/", async (c) => {
+    let body;
+    try {
+      body = await c.req.json();
+    } catch {
+      return c.json({ error: "Invalid JSON" }, 400);
+    }
+    const parseResult = searchRequestBodySchema.safeParse(body);
+    if (!parseResult.success) {
+      return c.json({ error: "Invalid request body" }, 400);
+    }
+    const requestBody = parseResult.data;
+    const options = {
+      query: requestBody.query
+    };
+    if (requestBody.format !== void 0) {
+      options.format = requestBody.format;
     }
-  library
-    return c.
+    const result = searchReferences(library, options);
+    return c.json(result);
   });
   return route;
 }
-function createServer(library) {
+function createServer(library, config) {
   const app = new Hono();
   app.route("/health", healthRoute);
   const referencesRoute = createReferencesRoute(library);
   app.route("/api/references", referencesRoute);
+  const addRoute = createAddRoute(library, config);
+  app.route("/api/add", addRoute);
+  const citeRoute = createCiteRoute(library);
+  app.route("/api/cite", citeRoute);
+  const listRoute = createListRoute(library);
+  app.route("/api/list", listRoute);
+  const searchRoute = createSearchRoute(library);
+  app.route("/api/search", searchRoute);
   return app;
 }
 export {