@ncukondo/reference-manager 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +167 -0
- package/bin/reference-manager.js +5 -0
- package/dist/chunks/detector-BF8Mcc72.js +1415 -0
- package/dist/chunks/detector-BF8Mcc72.js.map +1 -0
- package/dist/cli/commands/add.d.ts +22 -0
- package/dist/cli/commands/add.d.ts.map +1 -0
- package/dist/cli/commands/index.d.ts +16 -0
- package/dist/cli/commands/index.d.ts.map +1 -0
- package/dist/cli/commands/list.d.ts +15 -0
- package/dist/cli/commands/list.d.ts.map +1 -0
- package/dist/cli/commands/remove.d.ts +19 -0
- package/dist/cli/commands/remove.d.ts.map +1 -0
- package/dist/cli/commands/search.d.ts +16 -0
- package/dist/cli/commands/search.d.ts.map +1 -0
- package/dist/cli/commands/server.d.ts +32 -0
- package/dist/cli/commands/server.d.ts.map +1 -0
- package/dist/cli/commands/update.d.ts +20 -0
- package/dist/cli/commands/update.d.ts.map +1 -0
- package/dist/cli/helpers.d.ts +61 -0
- package/dist/cli/helpers.d.ts.map +1 -0
- package/dist/cli/index.d.ts +13 -0
- package/dist/cli/index.d.ts.map +1 -0
- package/dist/cli/output/bibtex.d.ts +6 -0
- package/dist/cli/output/bibtex.d.ts.map +1 -0
- package/dist/cli/output/index.d.ts +7 -0
- package/dist/cli/output/index.d.ts.map +1 -0
- package/dist/cli/output/json.d.ts +6 -0
- package/dist/cli/output/json.d.ts.map +1 -0
- package/dist/cli/output/pretty.d.ts +6 -0
- package/dist/cli/output/pretty.d.ts.map +1 -0
- package/dist/cli/server-client.d.ts +38 -0
- package/dist/cli/server-client.d.ts.map +1 -0
- package/dist/cli/server-detection.d.ts +27 -0
- package/dist/cli/server-detection.d.ts.map +1 -0
- package/dist/cli.js +981 -0
- package/dist/cli.js.map +1 -0
- package/dist/config/defaults.d.ts +29 -0
- package/dist/config/defaults.d.ts.map +1 -0
- package/dist/config/index.d.ts +10 -0
- package/dist/config/index.d.ts.map +1 -0
- package/dist/config/loader.d.ts +27 -0
- package/dist/config/loader.d.ts.map +1 -0
- package/dist/config/schema.d.ts +129 -0
- package/dist/config/schema.d.ts.map +1 -0
- package/dist/core/csl-json/parser.d.ts +9 -0
- package/dist/core/csl-json/parser.d.ts.map +1 -0
- package/dist/core/csl-json/serializer.d.ts +15 -0
- package/dist/core/csl-json/serializer.d.ts.map +1 -0
- package/dist/core/csl-json/types.d.ts +124 -0
- package/dist/core/csl-json/types.d.ts.map +1 -0
- package/dist/core/csl-json/validator.d.ts +19 -0
- package/dist/core/csl-json/validator.d.ts.map +1 -0
- package/dist/core/identifier/generator.d.ts +17 -0
- package/dist/core/identifier/generator.d.ts.map +1 -0
- package/dist/core/identifier/normalize.d.ts +20 -0
- package/dist/core/identifier/normalize.d.ts.map +1 -0
- package/dist/core/identifier/uuid.d.ts +24 -0
- package/dist/core/identifier/uuid.d.ts.map +1 -0
- package/dist/core/index.d.ts +15 -0
- package/dist/core/index.d.ts.map +1 -0
- package/dist/core/library.d.ts +73 -0
- package/dist/core/library.d.ts.map +1 -0
- package/dist/core/reference.d.ts +86 -0
- package/dist/core/reference.d.ts.map +1 -0
- package/dist/features/duplicate/detector.d.ts +19 -0
- package/dist/features/duplicate/detector.d.ts.map +1 -0
- package/dist/features/duplicate/index.d.ts +6 -0
- package/dist/features/duplicate/index.d.ts.map +1 -0
- package/dist/features/duplicate/types.d.ts +45 -0
- package/dist/features/duplicate/types.d.ts.map +1 -0
- package/dist/features/file-watcher/file-watcher.d.ts +83 -0
- package/dist/features/file-watcher/file-watcher.d.ts.map +1 -0
- package/dist/features/file-watcher/index.d.ts +2 -0
- package/dist/features/file-watcher/index.d.ts.map +1 -0
- package/dist/features/merge/index.d.ts +8 -0
- package/dist/features/merge/index.d.ts.map +1 -0
- package/dist/features/merge/three-way.d.ts +16 -0
- package/dist/features/merge/three-way.d.ts.map +1 -0
- package/dist/features/merge/types.d.ts +74 -0
- package/dist/features/merge/types.d.ts.map +1 -0
- package/dist/features/search/index.d.ts +9 -0
- package/dist/features/search/index.d.ts.map +1 -0
- package/dist/features/search/matcher.d.ts +18 -0
- package/dist/features/search/matcher.d.ts.map +1 -0
- package/dist/features/search/normalizer.d.ts +12 -0
- package/dist/features/search/normalizer.d.ts.map +1 -0
- package/dist/features/search/sorter.d.ts +11 -0
- package/dist/features/search/sorter.d.ts.map +1 -0
- package/dist/features/search/tokenizer.d.ts +6 -0
- package/dist/features/search/tokenizer.d.ts.map +1 -0
- package/dist/features/search/types.d.ts +77 -0
- package/dist/features/search/types.d.ts.map +1 -0
- package/dist/index.d.ts +13 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +559 -0
- package/dist/index.js.map +1 -0
- package/dist/server/index.d.ts +9 -0
- package/dist/server/index.d.ts.map +1 -0
- package/dist/server/portfile.d.ts +43 -0
- package/dist/server/portfile.d.ts.map +1 -0
- package/dist/server/routes/health.d.ts +7 -0
- package/dist/server/routes/health.d.ts.map +1 -0
- package/dist/server/routes/references.d.ts +9 -0
- package/dist/server/routes/references.d.ts.map +1 -0
- package/dist/server.js +91 -0
- package/dist/server.js.map +1 -0
- package/dist/utils/backup.d.ts +21 -0
- package/dist/utils/backup.d.ts.map +1 -0
- package/dist/utils/file.d.ts +9 -0
- package/dist/utils/file.d.ts.map +1 -0
- package/dist/utils/hash.d.ts +9 -0
- package/dist/utils/hash.d.ts.map +1 -0
- package/dist/utils/index.d.ts +5 -0
- package/dist/utils/index.d.ts.map +1 -0
- package/dist/utils/logger.d.ts +8 -0
- package/dist/utils/logger.d.ts.map +1 -0
- package/package.json +72 -0

package/dist/index.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"index.js","sources":["../src/core/csl-json/validator.ts","../src/utils/logger.ts","../src/utils/file.ts","../src/utils/backup.ts","../src/features/merge/three-way.ts","../src/features/file-watcher/file-watcher.ts"],"sourcesContent":["import { CslItemSchema, CslLibrarySchema } from \"./types\";\nimport type { CslItem, CslLibrary } from \"./types\";\n\n/**\n * Validate CSL-JSON library structure\n * @param data - Data to validate (can be any type)\n * @returns Validated CSL-JSON library\n * @throws Error if validation fails\n */\nexport function validateCslJson(data: unknown): CslLibrary {\n const parseResult = CslLibrarySchema.safeParse(data);\n\n if (!parseResult.success) {\n throw new Error(`Invalid CSL-JSON structure: ${parseResult.error.message}`);\n }\n\n return parseResult.data;\n}\n\n/**\n * Validate a single CSL-JSON item\n * @param data - Data to validate (can be any type)\n * @returns Validation result with valid flag and errors\n */\nexport function validateCslItem(data: unknown): {\n valid: boolean;\n data?: CslItem;\n errors?: string[];\n} {\n const parseResult = CslItemSchema.safeParse(data);\n\n if (!parseResult.success) {\n return {\n valid: false,\n errors: parseResult.error.issues.map((issue) => issue.message),\n };\n }\n\n return {\n valid: true,\n data: parseResult.data,\n };\n}\n","export type LogLevel = \"silent\" | \"info\" | \"debug\";\n\nexport interface Logger {\n info(...args: unknown[]): void;\n debug(...args: unknown[]): void;\n error(...args: unknown[]): void;\n}\n\nexport function createLogger(level: LogLevel = \"info\"): Logger {\n const shouldLogInfo = level === \"info\" || level === \"debug\";\n const shouldLogDebug = level === \"debug\";\n\n function formatMessage(...args: unknown[]): string {\n return `${args.map((arg) => String(arg)).join(\" \")}\\n`;\n }\n\n return {\n info(...args: unknown[]): void {\n if (shouldLogInfo) {\n process.stderr.write(formatMessage(...args));\n }\n },\n\n debug(...args: unknown[]): void {\n if (shouldLogDebug) {\n process.stderr.write(formatMessage(...args));\n }\n },\n\n error(...args: unknown[]): void {\n process.stderr.write(formatMessage(...args));\n },\n };\n}\n","import { mkdir } from \"node:fs/promises\";\nimport { dirname } from \"node:path\";\nimport writeFileAtomicLib from \"write-file-atomic\";\n\n/**\n * Write file atomically with parent directory creation\n */\nexport async function writeFileAtomic(filePath: string, content: string): Promise<void> {\n await ensureDirectoryExists(dirname(filePath));\n await writeFileAtomicLib(filePath, content, { encoding: \"utf-8\" });\n}\n\n/**\n * Ensure directory exists, creating it recursively if necessary\n */\nexport async function ensureDirectoryExists(dirPath: string): Promise<void> {\n await mkdir(dirPath, { recursive: true });\n}\n","import { existsSync } from \"node:fs\";\nimport { copyFile, readFile, readdir, stat, unlink } from \"node:fs/promises\";\nimport { tmpdir } from \"node:os\";\nimport { dirname, join } from \"node:path\";\nimport { fileURLToPath } from \"node:url\";\nimport { ensureDirectoryExists } from \"./file\";\n\nexport interface BackupOptions {\n maxGenerations?: number;\n maxAgeMs?: number;\n}\n\nconst DEFAULT_MAX_GENERATIONS = 50;\nconst DEFAULT_MAX_AGE_MS = 365 * 24 * 60 * 60 * 1000; // 1 year\n\n/**\n * Get package name from package.json\n */\nasync function resolvePackageName(): Promise<string> {\n try {\n const currentFile = fileURLToPath(import.meta.url);\n let currentDir = dirname(currentFile);\n\n for (let i = 0; i < 10; 
i++) {\n const packageJsonPath = join(currentDir, \"package.json\");\n if (existsSync(packageJsonPath)) {\n const content = await readFile(packageJsonPath, \"utf-8\");\n const pkg = JSON.parse(content);\n return pkg.name;\n }\n currentDir = dirname(currentDir);\n }\n } catch {\n // Fall back to hardcoded name if package.json is not found\n }\n\n return \"reference-manager\";\n}\n\nconst packageName = await resolvePackageName();\n\n/**\n * Get backup directory path for a library\n */\nexport function getBackupDirectory(libraryName: string): string {\n const pkgName = packageName ?? \"reference-manager\";\n return join(tmpdir(), pkgName, \"backups\", libraryName);\n}\n\n/**\n * Create a backup of the given file\n */\nexport async function createBackup(sourceFile: string, libraryName: string): Promise<string> {\n const backupDir = getBackupDirectory(libraryName);\n await ensureDirectoryExists(backupDir);\n\n const timestamp = Date.now();\n const backupFileName = `${timestamp}.backup`;\n const backupPath = join(backupDir, backupFileName);\n\n await copyFile(sourceFile, backupPath);\n\n return backupPath;\n}\n\n/**\n * List all backups for a library (sorted by modification time, newest first)\n */\nexport async function listBackups(libraryName: string): Promise<string[]> {\n const backupDir = getBackupDirectory(libraryName);\n\n if (!existsSync(backupDir)) {\n return [];\n }\n\n const files = await readdir(backupDir);\n const backupFiles = files.filter((f) => f.endsWith(\".backup\")).map((f) => join(backupDir, f));\n\n const filesWithStats = await Promise.all(\n backupFiles.map(async (file) => {\n const stats = await stat(file);\n return { file, mtime: stats.mtimeMs };\n })\n );\n\n filesWithStats.sort((a, b) => b.mtime - a.mtime);\n\n return filesWithStats.map((f) => f.file);\n}\n\n/**\n * Clean up old backups based on generation count and age\n */\nexport async function cleanupOldBackups(\n libraryName: string,\n options?: BackupOptions\n): Promise<void> {\n const maxGenerations = options?.maxGenerations ?? DEFAULT_MAX_GENERATIONS;\n const maxAgeMs = options?.maxAgeMs ?? 
DEFAULT_MAX_AGE_MS;\n\n const backups = await listBackups(libraryName);\n const now = Date.now();\n\n const backupsToDelete: string[] = [];\n\n for (let i = 0; i < backups.length; i++) {\n const backupPath = backups[i];\n if (!backupPath) continue;\n\n const stats = await stat(backupPath);\n const age = now - stats.mtimeMs;\n\n if (i >= maxGenerations || age > maxAgeMs) {\n backupsToDelete.push(backupPath);\n }\n }\n\n await Promise.all(backupsToDelete.map((backup) => unlink(backup)));\n}\n","/**\n * 3-way merge implementation with Last-Write-Wins (LWW) strategy\n */\n\nimport type { CslItem } from \"../../core/csl-json/types.js\";\nimport type {\n FieldConflict,\n ItemConflict,\n MergeOptions,\n MergeResult,\n MergeStatus,\n} from \"./types.js\";\n\n/**\n * Get UUID from item, with fallback to id if uuid is missing\n */\nfunction getItemUuid(item: CslItem): string {\n return item.custom?.uuid || item.id || \"unknown\";\n}\n\n/**\n * Get timestamp from item, with fallback to created_at\n */\nfunction getTimestamp(item: CslItem): string {\n return item.custom?.timestamp || item.custom?.created_at || \"1970-01-01T00:00:00.000Z\";\n}\n\n/**\n * Deep equality check for field values\n */\nfunction deepEqual(a: unknown, b: unknown): boolean {\n return JSON.stringify(a) === JSON.stringify(b);\n}\n\n/**\n * Resolve a field conflict using LWW or prefer option\n */\nfunction resolveFieldConflict(\n localValue: unknown,\n remoteValue: unknown,\n localTimestamp: string,\n remoteTimestamp: string,\n options?: MergeOptions\n): unknown {\n if (localTimestamp > remoteTimestamp) {\n return localValue;\n }\n if (remoteTimestamp > localTimestamp) {\n return remoteValue;\n }\n // Timestamps equal: use prefer option or default to local\n if (options?.prefer === \"remote\") {\n return remoteValue;\n }\n return localValue;\n}\n\n/**\n * Determine conflict resolution type\n */\nfunction determineResolution(\n fieldConflicts: FieldConflict[],\n localTimestamp: string,\n remoteTimestamp: string,\n options?: MergeOptions\n): ItemConflict[\"resolution\"] {\n const hasRealConflicts = fieldConflicts.every((fc) => fc.resolved !== undefined);\n const localIsNewer = fieldConflicts.some(\n (fc) => fc.local !== fc.remote && localTimestamp > remoteTimestamp\n );\n const remoteIsNewer = fieldConflicts.some(\n (fc) => fc.local !== fc.remote && remoteTimestamp > localTimestamp\n );\n\n if (hasRealConflicts && localIsNewer) return \"auto-lww\";\n if (hasRealConflicts && remoteIsNewer) return \"auto-lww\";\n if (options?.prefer === \"local\") return \"prefer-local\";\n if (options?.prefer === \"remote\") return \"prefer-remote\";\n return \"unresolved\";\n}\n\n/**\n * Merge a single field from base, local, and remote versions\n */\nfunction mergeField(\n key: string,\n baseValue: unknown,\n localValue: unknown,\n remoteValue: unknown,\n localTimestamp: string,\n remoteTimestamp: string,\n options?: MergeOptions\n): { value: unknown; conflict: FieldConflict | null } {\n const localChanged = !deepEqual(baseValue, localValue);\n const remoteChanged = !deepEqual(baseValue, remoteValue);\n\n if (!localChanged && !remoteChanged) {\n return { value: baseValue, conflict: null };\n }\n\n if (localChanged && !remoteChanged) {\n return { value: localValue, conflict: null };\n }\n\n if (!localChanged && remoteChanged) {\n return { value: remoteValue, conflict: null };\n }\n\n // Both changed\n if (deepEqual(localValue, remoteValue)) {\n return { value: localValue, conflict: null };\n }\n\n // Both changed to different values\n const resolved 
= resolveFieldConflict(\n localValue,\n remoteValue,\n localTimestamp,\n remoteTimestamp,\n options\n );\n\n // Don't record conflicts for 'custom' metadata field\n if (key === \"custom\") {\n return { value: resolved, conflict: null };\n }\n\n return {\n value: resolved,\n conflict: {\n field: key,\n base: baseValue,\n local: localValue,\n remote: remoteValue,\n resolved,\n },\n };\n}\n\n/**\n * Merge a single item from base, local, and remote versions\n */\nfunction mergeItem(\n base: CslItem,\n local: CslItem,\n remote: CslItem,\n options?: MergeOptions\n): { merged: CslItem; conflict: ItemConflict | null } {\n const uuid = getItemUuid(base);\n const localTimestamp = getTimestamp(local);\n const remoteTimestamp = getTimestamp(remote);\n\n const merged: CslItem = { ...base };\n const fieldConflicts: FieldConflict[] = [];\n\n // Get all unique keys from all three versions\n const allKeys = new Set<string>([\n ...Object.keys(base),\n ...Object.keys(local),\n ...Object.keys(remote),\n ]);\n\n for (const key of allKeys) {\n const baseValue = (base as Record<string, unknown>)[key];\n const localValue = (local as Record<string, unknown>)[key];\n const remoteValue = (remote as Record<string, unknown>)[key];\n\n const { value, conflict } = mergeField(\n key,\n baseValue,\n localValue,\n remoteValue,\n localTimestamp,\n remoteTimestamp,\n options\n );\n\n (merged as Record<string, unknown>)[key] = value;\n\n if (conflict) {\n fieldConflicts.push(conflict);\n }\n }\n\n // If there are field conflicts, create ItemConflict\n if (fieldConflicts.length > 0) {\n const resolution = determineResolution(\n fieldConflicts,\n localTimestamp,\n remoteTimestamp,\n options\n );\n\n return {\n merged,\n conflict: {\n uuid,\n id: base.id || \"unknown\",\n fields: fieldConflicts,\n localTimestamp,\n remoteTimestamp,\n resolution,\n },\n };\n }\n\n return { merged, conflict: null };\n}\n\n/**\n * Build UUID-indexed maps from item arrays\n */\nfunction buildItemMaps(base: CslItem[], local: CslItem[], remote: CslItem[]) {\n const baseMap = new Map<string, CslItem>();\n const localMap = new Map<string, CslItem>();\n const remoteMap = new Map<string, CslItem>();\n\n for (const item of base) {\n baseMap.set(getItemUuid(item), item);\n }\n for (const item of local) {\n localMap.set(getItemUuid(item), item);\n }\n for (const item of remote) {\n remoteMap.set(getItemUuid(item), item);\n }\n\n return { baseMap, localMap, remoteMap };\n}\n\n/**\n * Handle items that exist in all three versions\n */\nfunction mergeExistingItem(\n baseItem: CslItem,\n localItem: CslItem,\n remoteItem: CslItem,\n options: MergeOptions | undefined,\n merged: CslItem[],\n conflicts: ItemConflict[]\n): void {\n const { merged: mergedItem, conflict } = mergeItem(baseItem, localItem, remoteItem, options);\n merged.push(mergedItem);\n if (conflict) {\n conflicts.push(conflict);\n }\n}\n\n/**\n * Handle items added in both local and remote\n */\nfunction handleDualAddition(\n uuid: string,\n localItem: CslItem,\n remoteItem: CslItem,\n options: MergeOptions | undefined,\n merged: CslItem[],\n conflicts: ItemConflict[]\n): void {\n if (deepEqual(localItem, remoteItem)) {\n merged.push(localItem);\n } else {\n const syntheticBase: CslItem = {\n id: uuid,\n type: \"article\",\n custom: {\n uuid,\n created_at: \"1970-01-01T00:00:00.000Z\",\n timestamp: \"1970-01-01T00:00:00.000Z\",\n },\n };\n const { merged: mergedItem, conflict } = mergeItem(\n syntheticBase,\n localItem,\n remoteItem,\n options\n );\n merged.push(mergedItem);\n if (conflict) {\n 
conflicts.push(conflict);\n }\n }\n}\n\n/**\n * Process a single UUID across all three versions\n */\nfunction processItem(\n uuid: string,\n baseMap: Map<string, CslItem>,\n localMap: Map<string, CslItem>,\n remoteMap: Map<string, CslItem>,\n options: MergeOptions | undefined,\n result: {\n merged: CslItem[];\n conflicts: ItemConflict[];\n localOnly: CslItem[];\n remoteOnly: CslItem[];\n deletedInLocal: CslItem[];\n deletedInRemote: CslItem[];\n }\n): void {\n const baseItem = baseMap.get(uuid);\n const localItem = localMap.get(uuid);\n const remoteItem = remoteMap.get(uuid);\n\n if (baseItem && localItem && remoteItem) {\n mergeExistingItem(baseItem, localItem, remoteItem, options, result.merged, result.conflicts);\n } else if (!baseItem && localItem && remoteItem) {\n handleDualAddition(uuid, localItem, remoteItem, options, result.merged, result.conflicts);\n } else if (!baseItem && localItem && !remoteItem) {\n result.merged.push(localItem);\n result.localOnly.push(localItem);\n } else if (!baseItem && !localItem && remoteItem) {\n result.merged.push(remoteItem);\n result.remoteOnly.push(remoteItem);\n } else if (baseItem && !localItem && remoteItem) {\n result.deletedInLocal.push(baseItem);\n } else if (baseItem && localItem && !remoteItem) {\n result.deletedInRemote.push(baseItem);\n } else if (baseItem && !localItem && !remoteItem) {\n result.deletedInLocal.push(baseItem);\n result.deletedInRemote.push(baseItem);\n }\n}\n\n/**\n * Performs a 3-way merge of CSL-JSON items\n *\n * @param base - Base version (common ancestor)\n * @param local - Local version (current working copy)\n * @param remote - Remote version (incoming changes)\n * @param options - Merge options (e.g., prefer local/remote for tie-breaking)\n * @returns Merge result with merged items and conflict information\n */\nexport function threeWayMerge(\n base: CslItem[],\n local: CslItem[],\n remote: CslItem[],\n options?: MergeOptions\n): MergeResult {\n const { baseMap, localMap, remoteMap } = buildItemMaps(base, local, remote);\n\n const result = {\n merged: [] as CslItem[],\n conflicts: [] as ItemConflict[],\n localOnly: [] as CslItem[],\n remoteOnly: [] as CslItem[],\n deletedInLocal: [] as CslItem[],\n deletedInRemote: [] as CslItem[],\n };\n\n const allUuids = new Set<string>([...baseMap.keys(), ...localMap.keys(), ...remoteMap.keys()]);\n\n for (const uuid of allUuids) {\n processItem(uuid, baseMap, localMap, remoteMap, options, result);\n }\n\n // Determine overall status\n let status: MergeStatus = \"success\";\n if (result.conflicts.length > 0) {\n const hasUnresolved = result.conflicts.some((c) => c.resolution === \"unresolved\");\n status = hasUnresolved ? 
\"conflict\" : \"auto-resolved\";\n }\n\n return {\n status,\n ...result,\n };\n}\n","import { EventEmitter } from \"node:events\";\nimport * as fs from \"node:fs/promises\";\nimport * as path from \"node:path\";\nimport chokidar, { type FSWatcher } from \"chokidar\";\n\n/**\n * Options for FileWatcher\n */\nexport interface FileWatcherOptions {\n /** Debounce time in milliseconds (default: 500) */\n debounceMs?: number;\n /** Poll interval in milliseconds for polling mode (default: 5000) */\n pollIntervalMs?: number;\n /** Use polling instead of native file system events */\n usePolling?: boolean;\n /** Retry delay in milliseconds for JSON parse (default: 200) */\n retryDelayMs?: number;\n /** Maximum number of retries for JSON parse (default: 10) */\n maxRetries?: number;\n}\n\n// Default values from spec\nconst DEFAULT_DEBOUNCE_MS = 500;\nconst DEFAULT_POLL_INTERVAL_MS = 5000;\nconst DEFAULT_RETRY_DELAY_MS = 200;\nconst DEFAULT_MAX_RETRIES = 10;\n\n/**\n * Check if a file should be ignored based on spec patterns\n * Ignored patterns:\n * - *.tmp\n * - *.bak\n * - *.conflict.*\n * - *.lock\n * - editor swap files (.swp, ~)\n */\nfunction shouldIgnore(filePath: string): boolean {\n const basename = path.basename(filePath);\n\n // *.tmp files\n if (basename.endsWith(\".tmp\")) return true;\n\n // *.bak files\n if (basename.endsWith(\".bak\")) return true;\n\n // *.conflict.* files (contains .conflict. in name)\n if (basename.includes(\".conflict.\")) return true;\n\n // *.lock files\n if (basename.endsWith(\".lock\")) return true;\n\n // Vim swap files (.*.swp)\n if (basename.startsWith(\".\") && basename.endsWith(\".swp\")) return true;\n\n // Editor backup files (*~)\n if (basename.endsWith(\"~\")) return true;\n\n return false;\n}\n\n/**\n * FileWatcher watches a file or directory for changes and emits events.\n *\n * Events:\n * - 'change': Emitted when a watched file changes (after debounce)\n * - 'error': Emitted when a watch error occurs\n * - 'ready': Emitted when watching has started\n * - 'parsed': Emitted when JSON file is successfully parsed\n * - 'parseError': Emitted when JSON parse fails after all retries\n */\nexport class FileWatcher extends EventEmitter {\n private readonly watchPath: string;\n private readonly debounceMs: number;\n private readonly pollIntervalMs: number;\n private readonly usePolling: boolean;\n private readonly retryDelayMs: number;\n private readonly maxRetries: number;\n\n private watcher: FSWatcher | null = null;\n private watching = false;\n private debounceTimers: Map<string, NodeJS.Timeout> = new Map();\n\n constructor(watchPath: string, options?: FileWatcherOptions) {\n super();\n this.watchPath = watchPath;\n this.debounceMs = options?.debounceMs ?? DEFAULT_DEBOUNCE_MS;\n this.pollIntervalMs = options?.pollIntervalMs ?? DEFAULT_POLL_INTERVAL_MS;\n this.usePolling = options?.usePolling ?? false;\n this.retryDelayMs = options?.retryDelayMs ?? DEFAULT_RETRY_DELAY_MS;\n this.maxRetries = options?.maxRetries ?? 
DEFAULT_MAX_RETRIES;\n }\n\n /**\n * Start watching for file changes\n */\n async start(): Promise<void> {\n if (this.watching) {\n return;\n }\n\n return new Promise((resolve, reject) => {\n this.watcher = chokidar.watch(this.watchPath, {\n ignored: shouldIgnore,\n persistent: true,\n usePolling: this.usePolling,\n interval: this.pollIntervalMs,\n ignoreInitial: true,\n awaitWriteFinish: false,\n });\n\n this.watcher.on(\"ready\", () => {\n this.watching = true;\n this.emit(\"ready\");\n resolve();\n });\n\n this.watcher.on(\"error\", (error: unknown) => {\n this.emit(\"error\", error);\n if (!this.watching) {\n reject(error);\n }\n });\n\n this.watcher.on(\"change\", (filePath: string) => {\n this.handleFileChange(filePath);\n });\n\n this.watcher.on(\"add\", (filePath: string) => {\n this.handleFileChange(filePath);\n });\n });\n }\n\n /**\n * Handle file change with debouncing\n */\n private handleFileChange(filePath: string): void {\n // Clear existing timer for this file\n const existingTimer = this.debounceTimers.get(filePath);\n if (existingTimer) {\n clearTimeout(existingTimer);\n }\n\n // Set new debounced timer\n const timer = setTimeout(() => {\n this.debounceTimers.delete(filePath);\n this.emit(\"change\", filePath);\n this.tryParseJsonFile(filePath);\n }, this.debounceMs);\n\n this.debounceTimers.set(filePath, timer);\n }\n\n /**\n * Try to parse JSON file with retries\n */\n private async tryParseJsonFile(filePath: string): Promise<void> {\n // Only parse .json files\n if (path.extname(filePath).toLowerCase() !== \".json\") {\n return;\n }\n\n let lastError: Error | null = null;\n\n for (let attempt = 0; attempt <= this.maxRetries; attempt++) {\n try {\n const content = await fs.readFile(filePath, \"utf-8\");\n const parsed = JSON.parse(content);\n this.emit(\"parsed\", filePath, parsed);\n return;\n } catch (error) {\n lastError = error as Error;\n if (attempt < this.maxRetries) {\n await this.delay(this.retryDelayMs);\n }\n }\n }\n\n this.emit(\"parseError\", filePath, lastError);\n }\n\n /**\n * Delay helper\n */\n private delay(ms: number): Promise<void> {\n return new Promise((resolve) => setTimeout(resolve, ms));\n }\n\n /**\n * Stop watching for file changes\n */\n close(): void {\n if (this.watcher) {\n this.watcher.close();\n this.watcher = null;\n }\n\n // Clear all debounce timers\n for (const timer of this.debounceTimers.values()) {\n clearTimeout(timer);\n }\n this.debounceTimers.clear();\n\n this.watching = false;\n }\n\n /**\n * Get the watched path\n */\n getPath(): string {\n return this.watchPath;\n }\n\n /**\n * Check if the watcher is currently active\n */\n isWatching(): boolean {\n return this.watching;\n }\n\n /**\n * Get the debounce time in milliseconds\n */\n getDebounceMs(): number {\n return this.debounceMs;\n }\n\n /**\n * Get the poll interval in milliseconds\n */\n getPollIntervalMs(): number {\n return this.pollIntervalMs;\n }\n\n /**\n * Get the retry delay in milliseconds\n */\n getRetryDelayMs(): number {\n return this.retryDelayMs;\n }\n\n /**\n * Get the maximum number of retries\n */\n getMaxRetries(): number {\n return this.maxRetries;\n 
}\n}\n"],"names":[],"mappings":";;;;;;;;;;;;AASO,SAAS,gBAAgB,MAA2B;AACzD,QAAM,cAAc,iBAAiB,UAAU,IAAI;AAEnD,MAAI,CAAC,YAAY,SAAS;AACxB,UAAM,IAAI,MAAM,+BAA+B,YAAY,MAAM,OAAO,EAAE;AAAA,EAC5E;AAEA,SAAO,YAAY;AACrB;ACTO,SAAS,aAAa,QAAkB,QAAgB;AAC7D,QAAM,gBAAgB,UAAU,UAAU,UAAU;AACpD,QAAM,iBAAiB,UAAU;AAEjC,WAAS,iBAAiB,MAAyB;AACjD,WAAO,GAAG,KAAK,IAAI,CAAC,QAAQ,OAAO,GAAG,CAAC,EAAE,KAAK,GAAG,CAAC;AAAA;AAAA,EACpD;AAEA,SAAO;AAAA,IACL,QAAQ,MAAuB;AAC7B,UAAI,eAAe;AACjB,gBAAQ,OAAO,MAAM,cAAc,GAAG,IAAI,CAAC;AAAA,MAC7C;AAAA,IACF;AAAA,IAEA,SAAS,MAAuB;AAC9B,UAAI,gBAAgB;AAClB,gBAAQ,OAAO,MAAM,cAAc,GAAG,IAAI,CAAC;AAAA,MAC7C;AAAA,IACF;AAAA,IAEA,SAAS,MAAuB;AAC9B,cAAQ,OAAO,MAAM,cAAc,GAAG,IAAI,CAAC;AAAA,IAC7C;AAAA,EAAA;AAEJ;AC1BA,eAAsB,gBAAgB,UAAkB,SAAgC;AACtF,QAAM,sBAAsB,QAAQ,QAAQ,CAAC;AAC7C,QAAM,mBAAmB,UAAU,SAAS,EAAE,UAAU,SAAS;AACnE;AAKA,eAAsB,sBAAsB,SAAgC;AAC1E,QAAM,MAAM,SAAS,EAAE,WAAW,MAAM;AAC1C;ACLA,MAAM,0BAA0B;AAChC,MAAM,qBAAqB,MAAM,KAAK,KAAK,KAAK;AAKhD,eAAe,qBAAsC;AACnD,MAAI;AACF,UAAM,cAAc,cAAc,YAAY,GAAG;AACjD,QAAI,aAAa,QAAQ,WAAW;AAEpC,aAAS,IAAI,GAAG,IAAI,IAAI,KAAK;AAC3B,YAAM,kBAAkB,KAAK,YAAY,cAAc;AACvD,UAAI,WAAW,eAAe,GAAG;AAC/B,cAAM,UAAU,MAAM,SAAS,iBAAiB,OAAO;AACvD,cAAM,MAAM,KAAK,MAAM,OAAO;AAC9B,eAAO,IAAI;AAAA,MACb;AACA,mBAAa,QAAQ,UAAU;AAAA,IACjC;AAAA,EACF,QAAQ;AAAA,EAER;AAEA,SAAO;AACT;AAEA,MAAM,cAAc,MAAM,mBAAA;AAKnB,SAAS,mBAAmB,aAA6B;AAC9D,QAAM,UAAU,eAAe;AAC/B,SAAO,KAAK,OAAA,GAAU,SAAS,WAAW,WAAW;AACvD;AAKA,eAAsB,aAAa,YAAoB,aAAsC;AAC3F,QAAM,YAAY,mBAAmB,WAAW;AAChD,QAAM,sBAAsB,SAAS;AAErC,QAAM,YAAY,KAAK,IAAA;AACvB,QAAM,iBAAiB,GAAG,SAAS;AACnC,QAAM,aAAa,KAAK,WAAW,cAAc;AAEjD,QAAM,SAAS,YAAY,UAAU;AAErC,SAAO;AACT;AAKA,eAAsB,YAAY,aAAwC;AACxE,QAAM,YAAY,mBAAmB,WAAW;AAEhD,MAAI,CAAC,WAAW,SAAS,GAAG;AAC1B,WAAO,CAAA;AAAA,EACT;AAEA,QAAM,QAAQ,MAAM,QAAQ,SAAS;AACrC,QAAM,cAAc,MAAM,OAAO,CAAC,MAAM,EAAE,SAAS,SAAS,CAAC,EAAE,IAAI,CAAC,MAAM,KAAK,WAAW,CAAC,CAAC;AAE5F,QAAM,iBAAiB,MAAM,QAAQ;AAAA,IACnC,YAAY,IAAI,OAAO,SAAS;AAC9B,YAAM,QAAQ,MAAM,KAAK,IAAI;AAC7B,aAAO,EAAE,MAAM,OAAO,MAAM,QAAA;AAAA,IAC9B,CAAC;AAAA,EAAA;AAGH,iBAAe,KAAK,CAAC,GAAG,MAAM,EAAE,QAAQ,EAAE,KAAK;AAE/C,SAAO,eAAe,IAAI,CAAC,MAAM,EAAE,IAAI;AACzC;AAKA,eAAsB,kBACpB,aACA,SACe;AACf,QAAM,iBAAiB,SAAS,kBAAkB;AAClD,QAAM,WAAW,SAAS,YAAY;AAEtC,QAAM,UAAU,MAAM,YAAY,WAAW;AAC7C,QAAM,MAAM,KAAK,IAAA;AAEjB,QAAM,kBAA4B,CAAA;AAElC,WAAS,IAAI,GAAG,IAAI,QAAQ,QAAQ,KAAK;AACvC,UAAM,aAAa,QAAQ,CAAC;AAC5B,QAAI,CAAC,WAAY;AAEjB,UAAM,QAAQ,MAAM,KAAK,UAAU;AACnC,UAAM,MAAM,MAAM,MAAM;AAExB,QAAI,KAAK,kBAAkB,MAAM,UAAU;AACzC,sBAAgB,KAAK,UAAU;AAAA,IACjC;AAAA,EACF;AAEA,QAAM,QAAQ,IAAI,gBAAgB,IAAI,CAAC,WAAW,OAAO,MAAM,CAAC,CAAC;AACnE;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;ACtGA,SAAS,YAAY,MAAuB;AAC1C,SAAO,KAAK,QAAQ,QAAQ,KAAK,MAAM;AACzC;AAKA,SAAS,aAAa,MAAuB;AAC3C,SAAO,KAAK,QAAQ,aAAa,KAAK,QAAQ,cAAc;AAC9D;AAKA,SAAS,UAAU,GAAY,GAAqB;AAClD,SAAO,KAAK,UAAU,CAAC,MAAM,KAAK,UAAU,CAAC;AAC/C;AAKA,SAAS,qBACP,YACA,aACA,gBACA,iBACA,SACS;AACT,MAAI,iBAAiB,iBAAiB;AACpC,WAAO;AAAA,EACT;AACA,MAAI,kBAAkB,gBAAgB;AACpC,WAAO;AAAA,EACT;AAEA,MAAI,SAAS,WAAW,UAAU;AAChC,WAAO;AAAA,EACT;AACA,SAAO;AACT;AAKA,SAAS,oBACP,gBACA,gBACA,iBACA,SAC4B;AAC5B,QAAM,mBAAmB,eAAe,MAAM,CAAC,OAAO,GAAG,aAAa,MAAS;AAC/E,QAAM,eAAe,eAAe;AAAA,IAClC,CAAC,OAAO,GAAG,UAAU,GAAG,UAAU,iBAAiB;AAAA,EAAA;AAErD,QAAM,gBAAgB,eAAe;AAAA,IACnC,CAAC,OAAO,GAAG,UAAU,GAAG,UAAU,kBAAkB;AAAA,EAAA;AAGtD,MAAI,oBAAoB,aAAc,QAAO;AAC7C,MAAI,oBAAoB,cAAe,QAAO;AAC9C,MAAI,SAAS,WAAW,QAAS,QAAO;AACxC,MAAI,SAAS,WAAW,SAAU,QAAO;AACzC,SAAO;AACT;AAKA,SAAS,WACP,KACA,WACA,YACA,aACA,gBACA,iBACA,SACoD;AACpD,QAAM,eAAe,CAAC,UAAU,WAAW,UAAU;AACrD,QAAM,gBAAgB,CAAC,UAAU,WAAW,WAAW;AAEvD,MAAI,CAAC,gBAAgB,CAAC,eAAe;AACnC,WAAO,EAAE,OAAO,WAAW
,UAAU,KAAA;AAAA,EACvC;AAEA,MAAI,gBAAgB,CAAC,eAAe;AAClC,WAAO,EAAE,OAAO,YAAY,UAAU,KAAA;AAAA,EACxC;AAEA,MAAI,CAAC,gBAAgB,eAAe;AAClC,WAAO,EAAE,OAAO,aAAa,UAAU,KAAA;AAAA,EACzC;AAGA,MAAI,UAAU,YAAY,WAAW,GAAG;AACtC,WAAO,EAAE,OAAO,YAAY,UAAU,KAAA;AAAA,EACxC;AAGA,QAAM,WAAW;AAAA,IACf;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EAAA;AAIF,MAAI,QAAQ,UAAU;AACpB,WAAO,EAAE,OAAO,UAAU,UAAU,KAAA;AAAA,EACtC;AAEA,SAAO;AAAA,IACL,OAAO;AAAA,IACP,UAAU;AAAA,MACR,OAAO;AAAA,MACP,MAAM;AAAA,MACN,OAAO;AAAA,MACP,QAAQ;AAAA,MACR;AAAA,IAAA;AAAA,EACF;AAEJ;AAKA,SAAS,UACP,MACA,OACA,QACA,SACoD;AACpD,QAAM,OAAO,YAAY,IAAI;AAC7B,QAAM,iBAAiB,aAAa,KAAK;AACzC,QAAM,kBAAkB,aAAa,MAAM;AAE3C,QAAM,SAAkB,EAAE,GAAG,KAAA;AAC7B,QAAM,iBAAkC,CAAA;AAGxC,QAAM,8BAAc,IAAY;AAAA,IAC9B,GAAG,OAAO,KAAK,IAAI;AAAA,IACnB,GAAG,OAAO,KAAK,KAAK;AAAA,IACpB,GAAG,OAAO,KAAK,MAAM;AAAA,EAAA,CACtB;AAED,aAAW,OAAO,SAAS;AACzB,UAAM,YAAa,KAAiC,GAAG;AACvD,UAAM,aAAc,MAAkC,GAAG;AACzD,UAAM,cAAe,OAAmC,GAAG;AAE3D,UAAM,EAAE,OAAO,SAAA,IAAa;AAAA,MAC1B;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IAAA;AAGD,WAAmC,GAAG,IAAI;AAE3C,QAAI,UAAU;AACZ,qBAAe,KAAK,QAAQ;AAAA,IAC9B;AAAA,EACF;AAGA,MAAI,eAAe,SAAS,GAAG;AAC7B,UAAM,aAAa;AAAA,MACjB;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IAAA;AAGF,WAAO;AAAA,MACL;AAAA,MACA,UAAU;AAAA,QACR;AAAA,QACA,IAAI,KAAK,MAAM;AAAA,QACf,QAAQ;AAAA,QACR;AAAA,QACA;AAAA,QACA;AAAA,MAAA;AAAA,IACF;AAAA,EAEJ;AAEA,SAAO,EAAE,QAAQ,UAAU,KAAA;AAC7B;AAKA,SAAS,cAAc,MAAiB,OAAkB,QAAmB;AAC3E,QAAM,8BAAc,IAAA;AACpB,QAAM,+BAAe,IAAA;AACrB,QAAM,gCAAgB,IAAA;AAEtB,aAAW,QAAQ,MAAM;AACvB,YAAQ,IAAI,YAAY,IAAI,GAAG,IAAI;AAAA,EACrC;AACA,aAAW,QAAQ,OAAO;AACxB,aAAS,IAAI,YAAY,IAAI,GAAG,IAAI;AAAA,EACtC;AACA,aAAW,QAAQ,QAAQ;AACzB,cAAU,IAAI,YAAY,IAAI,GAAG,IAAI;AAAA,EACvC;AAEA,SAAO,EAAE,SAAS,UAAU,UAAA;AAC9B;AAKA,SAAS,kBACP,UACA,WACA,YACA,SACA,QACA,WACM;AACN,QAAM,EAAE,QAAQ,YAAY,SAAA,IAAa,UAAU,UAAU,WAAW,YAAY,OAAO;AAC3F,SAAO,KAAK,UAAU;AACtB,MAAI,UAAU;AACZ,cAAU,KAAK,QAAQ;AAAA,EACzB;AACF;AAKA,SAAS,mBACP,MACA,WACA,YACA,SACA,QACA,WACM;AACN,MAAI,UAAU,WAAW,UAAU,GAAG;AACpC,WAAO,KAAK,SAAS;AAAA,EACvB,OAAO;AACL,UAAM,gBAAyB;AAAA,MAC7B,IAAI;AAAA,MACJ,MAAM;AAAA,MACN,QAAQ;AAAA,QACN;AAAA,QACA,YAAY;AAAA,QACZ,WAAW;AAAA,MAAA;AAAA,IACb;AAEF,UAAM,EAAE,QAAQ,YAAY,SAAA,IAAa;AAAA,MACvC;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IAAA;AAEF,WAAO,KAAK,UAAU;AACtB,QAAI,UAAU;AACZ,gBAAU,KAAK,QAAQ;AAAA,IACzB;AAAA,EACF;AACF;AAKA,SAAS,YACP,MACA,SACA,UACA,WACA,SACA,QAQM;AACN,QAAM,WAAW,QAAQ,IAAI,IAAI;AACjC,QAAM,YAAY,SAAS,IAAI,IAAI;AACnC,QAAM,aAAa,UAAU,IAAI,IAAI;AAErC,MAAI,YAAY,aAAa,YAAY;AACvC,sBAAkB,UAAU,WAAW,YAAY,SAAS,OAAO,QAAQ,OAAO,SAAS;AAAA,EAC7F,WAAW,CAAC,YAAY,aAAa,YAAY;AAC/C,uBAAmB,MAAM,WAAW,YAAY,SAAS,OAAO,QAAQ,OAAO,SAAS;AAAA,EAC1F,WAAW,CAAC,YAAY,aAAa,CAAC,YAAY;AAChD,WAAO,OAAO,KAAK,SAAS;AAC5B,WAAO,UAAU,KAAK,SAAS;AAAA,EACjC,WAAW,CAAC,YAAY,CAAC,aAAa,YAAY;AAChD,WAAO,OAAO,KAAK,UAAU;AAC7B,WAAO,WAAW,KAAK,UAAU;AAAA,EACnC,WAAW,YAAY,CAAC,aAAa,YAAY;AAC/C,WAAO,eAAe,KAAK,QAAQ;AAAA,EACrC,WAAW,YAAY,aAAa,CAAC,YAAY;AAC/C,WAAO,gBAAgB,KAAK,QAAQ;AAAA,EACtC,WAAW,YAAY,CAAC,aAAa,CAAC,YAAY;AAChD,WAAO,eAAe,KAAK,QAAQ;AACnC,WAAO,gBAAgB,KAAK,QAAQ;AAAA,EACtC;AACF;AAWO,SAAS,cACd,MACA,OACA,QACA,SACa;AACb,QAAM,EAAE,SAAS,UAAU,UAAA,IAAc,cAAc,MAAM,OAAO,MAAM;AAE1E,QAAM,SAAS;AAAA,IACb,QAAQ,CAAA;AAAA,IACR,WAAW,CAAA;AAAA,IACX,WAAW,CAAA;AAAA,IACX,YAAY,CAAA;AAAA,IACZ,gBAAgB,CAAA;AAAA,IAChB,iBAAiB,CAAA;AAAA,EAAC;AAGpB,QAAM,WAAW,oBAAI,IAAY,CAAC,GAAG,QAAQ,KAAA,GAAQ,GAAG,SAAS,QAAQ,GAAG,UAAU,KAAA,CAAM,CAAC;AAE7F,aAAW,QAAQ,UAAU;AAC3B,gBAAY,MAAM,SAAS,UAAU,WAAW,SAAS,MAAM;AAAA,EACjE;AAGA,MAAI,SAAsB;AAC1B,MAAI,OAAO,UAAU,SAAS,GAAG;AAC/B,UAAM,gBAAgB,OAAO,UAAU,KAAK,CAAC,MAAM,EAAE,eAAe,YAAY;
AAChF,aAAS,gBAAgB,aAAa;AAAA,EACxC;AAEA,SAAO;AAAA,IACL;AAAA,IACA,GAAG;AAAA,EAAA;AAEP;;;;;AC3VA,MAAM,sBAAsB;AAC5B,MAAM,2BAA2B;AACjC,MAAM,yBAAyB;AAC/B,MAAM,sBAAsB;AAW5B,SAAS,aAAa,UAA2B;AAC/C,QAAM,WAAW,KAAK,SAAS,QAAQ;AAGvC,MAAI,SAAS,SAAS,MAAM,EAAG,QAAO;AAGtC,MAAI,SAAS,SAAS,MAAM,EAAG,QAAO;AAGtC,MAAI,SAAS,SAAS,YAAY,EAAG,QAAO;AAG5C,MAAI,SAAS,SAAS,OAAO,EAAG,QAAO;AAGvC,MAAI,SAAS,WAAW,GAAG,KAAK,SAAS,SAAS,MAAM,EAAG,QAAO;AAGlE,MAAI,SAAS,SAAS,GAAG,EAAG,QAAO;AAEnC,SAAO;AACT;AAYO,MAAM,oBAAoB,aAAa;AAAA,EAC3B;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAET,UAA4B;AAAA,EAC5B,WAAW;AAAA,EACX,qCAAkD,IAAA;AAAA,EAE1D,YAAY,WAAmB,SAA8B;AAC3D,UAAA;AACA,SAAK,YAAY;AACjB,SAAK,aAAa,SAAS,cAAc;AACzC,SAAK,iBAAiB,SAAS,kBAAkB;AACjD,SAAK,aAAa,SAAS,cAAc;AACzC,SAAK,eAAe,SAAS,gBAAgB;AAC7C,SAAK,aAAa,SAAS,cAAc;AAAA,EAC3C;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,QAAuB;AAC3B,QAAI,KAAK,UAAU;AACjB;AAAA,IACF;AAEA,WAAO,IAAI,QAAQ,CAAC,SAAS,WAAW;AACtC,WAAK,UAAU,SAAS,MAAM,KAAK,WAAW;AAAA,QAC5C,SAAS;AAAA,QACT,YAAY;AAAA,QACZ,YAAY,KAAK;AAAA,QACjB,UAAU,KAAK;AAAA,QACf,eAAe;AAAA,QACf,kBAAkB;AAAA,MAAA,CACnB;AAED,WAAK,QAAQ,GAAG,SAAS,MAAM;AAC7B,aAAK,WAAW;AAChB,aAAK,KAAK,OAAO;AACjB,gBAAA;AAAA,MACF,CAAC;AAED,WAAK,QAAQ,GAAG,SAAS,CAAC,UAAmB;AAC3C,aAAK,KAAK,SAAS,KAAK;AACxB,YAAI,CAAC,KAAK,UAAU;AAClB,iBAAO,KAAK;AAAA,QACd;AAAA,MACF,CAAC;AAED,WAAK,QAAQ,GAAG,UAAU,CAAC,aAAqB;AAC9C,aAAK,iBAAiB,QAAQ;AAAA,MAChC,CAAC;AAED,WAAK,QAAQ,GAAG,OAAO,CAAC,aAAqB;AAC3C,aAAK,iBAAiB,QAAQ;AAAA,MAChC,CAAC;AAAA,IACH,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA,EAKQ,iBAAiB,UAAwB;AAE/C,UAAM,gBAAgB,KAAK,eAAe,IAAI,QAAQ;AACtD,QAAI,eAAe;AACjB,mBAAa,aAAa;AAAA,IAC5B;AAGA,UAAM,QAAQ,WAAW,MAAM;AAC7B,WAAK,eAAe,OAAO,QAAQ;AACnC,WAAK,KAAK,UAAU,QAAQ;AAC5B,WAAK,iBAAiB,QAAQ;AAAA,IAChC,GAAG,KAAK,UAAU;AAElB,SAAK,eAAe,IAAI,UAAU,KAAK;AAAA,EACzC;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,iBAAiB,UAAiC;AAE9D,QAAI,KAAK,QAAQ,QAAQ,EAAE,YAAA,MAAkB,SAAS;AACpD;AAAA,IACF;AAEA,QAAI,YAA0B;AAE9B,aAAS,UAAU,GAAG,WAAW,KAAK,YAAY,WAAW;AAC3D,UAAI;AACF,cAAM,UAAU,MAAM,GAAG,SAAS,UAAU,OAAO;AACnD,cAAM,SAAS,KAAK,MAAM,OAAO;AACjC,aAAK,KAAK,UAAU,UAAU,MAAM;AACpC;AAAA,MACF,SAAS,OAAO;AACd,oBAAY;AACZ,YAAI,UAAU,KAAK,YAAY;AAC7B,gBAAM,KAAK,MAAM,KAAK,YAAY;AAAA,QACpC;AAAA,MACF;AAAA,IACF;AAEA,SAAK,KAAK,cAAc,UAAU,SAAS;AAAA,EAC7C;AAAA;AAAA;AAAA;AAAA,EAKQ,MAAM,IAA2B;AACvC,WAAO,IAAI,QAAQ,CAAC,YAAY,WAAW,SAAS,EAAE,CAAC;AAAA,EACzD;AAAA;AAAA;AAAA;AAAA,EAKA,QAAc;AACZ,QAAI,KAAK,SAAS;AAChB,WAAK,QAAQ,MAAA;AACb,WAAK,UAAU;AAAA,IACjB;AAGA,eAAW,SAAS,KAAK,eAAe,OAAA,GAAU;AAChD,mBAAa,KAAK;AAAA,IACpB;AACA,SAAK,eAAe,MAAA;AAEpB,SAAK,WAAW;AAAA,EAClB;AAAA;AAAA;AAAA;AAAA,EAKA,UAAkB;AAChB,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAKA,aAAsB;AACpB,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAKA,gBAAwB;AACtB,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAKA,oBAA4B;AAC1B,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAKA,kBAA0B;AACxB,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAKA,gBAAwB;AACtB,WAAO,KAAK;AAAA,EACd;AACF;"}
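
The source map above belongs to the bundled dist/index.js; its sourcesContent embeds the original TypeScript for the CSL-JSON validator, logger, atomic file writer, backup rotation, three-way merge, and file watcher. As a rough sketch of the Last-Write-Wins merge those sources describe (whether threeWayMerge is re-exported from the package root is an assumption, and the item shapes below are illustrative only):

// Sketch only: the import below assumes threeWayMerge is part of the "." export;
// this diff does not show dist/index.d.ts, so treat the path as a placeholder.
import { threeWayMerge } from "@ncukondo/reference-manager";

// custom.timestamp is what the merge compares when both sides edit the same field.
const baseItem = {
  id: "smith2020",
  type: "article-journal",
  title: "Old title",
  custom: { uuid: "u-1", timestamp: "2024-01-01T00:00:00.000Z" },
};

const result = threeWayMerge(
  [baseItem],
  [{ ...baseItem, title: "Local edit", custom: { ...baseItem.custom, timestamp: "2024-02-01T00:00:00.000Z" } }],
  [{ ...baseItem, title: "Remote edit", custom: { ...baseItem.custom, timestamp: "2024-03-01T00:00:00.000Z" } }]
);

// Both sides changed "title", so a field conflict is recorded and resolved by the
// newer timestamp; per the source above, the remote edit wins here.
console.log(result.status);            // "auto-resolved"
console.log(result.merged[0]?.title);  // "Remote edit"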

package/dist/server/index.d.ts
ADDED
@@ -0,0 +1,9 @@
+import { Hono } from "hono";
+import type { Library } from "../core/library.js";
+/**
+ * Create the main Hono server application.
+ * @param library - Library instance for the references API
+ * @returns Hono application
+ */
+export declare function createServer(library: Library): Hono<import("hono/types").BlankEnv, import("hono/types").BlankSchema, "/">;
+//# sourceMappingURL=index.d.ts.map
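
A minimal sketch of driving this factory in-process, assuming a Library instance has been obtained elsewhere (its construction is not part of this file) and assuming the module can be deep-imported from dist/server/index.js (only the "." export is declared in package.json later in this diff):

// Sketch only: the deep import path is an assumption.
import { createServer } from "@ncukondo/reference-manager/dist/server/index.js";

declare const library: Parameters<typeof createServer>[0]; // a Library, obtained elsewhere

const app = createServer(library);

// Hono apps can be exercised without binding a socket:
const res = await app.request("/health");
console.log(res.status, await res.json()); // 200 { status: "ok" }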

package/dist/server/index.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/server/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,IAAI,EAAE,MAAM,MAAM,CAAC;AAC5B,OAAO,KAAK,EAAE,OAAO,EAAE,MAAM,oBAAoB,CAAC;AAIlD;;;;GAIG;AACH,wBAAgB,YAAY,CAAC,OAAO,EAAE,OAAO,8EAW5C"}

package/dist/server/portfile.d.ts
ADDED
@@ -0,0 +1,43 @@
+/**
+ * Get the default portfile path.
+ * @returns The path to the portfile in the system's temp directory.
+ */
+export declare function getPortfilePath(): string;
+/**
+ * Write port, PID, library path, and optionally started_at to the portfile.
+ * @param portfilePath - Path to the portfile
+ * @param port - Server port number
+ * @param pid - Server process ID
+ * @param library - Path to the library file
+ * @param started_at - Optional ISO 8601 timestamp of when the server started
+ */
+export declare function writePortfile(portfilePath: string, port: number, pid: number, library: string, started_at?: string): Promise<void>;
+/**
+ * Read port, PID, library, and optionally started_at from the portfile.
+ * @param portfilePath - Path to the portfile
+ * @returns Object with port, pid, library (if present), and started_at (if present), or null if file doesn't exist or is invalid
+ */
+export declare function readPortfile(portfilePath: string): Promise<{
+    port: number;
+    pid: number;
+    library?: string;
+    started_at?: string;
+} | null>;
+/**
+ * Check if the portfile exists.
+ * @param portfilePath - Path to the portfile
+ * @returns True if portfile exists, false otherwise
+ */
+export declare function portfileExists(portfilePath: string): Promise<boolean>;
+/**
+ * Remove the portfile.
+ * @param portfilePath - Path to the portfile
+ */
+export declare function removePortfile(portfilePath: string): Promise<void>;
+/**
+ * Check if a process with the given PID is running.
+ * @param pid - Process ID to check
+ * @returns True if process is running, false otherwise
+ */
+export declare function isProcessRunning(pid: number): boolean;
+//# sourceMappingURL=portfile.d.ts.map
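
Taken together, these declarations describe a small discovery protocol: a running server records its port, PID, and library path in a portfile under the temp directory, and clients read that file back and check that the PID is still alive. A hedged sketch (the deep import path is an assumption; package.json only declares the "." export):

// Sketch only: import path inferred from the dist layout shown in this diff.
import {
  getPortfilePath,
  isProcessRunning,
  readPortfile,
  writePortfile,
} from "@ncukondo/reference-manager/dist/server/portfile.js";

const portfilePath = getPortfilePath();

// Server side: record where we are listening (port 3000 is a placeholder).
await writePortfile(portfilePath, 3000, process.pid, "/path/to/library.json", new Date().toISOString());

// Client side: discover a running server, if any.
const info = await readPortfile(portfilePath);
if (info && isProcessRunning(info.pid)) {
  console.log(`server on port ${info.port}, library: ${info.library ?? "unknown"}`);
} else {
  console.log("no live server recorded in the portfile");
}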

package/dist/server/portfile.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"portfile.d.ts","sourceRoot":"","sources":["../../src/server/portfile.ts"],"names":[],"mappings":"AAIA;;;GAGG;AACH,wBAAgB,eAAe,IAAI,MAAM,CAGxC;AAED;;;;;;;GAOG;AACH,wBAAsB,aAAa,CACjC,YAAY,EAAE,MAAM,EACpB,IAAI,EAAE,MAAM,EACZ,GAAG,EAAE,MAAM,EACX,OAAO,EAAE,MAAM,EACf,UAAU,CAAC,EAAE,MAAM,GAClB,OAAO,CAAC,IAAI,CAAC,CAYf;AAED;;;;GAIG;AACH,wBAAsB,YAAY,CAAC,YAAY,EAAE,MAAM,GAAG,OAAO,CAAC;IAChE,IAAI,EAAE,MAAM,CAAC;IACb,GAAG,EAAE,MAAM,CAAC;IACZ,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,UAAU,CAAC,EAAE,MAAM,CAAC;CACrB,GAAG,IAAI,CAAC,CAkCR;AAED;;;;GAIG;AACH,wBAAsB,cAAc,CAAC,YAAY,EAAE,MAAM,GAAG,OAAO,CAAC,OAAO,CAAC,CAO3E;AAED;;;GAGG;AACH,wBAAsB,cAAc,CAAC,YAAY,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC,CAMxE;AAED;;;;GAIG;AACH,wBAAgB,gBAAgB,CAAC,GAAG,EAAE,MAAM,GAAG,OAAO,CAYrD"}

package/dist/server/routes/health.d.ts
ADDED
@@ -0,0 +1,7 @@
+import { Hono } from "hono";
+/**
+ * Health check route.
+ * Returns a simple status to verify the server is running.
+ */
+export declare const healthRoute: Hono<import("hono/types").BlankEnv, import("hono/types").BlankSchema, "/">;
+//# sourceMappingURL=health.d.ts.map

package/dist/server/routes/health.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"health.d.ts","sourceRoot":"","sources":["../../../src/server/routes/health.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,IAAI,EAAE,MAAM,MAAM,CAAC;AAE5B;;;GAGG;AACH,eAAO,MAAM,WAAW,4EAAa,CAAC"}

package/dist/server/routes/references.d.ts
ADDED
@@ -0,0 +1,9 @@
+import { Hono } from "hono";
+import type { Library } from "../../core/library.js";
+/**
+ * Create references CRUD route with the given library.
+ * @param library - Library instance to use for operations
+ * @returns Hono app with references routes
+ */
+export declare function createReferencesRoute(library: Library): Hono<import("hono/types").BlankEnv, import("hono/types").BlankSchema, "/">;
+//# sourceMappingURL=references.d.ts.map

package/dist/server/routes/references.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"references.d.ts","sourceRoot":"","sources":["../../../src/server/routes/references.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,IAAI,EAAE,MAAM,MAAM,CAAC;AAC5B,OAAO,KAAK,EAAE,OAAO,EAAE,MAAM,uBAAuB,CAAC;AAErD;;;;GAIG;AACH,wBAAgB,qBAAqB,CAAC,OAAO,EAAE,OAAO,8EA4GrD"}

package/dist/server.js
ADDED
@@ -0,0 +1,91 @@
+import { Hono } from "hono";
+const healthRoute = new Hono();
+healthRoute.get("/", (c) => {
+  return c.json({ status: "ok" });
+});
+function createReferencesRoute(library) {
+  const route = new Hono();
+  route.get("/", (c) => {
+    const references = library.getAll();
+    const items = references.map((ref) => ref.getItem());
+    return c.json(items);
+  });
+  route.get("/:uuid", (c) => {
+    const uuid = c.req.param("uuid");
+    const ref = library.findByUuid(uuid);
+    if (!ref) {
+      return c.json({ error: "Reference not found" }, 404);
+    }
+    return c.json(ref.getItem());
+  });
+  route.post("/", async (c) => {
+    try {
+      const body = await c.req.json();
+      library.add(body);
+      const allRefs = library.getAll();
+      const addedRef = allRefs[allRefs.length - 1];
+      if (!addedRef) {
+        return c.json({ error: "Failed to add reference" }, 500);
+      }
+      return c.json(addedRef.getItem(), 201);
+    } catch (error) {
+      return c.json(
+        {
+          error: "Invalid request body",
+          details: error instanceof Error ? error.message : String(error)
+        },
+        400
+      );
+    }
+  });
+  route.put("/:uuid", async (c) => {
+    const uuid = c.req.param("uuid");
+    const existing = library.findByUuid(uuid);
+    if (!existing) {
+      return c.json({ error: "Reference not found" }, 404);
+    }
+    try {
+      const body = await c.req.json();
+      if (!body.custom) {
+        body.custom = {};
+      }
+      body.custom.uuid = uuid;
+      library.removeByUuid(uuid);
+      library.add(body);
+      const updatedRef = library.findByUuid(uuid);
+      if (!updatedRef) {
+        return c.json({ error: "Failed to update reference" }, 500);
+      }
+      return c.json(updatedRef.getItem());
+    } catch (error) {
+      return c.json(
+        {
+          error: "Invalid request body",
+          details: error instanceof Error ? error.message : String(error)
+        },
+        400
+      );
+    }
+  });
+  route.delete("/:uuid", (c) => {
+    const uuid = c.req.param("uuid");
+    const existing = library.findByUuid(uuid);
+    if (!existing) {
+      return c.json({ error: "Reference not found" }, 404);
+    }
+    library.removeByUuid(uuid);
+    return c.body(null, 204);
+  });
+  return route;
+}
+function createServer(library) {
+  const app = new Hono();
+  app.route("/health", healthRoute);
+  const referencesRoute = createReferencesRoute(library);
+  app.route("/api/references", referencesRoute);
+  return app;
+}
+export {
+  createServer
+};
+//# sourceMappingURL=server.js.map
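
The bundle above exposes a small CRUD surface: GET/POST on /api/references, GET/PUT/DELETE on /api/references/:uuid, and GET /health. A client-side sketch using fetch (the base URL is a placeholder; how the app is bound to a port is handled elsewhere, e.g. by the CLI, and is not shown in this file):

// Sketch only: host and port are placeholders.
const base = "http://127.0.0.1:3000";

// Health probe.
console.log(await (await fetch(`${base}/health`)).json()); // { status: "ok" }

// List all references as CSL-JSON items.
const items = await (await fetch(`${base}/api/references`)).json();
console.log(items.length);

// Create a reference; the route responds 201 with the stored item.
const created = await fetch(`${base}/api/references`, {
  method: "POST",
  headers: { "content-type": "application/json" },
  body: JSON.stringify({ id: "smith2020", type: "article-journal", title: "Example" }),
});
console.log(created.status); // 201 on success, 400 on an invalid body

// Updates and deletes address items by custom.uuid, not by citation key.
const uuid = "00000000-0000-0000-0000-000000000000"; // placeholder
const deleted = await fetch(`${base}/api/references/${uuid}`, { method: "DELETE" });
console.log(deleted.status); // 204 if found, 404 otherwise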

package/dist/server.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"server.js","sources":["../src/server/routes/health.ts","../src/server/routes/references.ts","../src/server/index.ts"],"sourcesContent":["import { Hono } from \"hono\";\n\n/**\n * Health check route.\n * Returns a simple status to verify the server is running.\n */\nexport const healthRoute = new Hono();\n\nhealthRoute.get(\"/\", (c) => {\n return c.json({ status: \"ok\" });\n});\n","import { Hono } from \"hono\";\nimport type { Library } from \"../../core/library.js\";\n\n/**\n * Create references CRUD route with the given library.\n * @param library - Library instance to use for operations\n * @returns Hono app with references routes\n */\nexport function createReferencesRoute(library: Library) {\n const route = new Hono();\n\n // GET / - Get all references\n route.get(\"/\", (c) => {\n const references = library.getAll();\n const items = references.map((ref) => ref.getItem());\n return c.json(items);\n });\n\n // GET /:uuid - Get reference by UUID\n route.get(\"/:uuid\", (c) => {\n const uuid = c.req.param(\"uuid\");\n const ref = library.findByUuid(uuid);\n\n if (!ref) {\n return c.json({ error: \"Reference not found\" }, 404);\n }\n\n return c.json(ref.getItem());\n });\n\n // POST / - Create new reference\n route.post(\"/\", async (c) => {\n try {\n const body = await c.req.json();\n\n // Create and add reference (library.add handles validation)\n library.add(body);\n\n // Find the newly added reference by UUID (it was just added)\n const allRefs = library.getAll();\n const addedRef = allRefs[allRefs.length - 1];\n\n if (!addedRef) {\n return c.json({ error: \"Failed to add reference\" }, 500);\n }\n\n return c.json(addedRef.getItem(), 201);\n } catch (error) {\n return c.json(\n {\n error: \"Invalid request body\",\n details: error instanceof Error ? error.message : String(error),\n },\n 400\n );\n }\n });\n\n // PUT /:uuid - Update reference\n route.put(\"/:uuid\", async (c) => {\n const uuid = c.req.param(\"uuid\");\n\n // Check if reference exists\n const existing = library.findByUuid(uuid);\n if (!existing) {\n return c.json({ error: \"Reference not found\" }, 404);\n }\n\n try {\n const body = await c.req.json();\n\n // Ensure UUID is preserved in the body\n if (!body.custom) {\n body.custom = {};\n }\n body.custom.uuid = uuid;\n\n // Remove old reference and add updated one\n library.removeByUuid(uuid);\n library.add(body);\n\n // Find the updated reference\n const updatedRef = library.findByUuid(uuid);\n if (!updatedRef) {\n return c.json({ error: \"Failed to update reference\" }, 500);\n }\n\n return c.json(updatedRef.getItem());\n } catch (error) {\n return c.json(\n {\n error: \"Invalid request body\",\n details: error instanceof Error ? 
error.message : String(error),\n },\n 400\n );\n }\n });\n\n // DELETE /:uuid - Delete reference\n route.delete(\"/:uuid\", (c) => {\n const uuid = c.req.param(\"uuid\");\n\n // Check if reference exists\n const existing = library.findByUuid(uuid);\n if (!existing) {\n return c.json({ error: \"Reference not found\" }, 404);\n }\n\n // Remove reference\n library.removeByUuid(uuid);\n\n return c.body(null, 204);\n });\n\n return route;\n}\n","import { Hono } from \"hono\";\nimport type { Library } from \"../core/library.js\";\nimport { healthRoute } from \"./routes/health.js\";\nimport { createReferencesRoute } from \"./routes/references.js\";\n\n/**\n * Create the main Hono server application.\n * @param library - Library instance for the references API\n * @returns Hono application\n */\nexport function createServer(library: Library) {\n const app = new Hono();\n\n // Health check route\n app.route(\"/health\", healthRoute);\n\n // References API routes\n const referencesRoute = createReferencesRoute(library);\n app.route(\"/api/references\", referencesRoute);\n\n return app;\n}\n"],"names":[],"mappings":";AAMO,MAAM,cAAc,IAAI,KAAA;AAE/B,YAAY,IAAI,KAAK,CAAC,MAAM;AAC1B,SAAO,EAAE,KAAK,EAAE,QAAQ,MAAM;AAChC,CAAC;ACFM,SAAS,sBAAsB,SAAkB;AACtD,QAAM,QAAQ,IAAI,KAAA;AAGlB,QAAM,IAAI,KAAK,CAAC,MAAM;AACpB,UAAM,aAAa,QAAQ,OAAA;AAC3B,UAAM,QAAQ,WAAW,IAAI,CAAC,QAAQ,IAAI,SAAS;AACnD,WAAO,EAAE,KAAK,KAAK;AAAA,EACrB,CAAC;AAGD,QAAM,IAAI,UAAU,CAAC,MAAM;AACzB,UAAM,OAAO,EAAE,IAAI,MAAM,MAAM;AAC/B,UAAM,MAAM,QAAQ,WAAW,IAAI;AAEnC,QAAI,CAAC,KAAK;AACR,aAAO,EAAE,KAAK,EAAE,OAAO,sBAAA,GAAyB,GAAG;AAAA,IACrD;AAEA,WAAO,EAAE,KAAK,IAAI,QAAA,CAAS;AAAA,EAC7B,CAAC;AAGD,QAAM,KAAK,KAAK,OAAO,MAAM;AAC3B,QAAI;AACF,YAAM,OAAO,MAAM,EAAE,IAAI,KAAA;AAGzB,cAAQ,IAAI,IAAI;AAGhB,YAAM,UAAU,QAAQ,OAAA;AACxB,YAAM,WAAW,QAAQ,QAAQ,SAAS,CAAC;AAE3C,UAAI,CAAC,UAAU;AACb,eAAO,EAAE,KAAK,EAAE,OAAO,0BAAA,GAA6B,GAAG;AAAA,MACzD;AAEA,aAAO,EAAE,KAAK,SAAS,QAAA,GAAW,GAAG;AAAA,IACvC,SAAS,OAAO;AACd,aAAO,EAAE;AAAA,QACP;AAAA,UACE,OAAO;AAAA,UACP,SAAS,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAAA,QAAA;AAAA,QAEhE;AAAA,MAAA;AAAA,IAEJ;AAAA,EACF,CAAC;AAGD,QAAM,IAAI,UAAU,OAAO,MAAM;AAC/B,UAAM,OAAO,EAAE,IAAI,MAAM,MAAM;AAG/B,UAAM,WAAW,QAAQ,WAAW,IAAI;AACxC,QAAI,CAAC,UAAU;AACb,aAAO,EAAE,KAAK,EAAE,OAAO,sBAAA,GAAyB,GAAG;AAAA,IACrD;AAEA,QAAI;AACF,YAAM,OAAO,MAAM,EAAE,IAAI,KAAA;AAGzB,UAAI,CAAC,KAAK,QAAQ;AAChB,aAAK,SAAS,CAAA;AAAA,MAChB;AACA,WAAK,OAAO,OAAO;AAGnB,cAAQ,aAAa,IAAI;AACzB,cAAQ,IAAI,IAAI;AAGhB,YAAM,aAAa,QAAQ,WAAW,IAAI;AAC1C,UAAI,CAAC,YAAY;AACf,eAAO,EAAE,KAAK,EAAE,OAAO,6BAAA,GAAgC,GAAG;AAAA,MAC5D;AAEA,aAAO,EAAE,KAAK,WAAW,QAAA,CAAS;AAAA,IACpC,SAAS,OAAO;AACd,aAAO,EAAE;AAAA,QACP;AAAA,UACE,OAAO;AAAA,UACP,SAAS,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAAA,QAAA;AAAA,QAEhE;AAAA,MAAA;AAAA,IAEJ;AAAA,EACF,CAAC;AAGD,QAAM,OAAO,UAAU,CAAC,MAAM;AAC5B,UAAM,OAAO,EAAE,IAAI,MAAM,MAAM;AAG/B,UAAM,WAAW,QAAQ,WAAW,IAAI;AACxC,QAAI,CAAC,UAAU;AACb,aAAO,EAAE,KAAK,EAAE,OAAO,sBAAA,GAAyB,GAAG;AAAA,IACrD;AAGA,YAAQ,aAAa,IAAI;AAEzB,WAAO,EAAE,KAAK,MAAM,GAAG;AAAA,EACzB,CAAC;AAED,SAAO;AACT;AC1GO,SAAS,aAAa,SAAkB;AAC7C,QAAM,MAAM,IAAI,KAAA;AAGhB,MAAI,MAAM,WAAW,WAAW;AAGhC,QAAM,kBAAkB,sBAAsB,OAAO;AACrD,MAAI,MAAM,mBAAmB,eAAe;AAE5C,SAAO;AACT;"}

package/dist/utils/backup.d.ts
ADDED
@@ -0,0 +1,21 @@
+export interface BackupOptions {
+    maxGenerations?: number;
+    maxAgeMs?: number;
+}
+/**
+ * Get backup directory path for a library
+ */
+export declare function getBackupDirectory(libraryName: string): string;
+/**
+ * Create a backup of the given file
+ */
+export declare function createBackup(sourceFile: string, libraryName: string): Promise<string>;
+/**
+ * List all backups for a library (sorted by modification time, newest first)
+ */
+export declare function listBackups(libraryName: string): Promise<string[]>;
+/**
+ * Clean up old backups based on generation count and age
+ */
+export declare function cleanupOldBackups(libraryName: string, options?: BackupOptions): Promise<void>;
+//# sourceMappingURL=backup.d.ts.map
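
A sketch of the backup flow these declarations imply; per the implementation embedded in the index.js source map earlier, backups are timestamped copies kept under the OS temp directory (the deep import path is an assumption):

// Sketch only: import path inferred from the dist layout shown in this diff.
import {
  cleanupOldBackups,
  createBackup,
  getBackupDirectory,
  listBackups,
} from "@ncukondo/reference-manager/dist/utils/backup.js";

const libraryName = "my-library"; // placeholder

// Snapshot the library file before a risky write.
const backupPath = await createBackup("/path/to/library.json", libraryName);
console.log("backup written to", backupPath, "in", getBackupDirectory(libraryName));

// Prune old snapshots: keep at most 10 generations, none older than 30 days.
await cleanupOldBackups(libraryName, { maxGenerations: 10, maxAgeMs: 30 * 24 * 60 * 60 * 1000 });

console.log("backups remaining:", (await listBackups(libraryName)).length);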

package/dist/utils/backup.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"backup.d.ts","sourceRoot":"","sources":["../../src/utils/backup.ts"],"names":[],"mappings":"AAOA,MAAM,WAAW,aAAa;IAC5B,cAAc,CAAC,EAAE,MAAM,CAAC;IACxB,QAAQ,CAAC,EAAE,MAAM,CAAC;CACnB;AA+BD;;GAEG;AACH,wBAAgB,kBAAkB,CAAC,WAAW,EAAE,MAAM,GAAG,MAAM,CAG9D;AAED;;GAEG;AACH,wBAAsB,YAAY,CAAC,UAAU,EAAE,MAAM,EAAE,WAAW,EAAE,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC,CAW3F;AAED;;GAEG;AACH,wBAAsB,WAAW,CAAC,WAAW,EAAE,MAAM,GAAG,OAAO,CAAC,MAAM,EAAE,CAAC,CAoBxE;AAED;;GAEG;AACH,wBAAsB,iBAAiB,CACrC,WAAW,EAAE,MAAM,EACnB,OAAO,CAAC,EAAE,aAAa,GACtB,OAAO,CAAC,IAAI,CAAC,CAsBf"}

package/dist/utils/file.d.ts
ADDED
@@ -0,0 +1,9 @@
+/**
+ * Write file atomically with parent directory creation
+ */
+export declare function writeFileAtomic(filePath: string, content: string): Promise<void>;
+/**
+ * Ensure directory exists, creating it recursively if necessary
+ */
+export declare function ensureDirectoryExists(dirPath: string): Promise<void>;
+//# sourceMappingURL=file.d.ts.map
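
A minimal sketch of the atomic write helper (import path assumed, as above); per the source embedded in the index.js map it delegates to write-file-atomic after creating the parent directory:

// Sketch only: import path inferred from the dist layout shown in this diff.
import { writeFileAtomic } from "@ncukondo/reference-manager/dist/utils/file.js";

// Parent directories are created as needed; the content is written atomically,
// so readers should never observe a half-written library file.
await writeFileAtomic("/path/to/library.json", JSON.stringify([], null, 2));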

package/dist/utils/file.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"file.d.ts","sourceRoot":"","sources":["../../src/utils/file.ts"],"names":[],"mappings":"AAIA;;GAEG;AACH,wBAAsB,eAAe,CAAC,QAAQ,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC,CAGtF;AAED;;GAEG;AACH,wBAAsB,qBAAqB,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC,CAE1E"}

package/dist/utils/hash.d.ts
ADDED
@@ -0,0 +1,9 @@
+/**
+ * Compute SHA-256 hash of a string
+ */
+export declare function computeHash(input: string): string;
+/**
+ * Compute SHA-256 hash of a file
+ */
+export declare function computeFileHash(filePath: string): Promise<string>;
+//# sourceMappingURL=hash.d.ts.map

package/dist/utils/hash.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"hash.d.ts","sourceRoot":"","sources":["../../src/utils/hash.ts"],"names":[],"mappings":"AAGA;;GAEG;AACH,wBAAgB,WAAW,CAAC,KAAK,EAAE,MAAM,GAAG,MAAM,CAEjD;AAED;;GAEG;AACH,wBAAsB,eAAe,CAAC,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC,CASvE"}

package/dist/utils/index.d.ts
ADDED
@@ -0,0 +1,5 @@
+export { createLogger, type Logger, type LogLevel } from "./logger";
+export { computeHash, computeFileHash } from "./hash";
+export { writeFileAtomic, ensureDirectoryExists } from "./file";
+export { createBackup, cleanupOldBackups, getBackupDirectory, listBackups, type BackupOptions, } from "./backup";
+//# sourceMappingURL=index.d.ts.map

package/dist/utils/index.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/utils/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,YAAY,EAAE,KAAK,MAAM,EAAE,KAAK,QAAQ,EAAE,MAAM,UAAU,CAAC;AACpE,OAAO,EAAE,WAAW,EAAE,eAAe,EAAE,MAAM,QAAQ,CAAC;AACtD,OAAO,EAAE,eAAe,EAAE,qBAAqB,EAAE,MAAM,QAAQ,CAAC;AAChE,OAAO,EACL,YAAY,EACZ,iBAAiB,EACjB,kBAAkB,EAClB,WAAW,EACX,KAAK,aAAa,GACnB,MAAM,UAAU,CAAC"}

package/dist/utils/logger.d.ts
ADDED
@@ -0,0 +1,8 @@
+export type LogLevel = "silent" | "info" | "debug";
+export interface Logger {
+    info(...args: unknown[]): void;
+    debug(...args: unknown[]): void;
+    error(...args: unknown[]): void;
+}
+export declare function createLogger(level?: LogLevel): Logger;
+//# sourceMappingURL=logger.d.ts.map
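
A short sketch of the logger behaviour described above; per the source embedded in the index.js map, all output goes to stderr and error() prints regardless of level (importing via the utils barrel shown earlier is an assumption, since package.json declares no subpath exports):

// Sketch only: import path inferred from the dist layout shown in this diff.
import { createLogger } from "@ncukondo/reference-manager/dist/utils/index.js";

const log = createLogger("debug"); // levels: "silent" | "info" | "debug"

log.info("library loaded");                  // printed at "info" and "debug"
log.debug("raw item:", { id: "smith2020" }); // printed only at "debug"
log.error("failed to parse CSL-JSON");       // always printed, even at "silent"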

package/dist/utils/logger.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"logger.d.ts","sourceRoot":"","sources":["../../src/utils/logger.ts"],"names":[],"mappings":"AAAA,MAAM,MAAM,QAAQ,GAAG,QAAQ,GAAG,MAAM,GAAG,OAAO,CAAC;AAEnD,MAAM,WAAW,MAAM;IACrB,IAAI,CAAC,GAAG,IAAI,EAAE,OAAO,EAAE,GAAG,IAAI,CAAC;IAC/B,KAAK,CAAC,GAAG,IAAI,EAAE,OAAO,EAAE,GAAG,IAAI,CAAC;IAChC,KAAK,CAAC,GAAG,IAAI,EAAE,OAAO,EAAE,GAAG,IAAI,CAAC;CACjC;AAED,wBAAgB,YAAY,CAAC,KAAK,GAAE,QAAiB,GAAG,MAAM,CAyB7D"}

package/package.json
ADDED
@@ -0,0 +1,72 @@
+{
+  "name": "@ncukondo/reference-manager",
+  "version": "0.1.0",
+  "description": "A local reference management tool using CSL-JSON as the single source of truth",
+  "publishConfig": {
+    "access": "public"
+  },
+  "type": "module",
+  "engines": {
+    "node": ">=22"
+  },
+  "bin": {
+    "ref": "./bin/reference-manager.js",
+    "reference-manager": "./bin/reference-manager.js"
+  },
+  "main": "./dist/index.js",
+  "types": "./dist/index.d.ts",
+  "exports": {
+    ".": {
+      "import": "./dist/index.js",
+      "types": "./dist/index.d.ts"
+    }
+  },
+  "files": [
+    "dist",
+    "bin"
+  ],
+  "scripts": {
+    "dev": "vite",
+    "build": "vite build && tsc --emitDeclarationOnly",
+    "preview": "vite preview",
+    "test": "vitest",
+    "test:watch": "vitest --watch",
+    "test:coverage": "vitest run --coverage",
+    "lint": "biome check .",
+    "lint:fix": "biome check --write .",
+    "format": "biome format --write .",
+    "typecheck": "tsc --noEmit",
+    "prepublishOnly": "npm run build"
+  },
+  "dependencies": {
+    "@iarna/toml": "^2.2.5",
+    "chokidar": "^5.0.0",
+    "commander": "^12.1.0",
+    "hono": "^4.11.1",
+    "write-file-atomic": "^7.0.0",
+    "zod": "^4.1.13"
+  },
+  "devDependencies": {
+    "@biomejs/biome": "^1.9.0",
+    "@types/chokidar": "^1.7.5",
+    "@types/node": "^22.19.2",
+    "@types/write-file-atomic": "^4.0.3",
+    "typescript": "^5.6.0",
+    "vite": "^6.0.0",
+    "vite-node": "^2.1.0",
+    "vitest": "^2.1.0"
+  },
+  "keywords": [
+    "reference-manager",
+    "csl-json",
+    "bibliography",
+    "pandoc",
+    "citation"
+  ],
+  "author": "",
+  "license": "MIT",
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/ncukondo/reference-manager.git"
+  }
+}