@ez-corp/ez-context 0.1.0 → 0.1.2

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
package/dist/cli.js CHANGED
@@ -319,9 +319,9 @@ async function driftAction(pathArg, options) {
319
319
  try {
320
320
  const bridge = await createBridge(projectPath);
321
321
  if (!await bridge.hasIndex(projectPath)) {
322
- spinner.fail("No search index found");
323
- console.error(chalk.red("Run 'ez-context generate' or 'ez-search index .' first to create an index."));
324
- process.exit(1);
322
+ spinner.text = "No search index found — creating index...";
323
+ await bridge.ensureIndex(projectPath);
324
+ spinner.text = "Index created. Loading context files...";
325
325
  }
326
326
  let filePaths;
327
327
  if (options.file) filePaths = [path.resolve(projectPath, options.file)];
package/dist/cli.js.map CHANGED
@@ -1 +1 @@
1
- {"version":3,"file":"cli.js","names":[],"sources":["../src/commands/generate.ts","../src/commands/inspect.ts","../src/core/drift/claim-extractor.ts","../src/core/drift/claim-scorer.ts","../src/core/drift/report.ts","../src/commands/drift.ts","../src/core/updater.ts","../src/commands/update.ts","../src/cli.ts"],"sourcesContent":["import path from \"node:path\";\nimport ora from \"ora\";\nimport chalk from \"chalk\";\nimport { extractConventions } from \"../core/pipeline.js\";\nimport { emit } from \"../emitters/index.js\";\nimport type { EmitOptions, OutputFormat } from \"../emitters/types.js\";\n\nconst DRY_RUN_PREVIEW_LINES = 20;\n\nconst VALID_FORMATS: OutputFormat[] = [\n \"claude\",\n \"agents\",\n \"cursor\",\n \"copilot\",\n \"skills\",\n \"rulesync\",\n \"ruler\",\n];\n\nexport function parseFormats(raw: string): OutputFormat[] {\n const formats = [...new Set(raw.split(\",\").map((s) => s.trim()).filter(Boolean))];\n const invalid = formats.filter((f) => !VALID_FORMATS.includes(f as OutputFormat));\n if (invalid.length > 0) {\n throw new Error(\n `Invalid format(s): ${invalid.join(\", \")}. Valid: ${VALID_FORMATS.join(\", \")}`\n );\n }\n return formats as OutputFormat[];\n}\n\nfunction truncatePreview(content: string): string {\n const lines = content.split(\"\\n\");\n if (lines.length <= DRY_RUN_PREVIEW_LINES) {\n return content;\n }\n const preview = lines.slice(0, DRY_RUN_PREVIEW_LINES).join(\"\\n\");\n return `${preview}\\n... (${lines.length} lines total)`;\n}\n\nexport async function generateAction(\n pathArg: string,\n options: { dryRun?: boolean; yes?: boolean; output?: string; threshold?: string; format?: string }\n): Promise<void> {\n const projectPath = path.resolve(pathArg);\n\n // --yes and non-TTY environments: ora handles non-TTY gracefully by\n // falling back to plain text. 
No interactive prompts are used anywhere.\n const spinner = ora(\"Analyzing project conventions...\").start();\n\n try {\n const registry = await extractConventions(projectPath);\n const conventionCount = registry.conventions.length;\n spinner.succeed(`Found ${conventionCount} convention${conventionCount === 1 ? \"\" : \"s\"}`);\n\n const confidenceThreshold = parseFloat(options.threshold ?? \"0.7\");\n const outputDir = path.resolve(options.output ?? \".\");\n\n // Parse and validate --format (default: \"claude,agents\")\n const formats = parseFormats(options.format ?? \"claude,agents\");\n\n const emitOptions: EmitOptions = {\n outputDir,\n confidenceThreshold,\n dryRun: options.dryRun ?? false,\n formats,\n };\n\n const isDefault = formats.length === 2 && formats.includes(\"claude\") && formats.includes(\"agents\");\n const genSpinnerText = isDefault\n ? \"Generating context files...\"\n : `Generating ${formats.length} context file${formats.length === 1 ? \"\" : \"s\"}...`;\n const genSpinner = ora(genSpinnerText).start();\n const result = await emit(registry, emitOptions);\n\n if (options.dryRun) {\n genSpinner.succeed(\"Dry run complete\");\n console.log();\n console.log(chalk.bold.yellow(\"╔══════════════════════════════════════╗\"));\n console.log(chalk.bold.yellow(\"║ DRY RUN -- no files will be written ║\"));\n console.log(chalk.bold.yellow(\"╚══════════════════════════════════════╝\"));\n console.log();\n for (const [format, content] of Object.entries(result.rendered)) {\n console.log(chalk.cyan(`--- ${format.toUpperCase()} ---`));\n console.log(truncatePreview(content));\n console.log();\n }\n } else {\n genSpinner.succeed(`Generated ${result.filesWritten.length} file${result.filesWritten.length === 1 ? 
\"\" : \"s\"}`);\n console.log();\n console.log(chalk.bold.green(\"Generated files:\"));\n for (const filePath of result.filesWritten) {\n const relPath = path.relative(outputDir, filePath);\n console.log(` ${chalk.cyan(relPath)}`);\n }\n }\n } catch (err) {\n spinner.fail(\"Analysis failed\");\n const message = err instanceof Error ? err.message : String(err);\n console.error(chalk.red(message));\n process.exit(1);\n }\n}\n","import path from \"node:path\";\nimport ora from \"ora\";\nimport chalk from \"chalk\";\nimport { extractConventions } from \"../core/pipeline.js\";\nimport type { ConventionEntry } from \"../core/schema.js\";\n\nfunction confidenceDot(confidence: number): string {\n if (confidence >= 0.8) return chalk.green(\"●\");\n if (confidence >= 0.6) return chalk.yellow(\"●\");\n return chalk.red(\"●\");\n}\n\nexport async function inspectAction(\n pathArg: string,\n options: { threshold?: string }\n): Promise<void> {\n const projectPath = path.resolve(pathArg);\n const spinner = ora(\"Analyzing project conventions...\").start();\n\n try {\n const registry = await extractConventions(projectPath);\n const totalCount = registry.conventions.length;\n spinner.succeed(`Extracted ${totalCount} convention${totalCount === 1 ? \"\" : \"s\"}`);\n\n const threshold = parseFloat(options.threshold ?? \"0.7\");\n\n const filtered = registry.conventions.filter(\n (c) => c.confidence >= threshold\n );\n\n if (filtered.length === 0) {\n console.log(\n chalk.yellow(\n `\\nNo conventions found above ${threshold} confidence threshold. Try lowering --threshold.`\n )\n );\n return;\n }\n\n // Group by category\n const byCategory = new Map<string, ConventionEntry[]>();\n for (const convention of filtered) {\n const group = byCategory.get(convention.category) ?? 
[];\n group.push(convention);\n byCategory.set(convention.category, group);\n }\n\n console.log();\n for (const [category, conventions] of byCategory) {\n console.log(chalk.bold(category.toUpperCase()));\n for (const convention of conventions) {\n const pct = Math.round(convention.confidence * 100);\n console.log(\n ` ${confidenceDot(convention.confidence)} ${convention.pattern} ${chalk.gray(`(${pct}%)`)}`\n );\n }\n console.log();\n }\n\n const categoryCount = byCategory.size;\n console.log(\n chalk.gray(\n `Found ${filtered.length} convention${filtered.length === 1 ? \"\" : \"s\"} across ${categoryCount} categor${categoryCount === 1 ? \"y\" : \"ies\"} (threshold: ${threshold})`\n )\n );\n } catch (err) {\n spinner.fail(\"Analysis failed\");\n const message = err instanceof Error ? err.message : String(err);\n console.error(chalk.red(message));\n process.exit(1);\n }\n}\n","/**\n * Claim extractor — parses markdown context files into individual testable claims.\n *\n * Input: raw markdown string (CLAUDE.md, AGENTS.md, .cursorrules, etc.)\n * Output: Claim[] where each claim is an atomic declarative statement\n *\n * Extraction rules:\n * - Bullet points: ^[-*+]\\s+\n * - Numbered list items: ^\\d+\\.\\s+\n * - Bold/code markers stripped from extracted text\n * - Boilerplate value lines skipped (Language: X, Framework: X, etc.)\n * - ez-context markers and HTML comments skipped\n * - Claims shorter than 10 chars or longer than 300 chars excluded\n * - Current section heading tracked for context\n */\n\n// ---------------------------------------------------------------------------\n// Types\n// ---------------------------------------------------------------------------\n\nexport interface Claim {\n text: string; // The claim text (bold/code markers stripped)\n sourceFile: string; // Which file it came from\n sourceLine: number; // 1-based line number\n sourceSection: string; // Nearest parent heading\n}\n\n// 
---------------------------------------------------------------------------\n// Filters\n// ---------------------------------------------------------------------------\n\n/**\n * Matches boilerplate key-value lines that are structural metadata, not\n * behavioral claims. Applied AFTER bold/code stripping.\n *\n * Examples skipped:\n * \"Language: TypeScript\"\n * \"Package Manager: bun\"\n */\nconst BOILERPLATE_VALUE =\n /^(Language|Framework|Build|Package Manager|Test Runner|Pattern|Layers):\\s/i;\n\n// ---------------------------------------------------------------------------\n// Core function\n// ---------------------------------------------------------------------------\n\n/**\n * Extract all testable claims from a markdown string.\n *\n * @param content Raw markdown content of the context file\n * @param sourceFile Path to the source file (stored on each claim)\n * @returns Array of extracted claims, filtered and deduplicated\n */\nexport function extractClaims(content: string, sourceFile: string): Claim[] {\n const claims: Claim[] = [];\n const lines = content.split(\"\\n\");\n let currentSection = \"\";\n\n for (let i = 0; i < lines.length; i++) {\n const line = lines[i]!.trim();\n const lineNum = i + 1; // 1-based\n\n // Skip blank lines\n if (!line) continue;\n\n // Skip HTML comments (includes ez-context markers like <!-- ez-context:... -->)\n if (line.startsWith(\"<!--\")) continue;\n\n // Skip lines containing ez-context markers (belt-and-suspenders for inline markers)\n if (line.includes(\"ez-context:\")) continue;\n\n // Track section headings — H1, H2, H3\n const heading = line.match(/^#{1,3}\\s+(.+)/);\n if (heading) {\n currentSection = heading[1]!.trim();\n continue;\n }\n\n // Match bullet points or numbered list items\n const bullet = line.match(/^[-*+]\\s+(.+)/);\n const numbered = !bullet ? line.match(/^\\d+\\.\\s+(.+)/) : null;\n const rawText = bullet ? bullet[1]! : numbered ? numbered[1]! 
: null;\n\n if (!rawText) continue;\n\n // Strip bold markers (**text** -> text) and inline code markers (`text` -> text)\n const text = rawText\n .replace(/\\*\\*([^*]+)\\*\\*/g, \"$1\")\n .replace(/`([^`]+)`/g, \"$1\")\n .trim();\n\n // Apply length filters\n if (text.length < 10 || text.length > 300) continue;\n\n // Skip boilerplate key-value lines\n if (BOILERPLATE_VALUE.test(text)) continue;\n\n claims.push({\n text,\n sourceFile,\n sourceLine: lineNum,\n sourceSection: currentSection,\n });\n }\n\n return claims;\n}\n","/**\n * Claim scorer — compares extracted claims against the code index via semantic search.\n *\n * Each claim is searched against the indexed codebase. The top similarity score\n * determines whether the claim is GREEN (well-supported), YELLOW (possibly stale),\n * or RED (contradicted / not found).\n *\n * Claims are processed in batches to avoid ONNX pipeline contention.\n */\nimport type { Claim } from \"./claim-extractor.js\";\nimport type { SearchResult, EzSearchBridge } from \"../ez-search-bridge.js\";\n\n// ---------------------------------------------------------------------------\n// Constants\n// ---------------------------------------------------------------------------\n\nexport const GREEN_THRESHOLD = 0.65;\nexport const YELLOW_THRESHOLD = 0.40;\nexport const BATCH_SIZE = 10;\n\n// ---------------------------------------------------------------------------\n// Types\n// ---------------------------------------------------------------------------\n\nexport type ClaimStatus = \"GREEN\" | \"YELLOW\" | \"RED\";\n\nexport interface ScoredClaim {\n claim: Claim;\n status: ClaimStatus;\n score: number; // Top bridge.search() score (0.0-1.0)\n evidence: SearchResult[]; // Top k results\n}\n\n// ---------------------------------------------------------------------------\n// Internal helpers\n// ---------------------------------------------------------------------------\n\nfunction chunk<T>(arr: T[], size: number): T[][] {\n const chunks: 
T[][] = [];\n for (let i = 0; i < arr.length; i += size) {\n chunks.push(arr.slice(i, i + size));\n }\n return chunks;\n}\n\nfunction classifyScore(score: number): ClaimStatus {\n if (score >= GREEN_THRESHOLD) return \"GREEN\";\n if (score >= YELLOW_THRESHOLD) return \"YELLOW\";\n return \"RED\";\n}\n\nasync function scoreSingleClaim(\n claim: Claim,\n bridge: EzSearchBridge\n): Promise<ScoredClaim> {\n const evidence = await bridge.search(claim.text, { k: 5 });\n const topScore = evidence.length > 0 ? evidence[0]!.score : 0;\n return {\n claim,\n status: classifyScore(topScore),\n score: topScore,\n evidence,\n };\n}\n\n// ---------------------------------------------------------------------------\n// Public API\n// ---------------------------------------------------------------------------\n\n/**\n * Score all claims by searching the code index in batches of BATCH_SIZE.\n *\n * @param claims Claims to score\n * @param bridge EzSearchBridge instance bound to the project\n * @param onProgress Optional callback fired after each batch: (done, total)\n * @returns ScoredClaim[] in the same order as input claims\n */\nexport async function scoreClaims(\n claims: Claim[],\n bridge: EzSearchBridge,\n onProgress?: (completed: number, total: number) => void\n): Promise<ScoredClaim[]> {\n const total = claims.length;\n const batches = chunk(claims, BATCH_SIZE);\n const results: ScoredClaim[] = [];\n let completed = 0;\n\n for (const batch of batches) {\n const batchResults = await Promise.all(\n batch.map((claim) => scoreSingleClaim(claim, bridge))\n );\n results.push(...batchResults);\n completed += batch.length;\n onProgress?.(completed, total);\n }\n\n return results;\n}\n","/**\n * Drift report — aggregates scored claims into a health score and markdown report.\n *\n * Health score: mean of per-claim scores scaled 0-100 (rounded).\n * Zero claims yields a health score of 100 (nothing to contradict = healthy).\n *\n * Rendered markdown groups claims by status with 
evidence for stale/contradicted claims.\n */\nimport type { ScoredClaim } from \"./claim-scorer.js\";\n\n// ---------------------------------------------------------------------------\n// Types\n// ---------------------------------------------------------------------------\n\nexport interface DriftReport {\n sourceFile: string;\n healthScore: number;\n scoredClaims: ScoredClaim[];\n}\n\n// ---------------------------------------------------------------------------\n// Health score\n// ---------------------------------------------------------------------------\n\n/**\n * Compute the aggregate health score for a set of scored claims.\n * Returns 100 for empty input (no claims = no drift).\n */\nexport function computeHealthScore(scoredClaims: ScoredClaim[]): number {\n if (scoredClaims.length === 0) return 100;\n const mean =\n scoredClaims.reduce((sum, sc) => sum + sc.score, 0) / scoredClaims.length;\n return Math.round(mean * 100);\n}\n\n// ---------------------------------------------------------------------------\n// Report assembly\n// ---------------------------------------------------------------------------\n\n/**\n * Build a DriftReport from a source file path and its scored claims.\n */\nexport function buildDriftReport(\n sourceFile: string,\n scoredClaims: ScoredClaim[]\n): DriftReport {\n return {\n sourceFile,\n healthScore: computeHealthScore(scoredClaims),\n scoredClaims,\n };\n}\n\n// ---------------------------------------------------------------------------\n// Renderer\n// ---------------------------------------------------------------------------\n\nconst STATUS_LABEL: Record<string, string> = {\n GREEN: \"Confirmed\",\n YELLOW: \"Possibly Stale\",\n RED: \"Contradicted\",\n};\n\n/**\n * Render a drift report as a readable markdown string.\n *\n * Layout:\n * # Drift Report\n * Health score, source file, claim count\n *\n * ## Confirmed (GREEN)\n * - [GREEN] claim text (score: X.XX)\n *\n * ## Possibly Stale (YELLOW)\n * - [YELLOW] claim text 
(score: X.XX)\n * - file: chunk_preview\n *\n * ## Contradicted (RED)\n * - [RED] claim text (score: X.XX)\n * - file: chunk_preview\n *\n * Summary: X confirmed, Y possibly stale, Z contradicted\n */\nexport function renderDriftReport(report: DriftReport): string {\n const { sourceFile, healthScore, scoredClaims } = report;\n const lines: string[] = [];\n\n const green = scoredClaims.filter((sc) => sc.status === \"GREEN\");\n const yellow = scoredClaims.filter((sc) => sc.status === \"YELLOW\");\n const red = scoredClaims.filter((sc) => sc.status === \"RED\");\n\n // Header\n lines.push(\"# Drift Report\");\n lines.push(\"\");\n lines.push(`**Health Score:** ${healthScore}/100`);\n lines.push(`**File:** ${sourceFile}`);\n lines.push(`**Claims:** ${scoredClaims.length}`);\n lines.push(\"\");\n\n // Render a group of claims\n const renderGroup = (group: ScoredClaim[], status: string) => {\n if (group.length === 0) return;\n const label = STATUS_LABEL[status] ?? status;\n lines.push(`## ${label} (${status})`);\n lines.push(\"\");\n for (const sc of group) {\n lines.push(`- [${sc.status}] ${sc.claim.text} (score: ${sc.score.toFixed(2)})`);\n // Show top 2 evidence items for non-GREEN claims\n if (sc.status !== \"GREEN\") {\n const topEvidence = sc.evidence.slice(0, 2);\n for (const ev of topEvidence) {\n const preview = ev.chunk.replace(/\\s+/g, \" \").trim().slice(0, 80);\n lines.push(` - ${ev.file}: ${preview}`);\n }\n }\n }\n lines.push(\"\");\n };\n\n renderGroup(green, \"GREEN\");\n renderGroup(yellow, \"YELLOW\");\n renderGroup(red, \"RED\");\n\n // Summary\n lines.push(\n `Summary: ${green.length} confirmed, ${yellow.length} possibly stale, ${red.length} contradicted`\n );\n\n return lines.join(\"\\n\");\n}\n","import path from \"node:path\";\nimport { readFile } from \"node:fs/promises\";\nimport { existsSync } from \"node:fs\";\nimport ora from \"ora\";\nimport chalk from \"chalk\";\nimport { createBridge } from \"../core/ez-search-bridge.js\";\nimport { 
extractClaims } from \"../core/drift/claim-extractor.js\";\nimport { scoreClaims } from \"../core/drift/claim-scorer.js\";\nimport { buildDriftReport, renderDriftReport, computeHealthScore } from \"../core/drift/report.js\";\n\nconst CANDIDATE_FILES = [\"CLAUDE.md\", \"AGENTS.md\", \".cursorrules\", \"CONTEXT.md\"];\n\nfunction healthColor(score: number): string {\n if (score >= 70) return chalk.green(String(score));\n if (score >= 40) return chalk.yellow(String(score));\n return chalk.red(String(score));\n}\n\nexport async function driftAction(\n pathArg: string,\n options: { file?: string }\n): Promise<void> {\n const projectPath = path.resolve(pathArg);\n const spinner = ora(\"Loading context files...\").start();\n\n try {\n const bridge = await createBridge(projectPath);\n\n // Index check\n if (!(await bridge.hasIndex(projectPath))) {\n spinner.fail(\"No search index found\");\n console.error(\n chalk.red(\"Run 'ez-context generate' or 'ez-search index .' first to create an index.\")\n );\n process.exit(1);\n }\n\n // Resolve files\n let filePaths: string[];\n if (options.file) {\n filePaths = [path.resolve(projectPath, options.file)];\n } else {\n filePaths = CANDIDATE_FILES\n .map((name) => path.join(projectPath, name))\n .filter((p) => existsSync(p));\n }\n\n if (filePaths.length === 0) {\n spinner.fail(\"No context files found\");\n console.error(\n chalk.red(\"No CLAUDE.md, AGENTS.md, .cursorrules, or CONTEXT.md found. 
Use --file to specify one.\")\n );\n process.exit(1);\n }\n\n // Extract claims from each file\n const claimsByFile: Map<string, ReturnType<typeof extractClaims>> = new Map();\n for (const filePath of filePaths) {\n const content = await readFile(filePath, \"utf-8\");\n const claims = extractClaims(content, filePath);\n claimsByFile.set(filePath, claims);\n }\n\n const allClaims = [...claimsByFile.values()].flat();\n spinner.text = `Analyzing ${allClaims.length} claims...`;\n\n // Score claims with progress callback\n const scoredAll = await scoreClaims(allClaims, bridge, (done, total) => {\n spinner.text = `Checking claim ${done}/${total}...`;\n });\n\n // Build and render reports per file\n const reports = filePaths.map((filePath) => {\n const fileClaims = claimsByFile.get(filePath) ?? [];\n const fileScoredClaims = scoredAll.filter((sc) =>\n fileClaims.some((c) => c === sc.claim)\n );\n return buildDriftReport(filePath, fileScoredClaims);\n });\n\n const overallScore = computeHealthScore(scoredAll);\n spinner.succeed(`Drift analysis complete — health score: ${healthColor(overallScore)}/100`);\n\n console.log();\n for (const report of reports) {\n console.log(renderDriftReport(report));\n console.log();\n }\n } catch (err) {\n spinner.fail(\"Drift analysis failed\");\n const message = err instanceof Error ? err.message : String(err);\n console.error(chalk.red(message));\n process.exit(1);\n }\n}\n","/**\n * Updater — targeted regeneration engine for `ez-context update`.\n *\n * Orchestrates:\n * 1. Marker validation (pre-flight check, markers strategy only)\n * 2. Drift detection (skip GREEN files, markers strategy only)\n * 3. File backup (before any write)\n * 4. Re-rendering (via FORMAT_EMITTER_MAP)\n * 5. 
Write-back (writeWithMarkers for markers strategy, writeFile for direct)\n */\nimport { copyFile, readFile, writeFile } from \"node:fs/promises\";\nimport { existsSync } from \"node:fs\";\nimport path from \"node:path\";\n\nimport { MARKER_START, MARKER_END, writeWithMarkers } from \"../emitters/writer.js\";\nimport { FORMAT_EMITTER_MAP } from \"../emitters/index.js\";\nimport { extractClaims } from \"./drift/claim-extractor.js\";\nimport { scoreClaims } from \"./drift/claim-scorer.js\";\nimport type { EzSearchBridge } from \"./ez-search-bridge.js\";\nimport type { ConventionRegistry } from \"./schema.js\";\n\n// ---------------------------------------------------------------------------\n// Types\n// ---------------------------------------------------------------------------\n\nexport interface MarkerValidation {\n valid: boolean;\n mode: \"append\" | \"splice\" | \"invalid\";\n reason?: string;\n startIdx?: number;\n endIdx?: number;\n}\n\nexport type UpdateAction = \"skipped\" | \"updated\" | \"aborted\";\n\nexport interface FileUpdateResult {\n filePath: string;\n action: UpdateAction;\n reason: string;\n backupPath?: string;\n}\n\n// ---------------------------------------------------------------------------\n// validateMarkers\n// ---------------------------------------------------------------------------\n\n/**\n * Pre-flight marker check for updateFile.\n *\n * Unlike writeWithMarkers (which silently appends on unpaired markers),\n * validateMarkers rejects unpaired markers so updateFile can abort safely.\n *\n * Returns:\n * - { valid: true, mode: \"append\" } — no markers, safe to append\n * - { valid: true, mode: \"splice\", startIdx, endIdx } — well-formed pair\n * - { valid: false, mode: \"invalid\", reason } — unpaired or inverted markers\n */\nexport function validateMarkers(content: string): MarkerValidation {\n const startIdx = content.indexOf(MARKER_START);\n const endIdx = content.indexOf(MARKER_END);\n\n const hasStart = startIdx !== -1;\n const 
hasEnd = endIdx !== -1;\n\n // No markers at all -> safe to append\n if (!hasStart && !hasEnd) {\n return { valid: true, mode: \"append\" };\n }\n\n // Both markers present -> validate ordering\n if (hasStart && hasEnd) {\n if (endIdx < startIdx) {\n return {\n valid: false,\n mode: \"invalid\",\n reason: \"End marker appears before start marker (corrupted file)\",\n };\n }\n return { valid: true, mode: \"splice\", startIdx, endIdx };\n }\n\n // Unpaired: only one marker present\n if (hasStart && !hasEnd) {\n return {\n valid: false,\n mode: \"invalid\",\n reason: \"Unpaired ez-context marker: end marker missing\",\n };\n }\n\n // hasEnd && !hasStart\n return {\n valid: false,\n mode: \"invalid\",\n reason: \"Unpaired ez-context marker: start marker missing\",\n };\n}\n\n// ---------------------------------------------------------------------------\n// backupFile\n// ---------------------------------------------------------------------------\n\n/**\n * Copy filePath to filePath.bak and return the backup path.\n * Returns null if the file does not exist.\n * Overwrites any existing .bak silently (represents state before this run).\n */\nexport async function backupFile(filePath: string): Promise<string | null> {\n if (!existsSync(filePath)) {\n return null;\n }\n\n const backupPath = filePath + \".bak\";\n await copyFile(filePath, backupPath);\n return backupPath;\n}\n\n// ---------------------------------------------------------------------------\n// findFormatEntry\n// ---------------------------------------------------------------------------\n\n/**\n * Look up the FORMAT_EMITTER_MAP entry whose filename suffix matches filePath.\n * Returns undefined if the file doesn't correspond to a known format.\n */\nfunction findFormatEntry(filePath: string) {\n const normalized = path.normalize(filePath);\n for (const entry of Object.values(FORMAT_EMITTER_MAP)) {\n if (normalized.endsWith(path.normalize(entry.filename))) {\n return entry;\n }\n }\n return 
undefined;\n}\n\n// ---------------------------------------------------------------------------\n// updateFile\n// ---------------------------------------------------------------------------\n\n/**\n * Orchestrate drift detection and targeted re-rendering for a single file.\n *\n * The write strategy is determined by FORMAT_EMITTER_MAP:\n * - \"markers\" strategy: drift detection + writeWithMarkers (default)\n * - \"direct\" strategy: always regenerate + writeFile (full overwrite)\n *\n * Flow for markers strategy:\n * 1. File existence check — skip if missing\n * 2. Marker validation — abort on invalid markers\n * 3. Drift check (splice mode only) — skip if all claims GREEN\n * 4. Backup creation\n * 5. Re-render + writeWithMarkers\n *\n * Flow for direct strategy:\n * 1. File existence check — skip if missing\n * 2. Backup creation\n * 3. Re-render + writeFile (full overwrite)\n *\n * @param filePath Absolute path to the context file\n * @param registry Pre-computed convention registry (NOT extracted per-file)\n * @param bridge EzSearchBridge instance for drift scoring\n * @param confidenceThreshold Confidence floor passed to the renderer (default 0.7)\n */\nexport async function updateFile(\n filePath: string,\n registry: ConventionRegistry,\n bridge: EzSearchBridge,\n confidenceThreshold: number = 0.7\n): Promise<FileUpdateResult> {\n // 1. File existence check\n if (!existsSync(filePath)) {\n return { filePath, action: \"skipped\", reason: \"File does not exist\" };\n }\n\n const formatEntry = findFormatEntry(filePath);\n // Fall back to claude (markers) if the file isn't a known format\n const strategy = formatEntry?.strategy ?? \"markers\";\n const render = formatEntry?.render ?? 
FORMAT_EMITTER_MAP.claude.render;\n\n // ---------------------------------------------------------------------------\n // Direct strategy: full regeneration, no drift detection\n // ---------------------------------------------------------------------------\n if (strategy === \"direct\") {\n const backupPath = (await backupFile(filePath)) ?? undefined;\n const newContent = render(registry, confidenceThreshold);\n await writeFile(filePath, newContent, \"utf-8\");\n return {\n filePath,\n action: \"updated\",\n reason: \"Re-rendered (direct strategy)\",\n backupPath,\n };\n }\n\n // ---------------------------------------------------------------------------\n // Markers strategy: drift detection + writeWithMarkers\n // ---------------------------------------------------------------------------\n\n // 2. Read content and validate markers\n const content = await readFile(filePath, \"utf-8\");\n const validation = validateMarkers(content);\n\n if (!validation.valid) {\n return { filePath, action: \"aborted\", reason: validation.reason! };\n }\n\n // 3. Drift check (only when markers are already present)\n if (validation.mode === \"splice\") {\n const claims = extractClaims(content, filePath);\n\n // Nothing to check — skip (no claims extracted means no drift to detect)\n if (claims.length === 0) {\n return { filePath, action: \"skipped\", reason: \"No drift detected\" };\n }\n\n const scored = await scoreClaims(claims, bridge);\n const hasDrift = scored.some((s) => s.status !== \"GREEN\");\n\n if (!hasDrift) {\n return { filePath, action: \"skipped\", reason: \"No drift detected\" };\n }\n }\n // mode === \"append\": file has no generated section yet -> always proceed\n\n // 4. Backup before any write\n const backupPath = (await backupFile(filePath)) ?? undefined;\n\n // 5. 
Re-render + write\n const newContent = render(registry, confidenceThreshold);\n await writeWithMarkers(filePath, newContent);\n\n return {\n filePath,\n action: \"updated\",\n reason: \"Re-rendered drifted sections\",\n backupPath,\n };\n}\n","import path from \"node:path\";\nimport { existsSync } from \"node:fs\";\nimport ora from \"ora\";\nimport chalk from \"chalk\";\nimport { createBridge } from \"../core/ez-search-bridge.js\";\nimport { extractConventions } from \"../core/pipeline.js\";\nimport { updateFile } from \"../core/updater.js\";\nimport { extractClaims } from \"../core/drift/claim-extractor.js\";\nimport { scoreClaims } from \"../core/drift/claim-scorer.js\";\nimport { FORMAT_EMITTER_MAP } from \"../emitters/index.js\";\n\nexport async function updateAction(\n pathArg: string,\n options: { file?: string; dryRun?: boolean; yes?: boolean }\n): Promise<void> {\n const projectPath = path.resolve(pathArg);\n const spinner = ora(\"Checking for drift...\").start();\n\n try {\n const bridge = await createBridge(projectPath);\n\n // Guard: no index\n if (!(await bridge.hasIndex(projectPath))) {\n spinner.fail(\"No search index found\");\n console.error(\n chalk.red(\"Run 'ez-context generate' or 'ez-search index .' first to create an index.\")\n );\n process.exit(1);\n }\n\n // Resolve target files\n let filePaths: string[];\n if (options.file) {\n filePaths = [path.resolve(projectPath, options.file)];\n } else {\n filePaths = Object.values(FORMAT_EMITTER_MAP)\n .map((entry) => path.join(projectPath, entry.filename))\n .filter((p) => existsSync(p));\n }\n\n if (filePaths.length === 0) {\n spinner.fail(\"No context files found\");\n console.error(\n chalk.red(\"No generated context files found. 
Run 'ez-context generate' first, or use --file to specify one.\")\n );\n process.exit(1);\n }\n\n if (options.dryRun) {\n // Dry-run: analyze drift per file without writing\n spinner.succeed(\"Dry run complete\");\n console.log();\n console.log(chalk.bold.yellow(\"╔══════════════════════════════════════╗\"));\n console.log(chalk.bold.yellow(\"║ DRY RUN -- no files will be written ║\"));\n console.log(chalk.bold.yellow(\"╚══════════════════════════════════════╝\"));\n console.log();\n\n for (const filePath of filePaths) {\n const basename = path.basename(filePath);\n const { readFile } = await import(\"node:fs/promises\");\n const content = await readFile(filePath, \"utf-8\");\n const claims = extractClaims(content, filePath);\n\n if (claims.length === 0) {\n console.log(` ${chalk.gray(\"-\")} ${basename} ${chalk.gray(\"(no claims to check)\")}`);\n continue;\n }\n\n const scored = await scoreClaims(claims, bridge);\n const hasDrift = scored.some((s) => s.status !== \"GREEN\");\n\n if (hasDrift) {\n console.log(` ${chalk.yellow(\"~\")} Would update ${chalk.cyan(basename)}`);\n } else {\n console.log(` ${chalk.gray(\"-\")} Up to date: ${chalk.gray(basename)}`);\n }\n }\n\n return;\n }\n\n // Real update: process each file\n spinner.text = \"Extracting conventions...\";\n const registry = await extractConventions(projectPath);\n const results = [];\n for (const filePath of filePaths) {\n const basename = path.basename(filePath);\n spinner.text = `Updating ${basename}...`;\n const result = await updateFile(filePath, registry, bridge);\n results.push(result);\n }\n\n // Summarize results\n const updated = results.filter((r) => r.action === \"updated\");\n const aborted = results.filter((r) => r.action === \"aborted\");\n\n if (updated.length === 0 && aborted.length === 0) {\n spinner.succeed(\"All context files are up to date\");\n } else if (updated.length > 0) {\n spinner.succeed(\n `Updated ${updated.length} file${updated.length === 1 ? 
\"\" : \"s\"}`\n );\n } else {\n spinner.fail(\"Update incomplete — some files could not be updated\");\n }\n\n // Per-file report\n console.log();\n for (const result of results) {\n const basename = path.basename(result.filePath);\n if (result.action === \"updated\") {\n const backup = result.backupPath ? ` (backup: ${path.basename(result.backupPath)})` : \"\";\n console.log(` ${chalk.green(\"✓\")} ${chalk.cyan(basename)}${chalk.gray(backup)}`);\n } else if (result.action === \"skipped\") {\n console.log(` ${chalk.gray(\"-\")} ${chalk.gray(basename)} ${chalk.gray(`(${result.reason})`)}`);\n } else {\n // aborted\n console.log(` ${chalk.yellow(\"⚠\")} ${chalk.yellow(basename)} ${chalk.yellow(`(${result.reason})`)}`);\n }\n }\n\n if (aborted.length > 0) {\n console.log();\n console.log(\n chalk.yellow(`Warning: ${aborted.length} file${aborted.length === 1 ? \"\" : \"s\"} could not be updated due to marker issues.`)\n );\n }\n\n } catch (err) {\n spinner.fail(\"Update failed\");\n const message = err instanceof Error ? 
err.message : String(err);\n console.error(chalk.red(message));\n process.exit(1);\n }\n}\n","#!/usr/bin/env node\nimport { Command } from \"commander\";\nimport { generateAction } from \"./commands/generate.js\";\nimport { inspectAction } from \"./commands/inspect.js\";\nimport { driftAction } from \"./commands/drift.js\";\nimport { updateAction } from \"./commands/update.js\";\n\nconst program = new Command();\n\nprogram\n .name(\"ez-context\")\n .description(\"Generate AI context files from any project\")\n .version(\"0.1.0\");\n\nprogram\n .command(\"generate\")\n .description(\"Extract conventions and generate context files\")\n .argument(\"[path]\", \"project root to analyze\", \".\")\n .option(\"--dry-run\", \"preview without writing files\")\n .option(\"-y, --yes\", \"non-interactive mode\")\n .option(\"--output <dir>\", \"output directory\", \".\")\n .option(\"--threshold <number>\", \"confidence threshold 0-1\", \"0.7\")\n .option(\"--format <formats>\", \"output formats: claude,agents,cursor,copilot,skills,rulesync,ruler (comma-separated)\", \"claude,agents\")\n .action(generateAction);\n\nprogram\n .command(\"inspect\")\n .description(\"Display detected conventions\")\n .argument(\"[path]\", \"project root to analyze\", \".\")\n .option(\"--threshold <number>\", \"confidence threshold 0-1\", \"0.7\")\n .action(inspectAction);\n\nprogram\n .command(\"drift\")\n .description(\"Check context files against code for semantic drift\")\n .argument(\"[path]\", \"project root to analyze\", \".\")\n .option(\"--file <contextFile>\", \"specific context file to check\")\n .action(driftAction);\n\nprogram\n .command(\"update\")\n .description(\"Update drifted sections in context files, preserving manual edits\")\n .argument(\"[path]\", \"project root to analyze\", \".\")\n .option(\"--file <contextFile>\", \"specific context file to update\")\n .option(\"--dry-run\", \"preview changes without writing files\")\n .option(\"-y, --yes\", \"non-interactive mode\")\n 
.action(updateAction);\n\nawait program.parseAsync();\n"],"mappings":";;;;;;;;;;AAOA,MAAM,wBAAwB;AAE9B,MAAM,gBAAgC;CACpC;CACA;CACA;CACA;CACA;CACA;CACA;CACD;AAED,SAAgB,aAAa,KAA6B;CACxD,MAAM,UAAU,CAAC,GAAG,IAAI,IAAI,IAAI,MAAM,IAAI,CAAC,KAAK,MAAM,EAAE,MAAM,CAAC,CAAC,OAAO,QAAQ,CAAC,CAAC;CACjF,MAAM,UAAU,QAAQ,QAAQ,MAAM,CAAC,cAAc,SAAS,EAAkB,CAAC;AACjF,KAAI,QAAQ,SAAS,EACnB,OAAM,IAAI,MACR,sBAAsB,QAAQ,KAAK,KAAK,CAAC,WAAW,cAAc,KAAK,KAAK,GAC7E;AAEH,QAAO;;AAGT,SAAS,gBAAgB,SAAyB;CAChD,MAAM,QAAQ,QAAQ,MAAM,KAAK;AACjC,KAAI,MAAM,UAAU,sBAClB,QAAO;AAGT,QAAO,GADS,MAAM,MAAM,GAAG,sBAAsB,CAAC,KAAK,KAAK,CAC9C,SAAS,MAAM,OAAO;;AAG1C,eAAsB,eACpB,SACA,SACe;CACf,MAAM,cAAc,KAAK,QAAQ,QAAQ;CAIzC,MAAM,UAAU,IAAI,mCAAmC,CAAC,OAAO;AAE/D,KAAI;EACF,MAAM,WAAW,MAAM,mBAAmB,YAAY;EACtD,MAAM,kBAAkB,SAAS,YAAY;AAC7C,UAAQ,QAAQ,SAAS,gBAAgB,aAAa,oBAAoB,IAAI,KAAK,MAAM;EAEzF,MAAM,sBAAsB,WAAW,QAAQ,aAAa,MAAM;EAClE,MAAM,YAAY,KAAK,QAAQ,QAAQ,UAAU,IAAI;EAGrD,MAAM,UAAU,aAAa,QAAQ,UAAU,gBAAgB;EAE/D,MAAM,cAA2B;GAC/B;GACA;GACA,QAAQ,QAAQ,UAAU;GAC1B;GACD;EAMD,MAAM,aAAa,IAJD,QAAQ,WAAW,KAAK,QAAQ,SAAS,SAAS,IAAI,QAAQ,SAAS,SAAS,GAE9F,gCACA,cAAc,QAAQ,OAAO,eAAe,QAAQ,WAAW,IAAI,KAAK,IAAI,KAC1C,CAAC,OAAO;EAC9C,MAAM,SAAS,MAAM,KAAK,UAAU,YAAY;AAEhD,MAAI,QAAQ,QAAQ;AAClB,cAAW,QAAQ,mBAAmB;AACtC,WAAQ,KAAK;AACb,WAAQ,IAAI,MAAM,KAAK,OAAO,2CAA2C,CAAC;AAC1E,WAAQ,IAAI,MAAM,KAAK,OAAO,2CAA2C,CAAC;AAC1E,WAAQ,IAAI,MAAM,KAAK,OAAO,2CAA2C,CAAC;AAC1E,WAAQ,KAAK;AACb,QAAK,MAAM,CAAC,QAAQ,YAAY,OAAO,QAAQ,OAAO,SAAS,EAAE;AAC/D,YAAQ,IAAI,MAAM,KAAK,OAAO,OAAO,aAAa,CAAC,MAAM,CAAC;AAC1D,YAAQ,IAAI,gBAAgB,QAAQ,CAAC;AACrC,YAAQ,KAAK;;SAEV;AACL,cAAW,QAAQ,aAAa,OAAO,aAAa,OAAO,OAAO,OAAO,aAAa,WAAW,IAAI,KAAK,MAAM;AAChH,WAAQ,KAAK;AACb,WAAQ,IAAI,MAAM,KAAK,MAAM,mBAAmB,CAAC;AACjD,QAAK,MAAM,YAAY,OAAO,cAAc;IAC1C,MAAM,UAAU,KAAK,SAAS,WAAW,SAAS;AAClD,YAAQ,IAAI,KAAK,MAAM,KAAK,QAAQ,GAAG;;;UAGpC,KAAK;AACZ,UAAQ,KAAK,kBAAkB;EAC/B,MAAM,UAAU,eAAe,QAAQ,IAAI,UAAU,OAAO,IAAI;AAChE,UAAQ,MAAM,MAAM,IAAI,QAAQ,CAAC;AACjC,UAAQ,KAAK,EAAE;;;;;;AC7FnB,SAAS,cAAc,YAA4B;AACjD,KAAI,cAAc,GAAK,QAAO,MAAM,MAAM,IAAI;AA
C9C,KAAI,cAAc,GAAK,QAAO,MAAM,OAAO,IAAI;AAC/C,QAAO,MAAM,IAAI,IAAI;;AAGvB,eAAsB,cACpB,SACA,SACe;CACf,MAAM,cAAc,KAAK,QAAQ,QAAQ;CACzC,MAAM,UAAU,IAAI,mCAAmC,CAAC,OAAO;AAE/D,KAAI;EACF,MAAM,WAAW,MAAM,mBAAmB,YAAY;EACtD,MAAM,aAAa,SAAS,YAAY;AACxC,UAAQ,QAAQ,aAAa,WAAW,aAAa,eAAe,IAAI,KAAK,MAAM;EAEnF,MAAM,YAAY,WAAW,QAAQ,aAAa,MAAM;EAExD,MAAM,WAAW,SAAS,YAAY,QACnC,MAAM,EAAE,cAAc,UACxB;AAED,MAAI,SAAS,WAAW,GAAG;AACzB,WAAQ,IACN,MAAM,OACJ,gCAAgC,UAAU,kDAC3C,CACF;AACD;;EAIF,MAAM,6BAAa,IAAI,KAAgC;AACvD,OAAK,MAAM,cAAc,UAAU;GACjC,MAAM,QAAQ,WAAW,IAAI,WAAW,SAAS,IAAI,EAAE;AACvD,SAAM,KAAK,WAAW;AACtB,cAAW,IAAI,WAAW,UAAU,MAAM;;AAG5C,UAAQ,KAAK;AACb,OAAK,MAAM,CAAC,UAAU,gBAAgB,YAAY;AAChD,WAAQ,IAAI,MAAM,KAAK,SAAS,aAAa,CAAC,CAAC;AAC/C,QAAK,MAAM,cAAc,aAAa;IACpC,MAAM,MAAM,KAAK,MAAM,WAAW,aAAa,IAAI;AACnD,YAAQ,IACN,KAAK,cAAc,WAAW,WAAW,CAAC,GAAG,WAAW,QAAQ,GAAG,MAAM,KAAK,IAAI,IAAI,IAAI,GAC3F;;AAEH,WAAQ,KAAK;;EAGf,MAAM,gBAAgB,WAAW;AACjC,UAAQ,IACN,MAAM,KACJ,SAAS,SAAS,OAAO,aAAa,SAAS,WAAW,IAAI,KAAK,IAAI,UAAU,cAAc,UAAU,kBAAkB,IAAI,MAAM,MAAM,eAAe,UAAU,GACrK,CACF;UACM,KAAK;AACZ,UAAQ,KAAK,kBAAkB;EAC/B,MAAM,UAAU,eAAe,QAAQ,IAAI,UAAU,OAAO,IAAI;AAChE,UAAQ,MAAM,MAAM,IAAI,QAAQ,CAAC;AACjC,UAAQ,KAAK,EAAE;;;;;;;;;;;;;;AC9BnB,MAAM,oBACJ;;;;;;;;AAaF,SAAgB,cAAc,SAAiB,YAA6B;CAC1E,MAAM,SAAkB,EAAE;CAC1B,MAAM,QAAQ,QAAQ,MAAM,KAAK;CACjC,IAAI,iBAAiB;AAErB,MAAK,IAAI,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;EACrC,MAAM,OAAO,MAAM,GAAI,MAAM;EAC7B,MAAM,UAAU,IAAI;AAGpB,MAAI,CAAC,KAAM;AAGX,MAAI,KAAK,WAAW,OAAO,CAAE;AAG7B,MAAI,KAAK,SAAS,cAAc,CAAE;EAGlC,MAAM,UAAU,KAAK,MAAM,iBAAiB;AAC5C,MAAI,SAAS;AACX,oBAAiB,QAAQ,GAAI,MAAM;AACnC;;EAIF,MAAM,SAAS,KAAK,MAAM,gBAAgB;EAC1C,MAAM,WAAW,CAAC,SAAS,KAAK,MAAM,gBAAgB,GAAG;EACzD,MAAM,UAAU,SAAS,OAAO,KAAM,WAAW,SAAS,KAAM;AAEhE,MAAI,CAAC,QAAS;EAGd,MAAM,OAAO,QACV,QAAQ,oBAAoB,KAAK,CACjC,QAAQ,cAAc,KAAK,CAC3B,MAAM;AAGT,MAAI,KAAK,SAAS,MAAM,KAAK,SAAS,IAAK;AAG3C,MAAI,kBAAkB,KAAK,KAAK,CAAE;AAElC,SAAO,KAAK;GACV;GACA;GACA,YAAY;GACZ,eAAe;GAChB,CAAC;;AAGJ,QAAO;;;;;ACzFT,MAAa,kBAAkB;AAC/B,MAAa,mBAAmB;AAChC,MAAa,aAAa;AAmB1B,SAAS,MAAS,KAAU,
MAAqB;CAC/C,MAAM,SAAgB,EAAE;AACxB,MAAK,IAAI,IAAI,GAAG,IAAI,IAAI,QAAQ,KAAK,KACnC,QAAO,KAAK,IAAI,MAAM,GAAG,IAAI,KAAK,CAAC;AAErC,QAAO;;AAGT,SAAS,cAAc,OAA4B;AACjD,KAAI,SAAS,gBAAiB,QAAO;AACrC,KAAI,SAAS,iBAAkB,QAAO;AACtC,QAAO;;AAGT,eAAe,iBACb,OACA,QACsB;CACtB,MAAM,WAAW,MAAM,OAAO,OAAO,MAAM,MAAM,EAAE,GAAG,GAAG,CAAC;CAC1D,MAAM,WAAW,SAAS,SAAS,IAAI,SAAS,GAAI,QAAQ;AAC5D,QAAO;EACL;EACA,QAAQ,cAAc,SAAS;EAC/B,OAAO;EACP;EACD;;;;;;;;;;AAeH,eAAsB,YACpB,QACA,QACA,YACwB;CACxB,MAAM,QAAQ,OAAO;CACrB,MAAM,UAAU,MAAM,QAAQ,WAAW;CACzC,MAAM,UAAyB,EAAE;CACjC,IAAI,YAAY;AAEhB,MAAK,MAAM,SAAS,SAAS;EAC3B,MAAM,eAAe,MAAM,QAAQ,IACjC,MAAM,KAAK,UAAU,iBAAiB,OAAO,OAAO,CAAC,CACtD;AACD,UAAQ,KAAK,GAAG,aAAa;AAC7B,eAAa,MAAM;AACnB,eAAa,WAAW,MAAM;;AAGhC,QAAO;;;;;;;;;ACpET,SAAgB,mBAAmB,cAAqC;AACtE,KAAI,aAAa,WAAW,EAAG,QAAO;CACtC,MAAM,OACJ,aAAa,QAAQ,KAAK,OAAO,MAAM,GAAG,OAAO,EAAE,GAAG,aAAa;AACrE,QAAO,KAAK,MAAM,OAAO,IAAI;;;;;AAU/B,SAAgB,iBACd,YACA,cACa;AACb,QAAO;EACL;EACA,aAAa,mBAAmB,aAAa;EAC7C;EACD;;AAOH,MAAM,eAAuC;CAC3C,OAAO;CACP,QAAQ;CACR,KAAK;CACN;;;;;;;;;;;;;;;;;;;;;AAsBD,SAAgB,kBAAkB,QAA6B;CAC7D,MAAM,EAAE,YAAY,aAAa,iBAAiB;CAClD,MAAM,QAAkB,EAAE;CAE1B,MAAM,QAAQ,aAAa,QAAQ,OAAO,GAAG,WAAW,QAAQ;CAChE,MAAM,SAAS,aAAa,QAAQ,OAAO,GAAG,WAAW,SAAS;CAClE,MAAM,MAAM,aAAa,QAAQ,OAAO,GAAG,WAAW,MAAM;AAG5D,OAAM,KAAK,iBAAiB;AAC5B,OAAM,KAAK,GAAG;AACd,OAAM,KAAK,qBAAqB,YAAY,MAAM;AAClD,OAAM,KAAK,aAAa,aAAa;AACrC,OAAM,KAAK,eAAe,aAAa,SAAS;AAChD,OAAM,KAAK,GAAG;CAGd,MAAM,eAAe,OAAsB,WAAmB;AAC5D,MAAI,MAAM,WAAW,EAAG;EACxB,MAAM,QAAQ,aAAa,WAAW;AACtC,QAAM,KAAK,MAAM,MAAM,IAAI,OAAO,GAAG;AACrC,QAAM,KAAK,GAAG;AACd,OAAK,MAAM,MAAM,OAAO;AACtB,SAAM,KAAK,MAAM,GAAG,OAAO,IAAI,GAAG,MAAM,KAAK,WAAW,GAAG,MAAM,QAAQ,EAAE,CAAC,GAAG;AAE/E,OAAI,GAAG,WAAW,SAAS;IACzB,MAAM,cAAc,GAAG,SAAS,MAAM,GAAG,EAAE;AAC3C,SAAK,MAAM,MAAM,aAAa;KAC5B,MAAM,UAAU,GAAG,MAAM,QAAQ,QAAQ,IAAI,CAAC,MAAM,CAAC,MAAM,GAAG,GAAG;AACjE,WAAM,KAAK,OAAO,GAAG,KAAK,IAAI,UAAU;;;;AAI9C,QAAM,KAAK,GAAG;;AAGhB,aAAY,OAAO,QAAQ;AAC3B,aAAY,QAAQ,SAAS;AAC7B,aAAY,KAAK,MAAM;AAGvB,OAAM,KACJ,YAAY,MAAM,OAAO,cAAc,OAAO,OAAO,mBAA
mB,IAAI,OAAO,eACpF;AAED,QAAO,MAAM,KAAK,KAAK;;;;;ACtHzB,MAAM,kBAAkB;CAAC;CAAa;CAAa;CAAgB;CAAa;AAEhF,SAAS,YAAY,OAAuB;AAC1C,KAAI,SAAS,GAAI,QAAO,MAAM,MAAM,OAAO,MAAM,CAAC;AAClD,KAAI,SAAS,GAAI,QAAO,MAAM,OAAO,OAAO,MAAM,CAAC;AACnD,QAAO,MAAM,IAAI,OAAO,MAAM,CAAC;;AAGjC,eAAsB,YACpB,SACA,SACe;CACf,MAAM,cAAc,KAAK,QAAQ,QAAQ;CACzC,MAAM,UAAU,IAAI,2BAA2B,CAAC,OAAO;AAEvD,KAAI;EACF,MAAM,SAAS,MAAM,aAAa,YAAY;AAG9C,MAAI,CAAE,MAAM,OAAO,SAAS,YAAY,EAAG;AACzC,WAAQ,KAAK,wBAAwB;AACrC,WAAQ,MACN,MAAM,IAAI,6EAA6E,CACxF;AACD,WAAQ,KAAK,EAAE;;EAIjB,IAAI;AACJ,MAAI,QAAQ,KACV,aAAY,CAAC,KAAK,QAAQ,aAAa,QAAQ,KAAK,CAAC;MAErD,aAAY,gBACT,KAAK,SAAS,KAAK,KAAK,aAAa,KAAK,CAAC,CAC3C,QAAQ,MAAM,WAAW,EAAE,CAAC;AAGjC,MAAI,UAAU,WAAW,GAAG;AAC1B,WAAQ,KAAK,yBAAyB;AACtC,WAAQ,MACN,MAAM,IAAI,yFAAyF,CACpG;AACD,WAAQ,KAAK,EAAE;;EAIjB,MAAM,+BAA8D,IAAI,KAAK;AAC7E,OAAK,MAAM,YAAY,WAAW;GAEhC,MAAM,SAAS,cADC,MAAM,SAAS,UAAU,QAAQ,EACX,SAAS;AAC/C,gBAAa,IAAI,UAAU,OAAO;;EAGpC,MAAM,YAAY,CAAC,GAAG,aAAa,QAAQ,CAAC,CAAC,MAAM;AACnD,UAAQ,OAAO,aAAa,UAAU,OAAO;EAG7C,MAAM,YAAY,MAAM,YAAY,WAAW,SAAS,MAAM,UAAU;AACtE,WAAQ,OAAO,kBAAkB,KAAK,GAAG,MAAM;IAC/C;EAGF,MAAM,UAAU,UAAU,KAAK,aAAa;GAC1C,MAAM,aAAa,aAAa,IAAI,SAAS,IAAI,EAAE;AAInD,UAAO,iBAAiB,UAHC,UAAU,QAAQ,OACzC,WAAW,MAAM,MAAM,MAAM,GAAG,MAAM,CACvC,CACkD;IACnD;EAEF,MAAM,eAAe,mBAAmB,UAAU;AAClD,UAAQ,QAAQ,2CAA2C,YAAY,aAAa,CAAC,MAAM;AAE3F,UAAQ,KAAK;AACb,OAAK,MAAM,UAAU,SAAS;AAC5B,WAAQ,IAAI,kBAAkB,OAAO,CAAC;AACtC,WAAQ,KAAK;;UAER,KAAK;AACZ,UAAQ,KAAK,wBAAwB;EACrC,MAAM,UAAU,eAAe,QAAQ,IAAI,UAAU,OAAO,IAAI;AAChE,UAAQ,MAAM,MAAM,IAAI,QAAQ,CAAC;AACjC,UAAQ,KAAK,EAAE;;;;;;;;;;;;;;;;;;;;;;;;;;;ACnCnB,SAAgB,gBAAgB,SAAmC;CACjE,MAAM,WAAW,QAAQ,QAAQ,aAAa;CAC9C,MAAM,SAAS,QAAQ,QAAQ,WAAW;CAE1C,MAAM,WAAW,aAAa;CAC9B,MAAM,SAAS,WAAW;AAG1B,KAAI,CAAC,YAAY,CAAC,OAChB,QAAO;EAAE,OAAO;EAAM,MAAM;EAAU;AAIxC,KAAI,YAAY,QAAQ;AACtB,MAAI,SAAS,SACX,QAAO;GACL,OAAO;GACP,MAAM;GACN,QAAQ;GACT;AAEH,SAAO;GAAE,OAAO;GAAM,MAAM;GAAU;GAAU;GAAQ;;AAI1D,KAAI,YAAY,CAAC,OACf,QAAO;EACL,OAAO;EACP,MAAM;EACN,QAAQ;EACT;AAIH,QAAO;EACL,OAAO;EACP,MAAM;EACN,QAAQ;EACT;;;;;;;
AAYH,eAAsB,WAAW,UAA0C;AACzE,KAAI,CAAC,WAAW,SAAS,CACvB,QAAO;CAGT,MAAM,aAAa,WAAW;AAC9B,OAAM,SAAS,UAAU,WAAW;AACpC,QAAO;;;;;;AAWT,SAAS,gBAAgB,UAAkB;CACzC,MAAM,aAAa,KAAK,UAAU,SAAS;AAC3C,MAAK,MAAM,SAAS,OAAO,OAAO,mBAAmB,CACnD,KAAI,WAAW,SAAS,KAAK,UAAU,MAAM,SAAS,CAAC,CACrD,QAAO;;;;;;;;;;;;;;;;;;;;;;;;;;AAkCb,eAAsB,WACpB,UACA,UACA,QACA,sBAA8B,IACH;AAE3B,KAAI,CAAC,WAAW,SAAS,CACvB,QAAO;EAAE;EAAU,QAAQ;EAAW,QAAQ;EAAuB;CAGvE,MAAM,cAAc,gBAAgB,SAAS;CAE7C,MAAM,WAAW,aAAa,YAAY;CAC1C,MAAM,SAAS,aAAa,UAAU,mBAAmB,OAAO;AAKhE,KAAI,aAAa,UAAU;EACzB,MAAM,aAAc,MAAM,WAAW,SAAS,IAAK;AAEnD,QAAM,UAAU,UADG,OAAO,UAAU,oBAAoB,EAClB,QAAQ;AAC9C,SAAO;GACL;GACA,QAAQ;GACR,QAAQ;GACR;GACD;;CAQH,MAAM,UAAU,MAAM,SAAS,UAAU,QAAQ;CACjD,MAAM,aAAa,gBAAgB,QAAQ;AAE3C,KAAI,CAAC,WAAW,MACd,QAAO;EAAE;EAAU,QAAQ;EAAW,QAAQ,WAAW;EAAS;AAIpE,KAAI,WAAW,SAAS,UAAU;EAChC,MAAM,SAAS,cAAc,SAAS,SAAS;AAG/C,MAAI,OAAO,WAAW,EACpB,QAAO;GAAE;GAAU,QAAQ;GAAW,QAAQ;GAAqB;AAMrE,MAAI,EAHW,MAAM,YAAY,QAAQ,OAAO,EACxB,MAAM,MAAM,EAAE,WAAW,QAAQ,CAGvD,QAAO;GAAE;GAAU,QAAQ;GAAW,QAAQ;GAAqB;;CAMvE,MAAM,aAAc,MAAM,WAAW,SAAS,IAAK;AAInD,OAAM,iBAAiB,UADJ,OAAO,UAAU,oBAAoB,CACZ;AAE5C,QAAO;EACL;EACA,QAAQ;EACR,QAAQ;EACR;EACD;;;;;ACjOH,eAAsB,aACpB,SACA,SACe;CACf,MAAM,cAAc,KAAK,QAAQ,QAAQ;CACzC,MAAM,UAAU,IAAI,wBAAwB,CAAC,OAAO;AAEpD,KAAI;EACF,MAAM,SAAS,MAAM,aAAa,YAAY;AAG9C,MAAI,CAAE,MAAM,OAAO,SAAS,YAAY,EAAG;AACzC,WAAQ,KAAK,wBAAwB;AACrC,WAAQ,MACN,MAAM,IAAI,6EAA6E,CACxF;AACD,WAAQ,KAAK,EAAE;;EAIjB,IAAI;AACJ,MAAI,QAAQ,KACV,aAAY,CAAC,KAAK,QAAQ,aAAa,QAAQ,KAAK,CAAC;MAErD,aAAY,OAAO,OAAO,mBAAmB,CAC1C,KAAK,UAAU,KAAK,KAAK,aAAa,MAAM,SAAS,CAAC,CACtD,QAAQ,MAAM,WAAW,EAAE,CAAC;AAGjC,MAAI,UAAU,WAAW,GAAG;AAC1B,WAAQ,KAAK,yBAAyB;AACtC,WAAQ,MACN,MAAM,IAAI,mGAAmG,CAC9G;AACD,WAAQ,KAAK,EAAE;;AAGjB,MAAI,QAAQ,QAAQ;AAElB,WAAQ,QAAQ,mBAAmB;AACnC,WAAQ,KAAK;AACb,WAAQ,IAAI,MAAM,KAAK,OAAO,2CAA2C,CAAC;AAC1E,WAAQ,IAAI,MAAM,KAAK,OAAO,2CAA2C,CAAC;AAC1E,WAAQ,IAAI,MAAM,KAAK,OAAO,2CAA2C,CAAC;AAC1E,WAAQ,KAAK;AAEb,QAAK,MAAM,YAAY,WAAW;IAChC,MAAM,WAAW,KAAK,SAAS,SAAS;IACxC,MAAM,EAAE,aAAa,MAAM,OAAO;IAElC,MAAM,SAAS,cADC
,MAAM,SAAS,UAAU,QAAQ,EACX,SAAS;AAE/C,QAAI,OAAO,WAAW,GAAG;AACvB,aAAQ,IAAI,KAAK,MAAM,KAAK,IAAI,CAAC,GAAG,SAAS,GAAG,MAAM,KAAK,uBAAuB,GAAG;AACrF;;AAMF,SAHe,MAAM,YAAY,QAAQ,OAAO,EACxB,MAAM,MAAM,EAAE,WAAW,QAAQ,CAGvD,SAAQ,IAAI,KAAK,MAAM,OAAO,IAAI,CAAC,gBAAgB,MAAM,KAAK,SAAS,GAAG;QAE1E,SAAQ,IAAI,KAAK,MAAM,KAAK,IAAI,CAAC,eAAe,MAAM,KAAK,SAAS,GAAG;;AAI3E;;AAIF,UAAQ,OAAO;EACf,MAAM,WAAW,MAAM,mBAAmB,YAAY;EACtD,MAAM,UAAU,EAAE;AAClB,OAAK,MAAM,YAAY,WAAW;AAEhC,WAAQ,OAAO,YADE,KAAK,SAAS,SAAS,CACJ;GACpC,MAAM,SAAS,MAAM,WAAW,UAAU,UAAU,OAAO;AAC3D,WAAQ,KAAK,OAAO;;EAItB,MAAM,UAAU,QAAQ,QAAQ,MAAM,EAAE,WAAW,UAAU;EAC7D,MAAM,UAAU,QAAQ,QAAQ,MAAM,EAAE,WAAW,UAAU;AAE7D,MAAI,QAAQ,WAAW,KAAK,QAAQ,WAAW,EAC7C,SAAQ,QAAQ,mCAAmC;WAC1C,QAAQ,SAAS,EAC1B,SAAQ,QACN,WAAW,QAAQ,OAAO,OAAO,QAAQ,WAAW,IAAI,KAAK,MAC9D;MAED,SAAQ,KAAK,sDAAsD;AAIrE,UAAQ,KAAK;AACb,OAAK,MAAM,UAAU,SAAS;GAC5B,MAAM,WAAW,KAAK,SAAS,OAAO,SAAS;AAC/C,OAAI,OAAO,WAAW,WAAW;IAC/B,MAAM,SAAS,OAAO,aAAa,aAAa,KAAK,SAAS,OAAO,WAAW,CAAC,KAAK;AACtF,YAAQ,IAAI,KAAK,MAAM,MAAM,IAAI,CAAC,GAAG,MAAM,KAAK,SAAS,GAAG,MAAM,KAAK,OAAO,GAAG;cACxE,OAAO,WAAW,UAC3B,SAAQ,IAAI,KAAK,MAAM,KAAK,IAAI,CAAC,GAAG,MAAM,KAAK,SAAS,CAAC,GAAG,MAAM,KAAK,IAAI,OAAO,OAAO,GAAG,GAAG;OAG/F,SAAQ,IAAI,KAAK,MAAM,OAAO,IAAI,CAAC,GAAG,MAAM,OAAO,SAAS,CAAC,GAAG,MAAM,OAAO,IAAI,OAAO,OAAO,GAAG,GAAG;;AAIzG,MAAI,QAAQ,SAAS,GAAG;AACtB,WAAQ,KAAK;AACb,WAAQ,IACN,MAAM,OAAO,YAAY,QAAQ,OAAO,OAAO,QAAQ,WAAW,IAAI,KAAK,IAAI,6CAA6C,CAC7H;;UAGI,KAAK;AACZ,UAAQ,KAAK,gBAAgB;EAC7B,MAAM,UAAU,eAAe,QAAQ,IAAI,UAAU,OAAO,IAAI;AAChE,UAAQ,MAAM,MAAM,IAAI,QAAQ,CAAC;AACjC,UAAQ,KAAK,EAAE;;;;;;AC7HnB,MAAM,UAAU,IAAI,SAAS;AAE7B,QACG,KAAK,aAAa,CAClB,YAAY,6CAA6C,CACzD,QAAQ,QAAQ;AAEnB,QACG,QAAQ,WAAW,CACnB,YAAY,iDAAiD,CAC7D,SAAS,UAAU,2BAA2B,IAAI,CAClD,OAAO,aAAa,gCAAgC,CACpD,OAAO,aAAa,uBAAuB,CAC3C,OAAO,kBAAkB,oBAAoB,IAAI,CACjD,OAAO,wBAAwB,4BAA4B,MAAM,CACjE,OAAO,sBAAsB,wFAAwF,gBAAgB,CACrI,OAAO,eAAe;AAEzB,QACG,QAAQ,UAAU,CAClB,YAAY,+BAA+B,CAC3C,SAAS,UAAU,2BAA2B,IAAI,CAClD,OAAO,wBAAwB,4BAA4B,MAAM,CACjE,OAAO,cAAc;AAExB,QACG,QAAQ,QAAQ,CAChB,YAAY,sDAAsD,CAClE,
SAAS,UAAU,2BAA2B,IAAI,CAClD,OAAO,wBAAwB,iCAAiC,CAChE,OAAO,YAAY;AAEtB,QACG,QAAQ,SAAS,CACjB,YAAY,oEAAoE,CAChF,SAAS,UAAU,2BAA2B,IAAI,CAClD,OAAO,wBAAwB,kCAAkC,CACjE,OAAO,aAAa,wCAAwC,CAC5D,OAAO,aAAa,uBAAuB,CAC3C,OAAO,aAAa;AAEvB,MAAM,QAAQ,YAAY"}
1
+ {"version":3,"file":"cli.js","names":[],"sources":["../src/commands/generate.ts","../src/commands/inspect.ts","../src/core/drift/claim-extractor.ts","../src/core/drift/claim-scorer.ts","../src/core/drift/report.ts","../src/commands/drift.ts","../src/core/updater.ts","../src/commands/update.ts","../src/cli.ts"],"sourcesContent":["import path from \"node:path\";\nimport ora from \"ora\";\nimport chalk from \"chalk\";\nimport { extractConventions } from \"../core/pipeline.js\";\nimport { emit } from \"../emitters/index.js\";\nimport type { EmitOptions, OutputFormat } from \"../emitters/types.js\";\n\nconst DRY_RUN_PREVIEW_LINES = 20;\n\nconst VALID_FORMATS: OutputFormat[] = [\n \"claude\",\n \"agents\",\n \"cursor\",\n \"copilot\",\n \"skills\",\n \"rulesync\",\n \"ruler\",\n];\n\nexport function parseFormats(raw: string): OutputFormat[] {\n const formats = [...new Set(raw.split(\",\").map((s) => s.trim()).filter(Boolean))];\n const invalid = formats.filter((f) => !VALID_FORMATS.includes(f as OutputFormat));\n if (invalid.length > 0) {\n throw new Error(\n `Invalid format(s): ${invalid.join(\", \")}. Valid: ${VALID_FORMATS.join(\", \")}`\n );\n }\n return formats as OutputFormat[];\n}\n\nfunction truncatePreview(content: string): string {\n const lines = content.split(\"\\n\");\n if (lines.length <= DRY_RUN_PREVIEW_LINES) {\n return content;\n }\n const preview = lines.slice(0, DRY_RUN_PREVIEW_LINES).join(\"\\n\");\n return `${preview}\\n... (${lines.length} lines total)`;\n}\n\nexport async function generateAction(\n pathArg: string,\n options: { dryRun?: boolean; yes?: boolean; output?: string; threshold?: string; format?: string }\n): Promise<void> {\n const projectPath = path.resolve(pathArg);\n\n // --yes and non-TTY environments: ora handles non-TTY gracefully by\n // falling back to plain text. 
No interactive prompts are used anywhere.\n const spinner = ora(\"Analyzing project conventions...\").start();\n\n try {\n const registry = await extractConventions(projectPath);\n const conventionCount = registry.conventions.length;\n spinner.succeed(`Found ${conventionCount} convention${conventionCount === 1 ? \"\" : \"s\"}`);\n\n const confidenceThreshold = parseFloat(options.threshold ?? \"0.7\");\n const outputDir = path.resolve(options.output ?? \".\");\n\n // Parse and validate --format (default: \"claude,agents\")\n const formats = parseFormats(options.format ?? \"claude,agents\");\n\n const emitOptions: EmitOptions = {\n outputDir,\n confidenceThreshold,\n dryRun: options.dryRun ?? false,\n formats,\n };\n\n const isDefault = formats.length === 2 && formats.includes(\"claude\") && formats.includes(\"agents\");\n const genSpinnerText = isDefault\n ? \"Generating context files...\"\n : `Generating ${formats.length} context file${formats.length === 1 ? \"\" : \"s\"}...`;\n const genSpinner = ora(genSpinnerText).start();\n const result = await emit(registry, emitOptions);\n\n if (options.dryRun) {\n genSpinner.succeed(\"Dry run complete\");\n console.log();\n console.log(chalk.bold.yellow(\"╔══════════════════════════════════════╗\"));\n console.log(chalk.bold.yellow(\"║ DRY RUN -- no files will be written ║\"));\n console.log(chalk.bold.yellow(\"╚══════════════════════════════════════╝\"));\n console.log();\n for (const [format, content] of Object.entries(result.rendered)) {\n console.log(chalk.cyan(`--- ${format.toUpperCase()} ---`));\n console.log(truncatePreview(content));\n console.log();\n }\n } else {\n genSpinner.succeed(`Generated ${result.filesWritten.length} file${result.filesWritten.length === 1 ? 
\"\" : \"s\"}`);\n console.log();\n console.log(chalk.bold.green(\"Generated files:\"));\n for (const filePath of result.filesWritten) {\n const relPath = path.relative(outputDir, filePath);\n console.log(` ${chalk.cyan(relPath)}`);\n }\n }\n } catch (err) {\n spinner.fail(\"Analysis failed\");\n const message = err instanceof Error ? err.message : String(err);\n console.error(chalk.red(message));\n process.exit(1);\n }\n}\n","import path from \"node:path\";\nimport ora from \"ora\";\nimport chalk from \"chalk\";\nimport { extractConventions } from \"../core/pipeline.js\";\nimport type { ConventionEntry } from \"../core/schema.js\";\n\nfunction confidenceDot(confidence: number): string {\n if (confidence >= 0.8) return chalk.green(\"●\");\n if (confidence >= 0.6) return chalk.yellow(\"●\");\n return chalk.red(\"●\");\n}\n\nexport async function inspectAction(\n pathArg: string,\n options: { threshold?: string }\n): Promise<void> {\n const projectPath = path.resolve(pathArg);\n const spinner = ora(\"Analyzing project conventions...\").start();\n\n try {\n const registry = await extractConventions(projectPath);\n const totalCount = registry.conventions.length;\n spinner.succeed(`Extracted ${totalCount} convention${totalCount === 1 ? \"\" : \"s\"}`);\n\n const threshold = parseFloat(options.threshold ?? \"0.7\");\n\n const filtered = registry.conventions.filter(\n (c) => c.confidence >= threshold\n );\n\n if (filtered.length === 0) {\n console.log(\n chalk.yellow(\n `\\nNo conventions found above ${threshold} confidence threshold. Try lowering --threshold.`\n )\n );\n return;\n }\n\n // Group by category\n const byCategory = new Map<string, ConventionEntry[]>();\n for (const convention of filtered) {\n const group = byCategory.get(convention.category) ?? 
[];\n group.push(convention);\n byCategory.set(convention.category, group);\n }\n\n console.log();\n for (const [category, conventions] of byCategory) {\n console.log(chalk.bold(category.toUpperCase()));\n for (const convention of conventions) {\n const pct = Math.round(convention.confidence * 100);\n console.log(\n ` ${confidenceDot(convention.confidence)} ${convention.pattern} ${chalk.gray(`(${pct}%)`)}`\n );\n }\n console.log();\n }\n\n const categoryCount = byCategory.size;\n console.log(\n chalk.gray(\n `Found ${filtered.length} convention${filtered.length === 1 ? \"\" : \"s\"} across ${categoryCount} categor${categoryCount === 1 ? \"y\" : \"ies\"} (threshold: ${threshold})`\n )\n );\n } catch (err) {\n spinner.fail(\"Analysis failed\");\n const message = err instanceof Error ? err.message : String(err);\n console.error(chalk.red(message));\n process.exit(1);\n }\n}\n","/**\n * Claim extractor — parses markdown context files into individual testable claims.\n *\n * Input: raw markdown string (CLAUDE.md, AGENTS.md, .cursorrules, etc.)\n * Output: Claim[] where each claim is an atomic declarative statement\n *\n * Extraction rules:\n * - Bullet points: ^[-*+]\\s+\n * - Numbered list items: ^\\d+\\.\\s+\n * - Bold/code markers stripped from extracted text\n * - Boilerplate value lines skipped (Language: X, Framework: X, etc.)\n * - ez-context markers and HTML comments skipped\n * - Claims shorter than 10 chars or longer than 300 chars excluded\n * - Current section heading tracked for context\n */\n\n// ---------------------------------------------------------------------------\n// Types\n// ---------------------------------------------------------------------------\n\nexport interface Claim {\n text: string; // The claim text (bold/code markers stripped)\n sourceFile: string; // Which file it came from\n sourceLine: number; // 1-based line number\n sourceSection: string; // Nearest parent heading\n}\n\n// 
---------------------------------------------------------------------------\n// Filters\n// ---------------------------------------------------------------------------\n\n/**\n * Matches boilerplate key-value lines that are structural metadata, not\n * behavioral claims. Applied AFTER bold/code stripping.\n *\n * Examples skipped:\n * \"Language: TypeScript\"\n * \"Package Manager: bun\"\n */\nconst BOILERPLATE_VALUE =\n /^(Language|Framework|Build|Package Manager|Test Runner|Pattern|Layers):\\s/i;\n\n// ---------------------------------------------------------------------------\n// Core function\n// ---------------------------------------------------------------------------\n\n/**\n * Extract all testable claims from a markdown string.\n *\n * @param content Raw markdown content of the context file\n * @param sourceFile Path to the source file (stored on each claim)\n * @returns Array of extracted claims, filtered and deduplicated\n */\nexport function extractClaims(content: string, sourceFile: string): Claim[] {\n const claims: Claim[] = [];\n const lines = content.split(\"\\n\");\n let currentSection = \"\";\n\n for (let i = 0; i < lines.length; i++) {\n const line = lines[i]!.trim();\n const lineNum = i + 1; // 1-based\n\n // Skip blank lines\n if (!line) continue;\n\n // Skip HTML comments (includes ez-context markers like <!-- ez-context:... -->)\n if (line.startsWith(\"<!--\")) continue;\n\n // Skip lines containing ez-context markers (belt-and-suspenders for inline markers)\n if (line.includes(\"ez-context:\")) continue;\n\n // Track section headings — H1, H2, H3\n const heading = line.match(/^#{1,3}\\s+(.+)/);\n if (heading) {\n currentSection = heading[1]!.trim();\n continue;\n }\n\n // Match bullet points or numbered list items\n const bullet = line.match(/^[-*+]\\s+(.+)/);\n const numbered = !bullet ? line.match(/^\\d+\\.\\s+(.+)/) : null;\n const rawText = bullet ? bullet[1]! : numbered ? numbered[1]! 
: null;\n\n if (!rawText) continue;\n\n // Strip bold markers (**text** -> text) and inline code markers (`text` -> text)\n const text = rawText\n .replace(/\\*\\*([^*]+)\\*\\*/g, \"$1\")\n .replace(/`([^`]+)`/g, \"$1\")\n .trim();\n\n // Apply length filters\n if (text.length < 10 || text.length > 300) continue;\n\n // Skip boilerplate key-value lines\n if (BOILERPLATE_VALUE.test(text)) continue;\n\n claims.push({\n text,\n sourceFile,\n sourceLine: lineNum,\n sourceSection: currentSection,\n });\n }\n\n return claims;\n}\n","/**\n * Claim scorer — compares extracted claims against the code index via semantic search.\n *\n * Each claim is searched against the indexed codebase. The top similarity score\n * determines whether the claim is GREEN (well-supported), YELLOW (possibly stale),\n * or RED (contradicted / not found).\n *\n * Claims are processed in batches to avoid ONNX pipeline contention.\n */\nimport type { Claim } from \"./claim-extractor.js\";\nimport type { SearchResult, EzSearchBridge } from \"../ez-search-bridge.js\";\n\n// ---------------------------------------------------------------------------\n// Constants\n// ---------------------------------------------------------------------------\n\nexport const GREEN_THRESHOLD = 0.65;\nexport const YELLOW_THRESHOLD = 0.40;\nexport const BATCH_SIZE = 10;\n\n// ---------------------------------------------------------------------------\n// Types\n// ---------------------------------------------------------------------------\n\nexport type ClaimStatus = \"GREEN\" | \"YELLOW\" | \"RED\";\n\nexport interface ScoredClaim {\n claim: Claim;\n status: ClaimStatus;\n score: number; // Top bridge.search() score (0.0-1.0)\n evidence: SearchResult[]; // Top k results\n}\n\n// ---------------------------------------------------------------------------\n// Internal helpers\n// ---------------------------------------------------------------------------\n\nfunction chunk<T>(arr: T[], size: number): T[][] {\n const chunks: 
T[][] = [];\n for (let i = 0; i < arr.length; i += size) {\n chunks.push(arr.slice(i, i + size));\n }\n return chunks;\n}\n\nfunction classifyScore(score: number): ClaimStatus {\n if (score >= GREEN_THRESHOLD) return \"GREEN\";\n if (score >= YELLOW_THRESHOLD) return \"YELLOW\";\n return \"RED\";\n}\n\nasync function scoreSingleClaim(\n claim: Claim,\n bridge: EzSearchBridge\n): Promise<ScoredClaim> {\n const evidence = await bridge.search(claim.text, { k: 5 });\n const topScore = evidence.length > 0 ? evidence[0]!.score : 0;\n return {\n claim,\n status: classifyScore(topScore),\n score: topScore,\n evidence,\n };\n}\n\n// ---------------------------------------------------------------------------\n// Public API\n// ---------------------------------------------------------------------------\n\n/**\n * Score all claims by searching the code index in batches of BATCH_SIZE.\n *\n * @param claims Claims to score\n * @param bridge EzSearchBridge instance bound to the project\n * @param onProgress Optional callback fired after each batch: (done, total)\n * @returns ScoredClaim[] in the same order as input claims\n */\nexport async function scoreClaims(\n claims: Claim[],\n bridge: EzSearchBridge,\n onProgress?: (completed: number, total: number) => void\n): Promise<ScoredClaim[]> {\n const total = claims.length;\n const batches = chunk(claims, BATCH_SIZE);\n const results: ScoredClaim[] = [];\n let completed = 0;\n\n for (const batch of batches) {\n const batchResults = await Promise.all(\n batch.map((claim) => scoreSingleClaim(claim, bridge))\n );\n results.push(...batchResults);\n completed += batch.length;\n onProgress?.(completed, total);\n }\n\n return results;\n}\n","/**\n * Drift report — aggregates scored claims into a health score and markdown report.\n *\n * Health score: mean of per-claim scores scaled 0-100 (rounded).\n * Zero claims yields a health score of 100 (nothing to contradict = healthy).\n *\n * Rendered markdown groups claims by status with 
evidence for stale/contradicted claims.\n */\nimport type { ScoredClaim } from \"./claim-scorer.js\";\n\n// ---------------------------------------------------------------------------\n// Types\n// ---------------------------------------------------------------------------\n\nexport interface DriftReport {\n sourceFile: string;\n healthScore: number;\n scoredClaims: ScoredClaim[];\n}\n\n// ---------------------------------------------------------------------------\n// Health score\n// ---------------------------------------------------------------------------\n\n/**\n * Compute the aggregate health score for a set of scored claims.\n * Returns 100 for empty input (no claims = no drift).\n */\nexport function computeHealthScore(scoredClaims: ScoredClaim[]): number {\n if (scoredClaims.length === 0) return 100;\n const mean =\n scoredClaims.reduce((sum, sc) => sum + sc.score, 0) / scoredClaims.length;\n return Math.round(mean * 100);\n}\n\n// ---------------------------------------------------------------------------\n// Report assembly\n// ---------------------------------------------------------------------------\n\n/**\n * Build a DriftReport from a source file path and its scored claims.\n */\nexport function buildDriftReport(\n sourceFile: string,\n scoredClaims: ScoredClaim[]\n): DriftReport {\n return {\n sourceFile,\n healthScore: computeHealthScore(scoredClaims),\n scoredClaims,\n };\n}\n\n// ---------------------------------------------------------------------------\n// Renderer\n// ---------------------------------------------------------------------------\n\nconst STATUS_LABEL: Record<string, string> = {\n GREEN: \"Confirmed\",\n YELLOW: \"Possibly Stale\",\n RED: \"Contradicted\",\n};\n\n/**\n * Render a drift report as a readable markdown string.\n *\n * Layout:\n * # Drift Report\n * Health score, source file, claim count\n *\n * ## Confirmed (GREEN)\n * - [GREEN] claim text (score: X.XX)\n *\n * ## Possibly Stale (YELLOW)\n * - [YELLOW] claim text 
(score: X.XX)\n * - file: chunk_preview\n *\n * ## Contradicted (RED)\n * - [RED] claim text (score: X.XX)\n * - file: chunk_preview\n *\n * Summary: X confirmed, Y possibly stale, Z contradicted\n */\nexport function renderDriftReport(report: DriftReport): string {\n const { sourceFile, healthScore, scoredClaims } = report;\n const lines: string[] = [];\n\n const green = scoredClaims.filter((sc) => sc.status === \"GREEN\");\n const yellow = scoredClaims.filter((sc) => sc.status === \"YELLOW\");\n const red = scoredClaims.filter((sc) => sc.status === \"RED\");\n\n // Header\n lines.push(\"# Drift Report\");\n lines.push(\"\");\n lines.push(`**Health Score:** ${healthScore}/100`);\n lines.push(`**File:** ${sourceFile}`);\n lines.push(`**Claims:** ${scoredClaims.length}`);\n lines.push(\"\");\n\n // Render a group of claims\n const renderGroup = (group: ScoredClaim[], status: string) => {\n if (group.length === 0) return;\n const label = STATUS_LABEL[status] ?? status;\n lines.push(`## ${label} (${status})`);\n lines.push(\"\");\n for (const sc of group) {\n lines.push(`- [${sc.status}] ${sc.claim.text} (score: ${sc.score.toFixed(2)})`);\n // Show top 2 evidence items for non-GREEN claims\n if (sc.status !== \"GREEN\") {\n const topEvidence = sc.evidence.slice(0, 2);\n for (const ev of topEvidence) {\n const preview = ev.chunk.replace(/\\s+/g, \" \").trim().slice(0, 80);\n lines.push(` - ${ev.file}: ${preview}`);\n }\n }\n }\n lines.push(\"\");\n };\n\n renderGroup(green, \"GREEN\");\n renderGroup(yellow, \"YELLOW\");\n renderGroup(red, \"RED\");\n\n // Summary\n lines.push(\n `Summary: ${green.length} confirmed, ${yellow.length} possibly stale, ${red.length} contradicted`\n );\n\n return lines.join(\"\\n\");\n}\n","import path from \"node:path\";\nimport { readFile } from \"node:fs/promises\";\nimport { existsSync } from \"node:fs\";\nimport ora from \"ora\";\nimport chalk from \"chalk\";\nimport { createBridge } from \"../core/ez-search-bridge.js\";\nimport { 
extractClaims } from \"../core/drift/claim-extractor.js\";\nimport { scoreClaims } from \"../core/drift/claim-scorer.js\";\nimport { buildDriftReport, renderDriftReport, computeHealthScore } from \"../core/drift/report.js\";\n\nconst CANDIDATE_FILES = [\"CLAUDE.md\", \"AGENTS.md\", \".cursorrules\", \"CONTEXT.md\"];\n\nfunction healthColor(score: number): string {\n if (score >= 70) return chalk.green(String(score));\n if (score >= 40) return chalk.yellow(String(score));\n return chalk.red(String(score));\n}\n\nexport async function driftAction(\n pathArg: string,\n options: { file?: string }\n): Promise<void> {\n const projectPath = path.resolve(pathArg);\n const spinner = ora(\"Loading context files...\").start();\n\n try {\n const bridge = await createBridge(projectPath);\n\n // Index check — auto-create if missing\n if (!(await bridge.hasIndex(projectPath))) {\n spinner.text = \"No search index found — creating index...\";\n await bridge.ensureIndex(projectPath);\n spinner.text = \"Index created. Loading context files...\";\n }\n\n // Resolve files\n let filePaths: string[];\n if (options.file) {\n filePaths = [path.resolve(projectPath, options.file)];\n } else {\n filePaths = CANDIDATE_FILES\n .map((name) => path.join(projectPath, name))\n .filter((p) => existsSync(p));\n }\n\n if (filePaths.length === 0) {\n spinner.fail(\"No context files found\");\n console.error(\n chalk.red(\"No CLAUDE.md, AGENTS.md, .cursorrules, or CONTEXT.md found. 
Use --file to specify one.\")\n );\n process.exit(1);\n }\n\n // Extract claims from each file\n const claimsByFile: Map<string, ReturnType<typeof extractClaims>> = new Map();\n for (const filePath of filePaths) {\n const content = await readFile(filePath, \"utf-8\");\n const claims = extractClaims(content, filePath);\n claimsByFile.set(filePath, claims);\n }\n\n const allClaims = [...claimsByFile.values()].flat();\n spinner.text = `Analyzing ${allClaims.length} claims...`;\n\n // Score claims with progress callback\n const scoredAll = await scoreClaims(allClaims, bridge, (done, total) => {\n spinner.text = `Checking claim ${done}/${total}...`;\n });\n\n // Build and render reports per file\n const reports = filePaths.map((filePath) => {\n const fileClaims = claimsByFile.get(filePath) ?? [];\n const fileScoredClaims = scoredAll.filter((sc) =>\n fileClaims.some((c) => c === sc.claim)\n );\n return buildDriftReport(filePath, fileScoredClaims);\n });\n\n const overallScore = computeHealthScore(scoredAll);\n spinner.succeed(`Drift analysis complete — health score: ${healthColor(overallScore)}/100`);\n\n console.log();\n for (const report of reports) {\n console.log(renderDriftReport(report));\n console.log();\n }\n } catch (err) {\n spinner.fail(\"Drift analysis failed\");\n const message = err instanceof Error ? err.message : String(err);\n console.error(chalk.red(message));\n process.exit(1);\n }\n}\n","/**\n * Updater — targeted regeneration engine for `ez-context update`.\n *\n * Orchestrates:\n * 1. Marker validation (pre-flight check, markers strategy only)\n * 2. Drift detection (skip GREEN files, markers strategy only)\n * 3. File backup (before any write)\n * 4. Re-rendering (via FORMAT_EMITTER_MAP)\n * 5. 
Write-back (writeWithMarkers for markers strategy, writeFile for direct)\n */\nimport { copyFile, readFile, writeFile } from \"node:fs/promises\";\nimport { existsSync } from \"node:fs\";\nimport path from \"node:path\";\n\nimport { MARKER_START, MARKER_END, writeWithMarkers } from \"../emitters/writer.js\";\nimport { FORMAT_EMITTER_MAP } from \"../emitters/index.js\";\nimport { extractClaims } from \"./drift/claim-extractor.js\";\nimport { scoreClaims } from \"./drift/claim-scorer.js\";\nimport type { EzSearchBridge } from \"./ez-search-bridge.js\";\nimport type { ConventionRegistry } from \"./schema.js\";\n\n// ---------------------------------------------------------------------------\n// Types\n// ---------------------------------------------------------------------------\n\nexport interface MarkerValidation {\n valid: boolean;\n mode: \"append\" | \"splice\" | \"invalid\";\n reason?: string;\n startIdx?: number;\n endIdx?: number;\n}\n\nexport type UpdateAction = \"skipped\" | \"updated\" | \"aborted\";\n\nexport interface FileUpdateResult {\n filePath: string;\n action: UpdateAction;\n reason: string;\n backupPath?: string;\n}\n\n// ---------------------------------------------------------------------------\n// validateMarkers\n// ---------------------------------------------------------------------------\n\n/**\n * Pre-flight marker check for updateFile.\n *\n * Unlike writeWithMarkers (which silently appends on unpaired markers),\n * validateMarkers rejects unpaired markers so updateFile can abort safely.\n *\n * Returns:\n * - { valid: true, mode: \"append\" } — no markers, safe to append\n * - { valid: true, mode: \"splice\", startIdx, endIdx } — well-formed pair\n * - { valid: false, mode: \"invalid\", reason } — unpaired or inverted markers\n */\nexport function validateMarkers(content: string): MarkerValidation {\n const startIdx = content.indexOf(MARKER_START);\n const endIdx = content.indexOf(MARKER_END);\n\n const hasStart = startIdx !== -1;\n const 
hasEnd = endIdx !== -1;\n\n // No markers at all -> safe to append\n if (!hasStart && !hasEnd) {\n return { valid: true, mode: \"append\" };\n }\n\n // Both markers present -> validate ordering\n if (hasStart && hasEnd) {\n if (endIdx < startIdx) {\n return {\n valid: false,\n mode: \"invalid\",\n reason: \"End marker appears before start marker (corrupted file)\",\n };\n }\n return { valid: true, mode: \"splice\", startIdx, endIdx };\n }\n\n // Unpaired: only one marker present\n if (hasStart && !hasEnd) {\n return {\n valid: false,\n mode: \"invalid\",\n reason: \"Unpaired ez-context marker: end marker missing\",\n };\n }\n\n // hasEnd && !hasStart\n return {\n valid: false,\n mode: \"invalid\",\n reason: \"Unpaired ez-context marker: start marker missing\",\n };\n}\n\n// ---------------------------------------------------------------------------\n// backupFile\n// ---------------------------------------------------------------------------\n\n/**\n * Copy filePath to filePath.bak and return the backup path.\n * Returns null if the file does not exist.\n * Overwrites any existing .bak silently (represents state before this run).\n */\nexport async function backupFile(filePath: string): Promise<string | null> {\n if (!existsSync(filePath)) {\n return null;\n }\n\n const backupPath = filePath + \".bak\";\n await copyFile(filePath, backupPath);\n return backupPath;\n}\n\n// ---------------------------------------------------------------------------\n// findFormatEntry\n// ---------------------------------------------------------------------------\n\n/**\n * Look up the FORMAT_EMITTER_MAP entry whose filename suffix matches filePath.\n * Returns undefined if the file doesn't correspond to a known format.\n */\nfunction findFormatEntry(filePath: string) {\n const normalized = path.normalize(filePath);\n for (const entry of Object.values(FORMAT_EMITTER_MAP)) {\n if (normalized.endsWith(path.normalize(entry.filename))) {\n return entry;\n }\n }\n return 
undefined;\n}\n\n// ---------------------------------------------------------------------------\n// updateFile\n// ---------------------------------------------------------------------------\n\n/**\n * Orchestrate drift detection and targeted re-rendering for a single file.\n *\n * The write strategy is determined by FORMAT_EMITTER_MAP:\n * - \"markers\" strategy: drift detection + writeWithMarkers (default)\n * - \"direct\" strategy: always regenerate + writeFile (full overwrite)\n *\n * Flow for markers strategy:\n * 1. File existence check — skip if missing\n * 2. Marker validation — abort on invalid markers\n * 3. Drift check (splice mode only) — skip if all claims GREEN\n * 4. Backup creation\n * 5. Re-render + writeWithMarkers\n *\n * Flow for direct strategy:\n * 1. File existence check — skip if missing\n * 2. Backup creation\n * 3. Re-render + writeFile (full overwrite)\n *\n * @param filePath Absolute path to the context file\n * @param registry Pre-computed convention registry (NOT extracted per-file)\n * @param bridge EzSearchBridge instance for drift scoring\n * @param confidenceThreshold Confidence floor passed to the renderer (default 0.7)\n */\nexport async function updateFile(\n filePath: string,\n registry: ConventionRegistry,\n bridge: EzSearchBridge,\n confidenceThreshold: number = 0.7\n): Promise<FileUpdateResult> {\n // 1. File existence check\n if (!existsSync(filePath)) {\n return { filePath, action: \"skipped\", reason: \"File does not exist\" };\n }\n\n const formatEntry = findFormatEntry(filePath);\n // Fall back to claude (markers) if the file isn't a known format\n const strategy = formatEntry?.strategy ?? \"markers\";\n const render = formatEntry?.render ?? 
FORMAT_EMITTER_MAP.claude.render;\n\n // ---------------------------------------------------------------------------\n // Direct strategy: full regeneration, no drift detection\n // ---------------------------------------------------------------------------\n if (strategy === \"direct\") {\n const backupPath = (await backupFile(filePath)) ?? undefined;\n const newContent = render(registry, confidenceThreshold);\n await writeFile(filePath, newContent, \"utf-8\");\n return {\n filePath,\n action: \"updated\",\n reason: \"Re-rendered (direct strategy)\",\n backupPath,\n };\n }\n\n // ---------------------------------------------------------------------------\n // Markers strategy: drift detection + writeWithMarkers\n // ---------------------------------------------------------------------------\n\n // 2. Read content and validate markers\n const content = await readFile(filePath, \"utf-8\");\n const validation = validateMarkers(content);\n\n if (!validation.valid) {\n return { filePath, action: \"aborted\", reason: validation.reason! };\n }\n\n // 3. Drift check (only when markers are already present)\n if (validation.mode === \"splice\") {\n const claims = extractClaims(content, filePath);\n\n // Nothing to check — skip (no claims extracted means no drift to detect)\n if (claims.length === 0) {\n return { filePath, action: \"skipped\", reason: \"No drift detected\" };\n }\n\n const scored = await scoreClaims(claims, bridge);\n const hasDrift = scored.some((s) => s.status !== \"GREEN\");\n\n if (!hasDrift) {\n return { filePath, action: \"skipped\", reason: \"No drift detected\" };\n }\n }\n // mode === \"append\": file has no generated section yet -> always proceed\n\n // 4. Backup before any write\n const backupPath = (await backupFile(filePath)) ?? undefined;\n\n // 5. 
Re-render + write\n const newContent = render(registry, confidenceThreshold);\n await writeWithMarkers(filePath, newContent);\n\n return {\n filePath,\n action: \"updated\",\n reason: \"Re-rendered drifted sections\",\n backupPath,\n };\n}\n","import path from \"node:path\";\nimport { existsSync } from \"node:fs\";\nimport ora from \"ora\";\nimport chalk from \"chalk\";\nimport { createBridge } from \"../core/ez-search-bridge.js\";\nimport { extractConventions } from \"../core/pipeline.js\";\nimport { updateFile } from \"../core/updater.js\";\nimport { extractClaims } from \"../core/drift/claim-extractor.js\";\nimport { scoreClaims } from \"../core/drift/claim-scorer.js\";\nimport { FORMAT_EMITTER_MAP } from \"../emitters/index.js\";\n\nexport async function updateAction(\n pathArg: string,\n options: { file?: string; dryRun?: boolean; yes?: boolean }\n): Promise<void> {\n const projectPath = path.resolve(pathArg);\n const spinner = ora(\"Checking for drift...\").start();\n\n try {\n const bridge = await createBridge(projectPath);\n\n // Guard: no index\n if (!(await bridge.hasIndex(projectPath))) {\n spinner.fail(\"No search index found\");\n console.error(\n chalk.red(\"Run 'ez-context generate' or 'ez-search index .' first to create an index.\")\n );\n process.exit(1);\n }\n\n // Resolve target files\n let filePaths: string[];\n if (options.file) {\n filePaths = [path.resolve(projectPath, options.file)];\n } else {\n filePaths = Object.values(FORMAT_EMITTER_MAP)\n .map((entry) => path.join(projectPath, entry.filename))\n .filter((p) => existsSync(p));\n }\n\n if (filePaths.length === 0) {\n spinner.fail(\"No context files found\");\n console.error(\n chalk.red(\"No generated context files found. 
Run 'ez-context generate' first, or use --file to specify one.\")\n );\n process.exit(1);\n }\n\n if (options.dryRun) {\n // Dry-run: analyze drift per file without writing\n spinner.succeed(\"Dry run complete\");\n console.log();\n console.log(chalk.bold.yellow(\"╔══════════════════════════════════════╗\"));\n console.log(chalk.bold.yellow(\"║ DRY RUN -- no files will be written ║\"));\n console.log(chalk.bold.yellow(\"╚══════════════════════════════════════╝\"));\n console.log();\n\n for (const filePath of filePaths) {\n const basename = path.basename(filePath);\n const { readFile } = await import(\"node:fs/promises\");\n const content = await readFile(filePath, \"utf-8\");\n const claims = extractClaims(content, filePath);\n\n if (claims.length === 0) {\n console.log(` ${chalk.gray(\"-\")} ${basename} ${chalk.gray(\"(no claims to check)\")}`);\n continue;\n }\n\n const scored = await scoreClaims(claims, bridge);\n const hasDrift = scored.some((s) => s.status !== \"GREEN\");\n\n if (hasDrift) {\n console.log(` ${chalk.yellow(\"~\")} Would update ${chalk.cyan(basename)}`);\n } else {\n console.log(` ${chalk.gray(\"-\")} Up to date: ${chalk.gray(basename)}`);\n }\n }\n\n return;\n }\n\n // Real update: process each file\n spinner.text = \"Extracting conventions...\";\n const registry = await extractConventions(projectPath);\n const results = [];\n for (const filePath of filePaths) {\n const basename = path.basename(filePath);\n spinner.text = `Updating ${basename}...`;\n const result = await updateFile(filePath, registry, bridge);\n results.push(result);\n }\n\n // Summarize results\n const updated = results.filter((r) => r.action === \"updated\");\n const aborted = results.filter((r) => r.action === \"aborted\");\n\n if (updated.length === 0 && aborted.length === 0) {\n spinner.succeed(\"All context files are up to date\");\n } else if (updated.length > 0) {\n spinner.succeed(\n `Updated ${updated.length} file${updated.length === 1 ? 
\"\" : \"s\"}`\n );\n } else {\n spinner.fail(\"Update incomplete — some files could not be updated\");\n }\n\n // Per-file report\n console.log();\n for (const result of results) {\n const basename = path.basename(result.filePath);\n if (result.action === \"updated\") {\n const backup = result.backupPath ? ` (backup: ${path.basename(result.backupPath)})` : \"\";\n console.log(` ${chalk.green(\"✓\")} ${chalk.cyan(basename)}${chalk.gray(backup)}`);\n } else if (result.action === \"skipped\") {\n console.log(` ${chalk.gray(\"-\")} ${chalk.gray(basename)} ${chalk.gray(`(${result.reason})`)}`);\n } else {\n // aborted\n console.log(` ${chalk.yellow(\"⚠\")} ${chalk.yellow(basename)} ${chalk.yellow(`(${result.reason})`)}`);\n }\n }\n\n if (aborted.length > 0) {\n console.log();\n console.log(\n chalk.yellow(`Warning: ${aborted.length} file${aborted.length === 1 ? \"\" : \"s\"} could not be updated due to marker issues.`)\n );\n }\n\n } catch (err) {\n spinner.fail(\"Update failed\");\n const message = err instanceof Error ? 
err.message : String(err);\n console.error(chalk.red(message));\n process.exit(1);\n }\n}\n","#!/usr/bin/env node\nimport { Command } from \"commander\";\nimport { generateAction } from \"./commands/generate.js\";\nimport { inspectAction } from \"./commands/inspect.js\";\nimport { driftAction } from \"./commands/drift.js\";\nimport { updateAction } from \"./commands/update.js\";\n\nconst program = new Command();\n\nprogram\n .name(\"ez-context\")\n .description(\"Generate AI context files from any project\")\n .version(\"0.1.0\");\n\nprogram\n .command(\"generate\")\n .description(\"Extract conventions and generate context files\")\n .argument(\"[path]\", \"project root to analyze\", \".\")\n .option(\"--dry-run\", \"preview without writing files\")\n .option(\"-y, --yes\", \"non-interactive mode\")\n .option(\"--output <dir>\", \"output directory\", \".\")\n .option(\"--threshold <number>\", \"confidence threshold 0-1\", \"0.7\")\n .option(\"--format <formats>\", \"output formats: claude,agents,cursor,copilot,skills,rulesync,ruler (comma-separated)\", \"claude,agents\")\n .action(generateAction);\n\nprogram\n .command(\"inspect\")\n .description(\"Display detected conventions\")\n .argument(\"[path]\", \"project root to analyze\", \".\")\n .option(\"--threshold <number>\", \"confidence threshold 0-1\", \"0.7\")\n .action(inspectAction);\n\nprogram\n .command(\"drift\")\n .description(\"Check context files against code for semantic drift\")\n .argument(\"[path]\", \"project root to analyze\", \".\")\n .option(\"--file <contextFile>\", \"specific context file to check\")\n .action(driftAction);\n\nprogram\n .command(\"update\")\n .description(\"Update drifted sections in context files, preserving manual edits\")\n .argument(\"[path]\", \"project root to analyze\", \".\")\n .option(\"--file <contextFile>\", \"specific context file to update\")\n .option(\"--dry-run\", \"preview changes without writing files\")\n .option(\"-y, --yes\", \"non-interactive mode\")\n 
.action(updateAction);\n\nawait program.parseAsync();\n"],"mappings":";;;;;;;;;;AAOA,MAAM,wBAAwB;AAE9B,MAAM,gBAAgC;CACpC;CACA;CACA;CACA;CACA;CACA;CACA;CACD;AAED,SAAgB,aAAa,KAA6B;CACxD,MAAM,UAAU,CAAC,GAAG,IAAI,IAAI,IAAI,MAAM,IAAI,CAAC,KAAK,MAAM,EAAE,MAAM,CAAC,CAAC,OAAO,QAAQ,CAAC,CAAC;CACjF,MAAM,UAAU,QAAQ,QAAQ,MAAM,CAAC,cAAc,SAAS,EAAkB,CAAC;AACjF,KAAI,QAAQ,SAAS,EACnB,OAAM,IAAI,MACR,sBAAsB,QAAQ,KAAK,KAAK,CAAC,WAAW,cAAc,KAAK,KAAK,GAC7E;AAEH,QAAO;;AAGT,SAAS,gBAAgB,SAAyB;CAChD,MAAM,QAAQ,QAAQ,MAAM,KAAK;AACjC,KAAI,MAAM,UAAU,sBAClB,QAAO;AAGT,QAAO,GADS,MAAM,MAAM,GAAG,sBAAsB,CAAC,KAAK,KAAK,CAC9C,SAAS,MAAM,OAAO;;AAG1C,eAAsB,eACpB,SACA,SACe;CACf,MAAM,cAAc,KAAK,QAAQ,QAAQ;CAIzC,MAAM,UAAU,IAAI,mCAAmC,CAAC,OAAO;AAE/D,KAAI;EACF,MAAM,WAAW,MAAM,mBAAmB,YAAY;EACtD,MAAM,kBAAkB,SAAS,YAAY;AAC7C,UAAQ,QAAQ,SAAS,gBAAgB,aAAa,oBAAoB,IAAI,KAAK,MAAM;EAEzF,MAAM,sBAAsB,WAAW,QAAQ,aAAa,MAAM;EAClE,MAAM,YAAY,KAAK,QAAQ,QAAQ,UAAU,IAAI;EAGrD,MAAM,UAAU,aAAa,QAAQ,UAAU,gBAAgB;EAE/D,MAAM,cAA2B;GAC/B;GACA;GACA,QAAQ,QAAQ,UAAU;GAC1B;GACD;EAMD,MAAM,aAAa,IAJD,QAAQ,WAAW,KAAK,QAAQ,SAAS,SAAS,IAAI,QAAQ,SAAS,SAAS,GAE9F,gCACA,cAAc,QAAQ,OAAO,eAAe,QAAQ,WAAW,IAAI,KAAK,IAAI,KAC1C,CAAC,OAAO;EAC9C,MAAM,SAAS,MAAM,KAAK,UAAU,YAAY;AAEhD,MAAI,QAAQ,QAAQ;AAClB,cAAW,QAAQ,mBAAmB;AACtC,WAAQ,KAAK;AACb,WAAQ,IAAI,MAAM,KAAK,OAAO,2CAA2C,CAAC;AAC1E,WAAQ,IAAI,MAAM,KAAK,OAAO,2CAA2C,CAAC;AAC1E,WAAQ,IAAI,MAAM,KAAK,OAAO,2CAA2C,CAAC;AAC1E,WAAQ,KAAK;AACb,QAAK,MAAM,CAAC,QAAQ,YAAY,OAAO,QAAQ,OAAO,SAAS,EAAE;AAC/D,YAAQ,IAAI,MAAM,KAAK,OAAO,OAAO,aAAa,CAAC,MAAM,CAAC;AAC1D,YAAQ,IAAI,gBAAgB,QAAQ,CAAC;AACrC,YAAQ,KAAK;;SAEV;AACL,cAAW,QAAQ,aAAa,OAAO,aAAa,OAAO,OAAO,OAAO,aAAa,WAAW,IAAI,KAAK,MAAM;AAChH,WAAQ,KAAK;AACb,WAAQ,IAAI,MAAM,KAAK,MAAM,mBAAmB,CAAC;AACjD,QAAK,MAAM,YAAY,OAAO,cAAc;IAC1C,MAAM,UAAU,KAAK,SAAS,WAAW,SAAS;AAClD,YAAQ,IAAI,KAAK,MAAM,KAAK,QAAQ,GAAG;;;UAGpC,KAAK;AACZ,UAAQ,KAAK,kBAAkB;EAC/B,MAAM,UAAU,eAAe,QAAQ,IAAI,UAAU,OAAO,IAAI;AAChE,UAAQ,MAAM,MAAM,IAAI,QAAQ,CAAC;AACjC,UAAQ,KAAK,EAAE;;;;;;AC7FnB,SAAS,cAAc,YAA4B;AACjD,KAAI,cAAc,GAAK,QAAO,MAAM,MAAM,IAAI;AA
C9C,KAAI,cAAc,GAAK,QAAO,MAAM,OAAO,IAAI;AAC/C,QAAO,MAAM,IAAI,IAAI;;AAGvB,eAAsB,cACpB,SACA,SACe;CACf,MAAM,cAAc,KAAK,QAAQ,QAAQ;CACzC,MAAM,UAAU,IAAI,mCAAmC,CAAC,OAAO;AAE/D,KAAI;EACF,MAAM,WAAW,MAAM,mBAAmB,YAAY;EACtD,MAAM,aAAa,SAAS,YAAY;AACxC,UAAQ,QAAQ,aAAa,WAAW,aAAa,eAAe,IAAI,KAAK,MAAM;EAEnF,MAAM,YAAY,WAAW,QAAQ,aAAa,MAAM;EAExD,MAAM,WAAW,SAAS,YAAY,QACnC,MAAM,EAAE,cAAc,UACxB;AAED,MAAI,SAAS,WAAW,GAAG;AACzB,WAAQ,IACN,MAAM,OACJ,gCAAgC,UAAU,kDAC3C,CACF;AACD;;EAIF,MAAM,6BAAa,IAAI,KAAgC;AACvD,OAAK,MAAM,cAAc,UAAU;GACjC,MAAM,QAAQ,WAAW,IAAI,WAAW,SAAS,IAAI,EAAE;AACvD,SAAM,KAAK,WAAW;AACtB,cAAW,IAAI,WAAW,UAAU,MAAM;;AAG5C,UAAQ,KAAK;AACb,OAAK,MAAM,CAAC,UAAU,gBAAgB,YAAY;AAChD,WAAQ,IAAI,MAAM,KAAK,SAAS,aAAa,CAAC,CAAC;AAC/C,QAAK,MAAM,cAAc,aAAa;IACpC,MAAM,MAAM,KAAK,MAAM,WAAW,aAAa,IAAI;AACnD,YAAQ,IACN,KAAK,cAAc,WAAW,WAAW,CAAC,GAAG,WAAW,QAAQ,GAAG,MAAM,KAAK,IAAI,IAAI,IAAI,GAC3F;;AAEH,WAAQ,KAAK;;EAGf,MAAM,gBAAgB,WAAW;AACjC,UAAQ,IACN,MAAM,KACJ,SAAS,SAAS,OAAO,aAAa,SAAS,WAAW,IAAI,KAAK,IAAI,UAAU,cAAc,UAAU,kBAAkB,IAAI,MAAM,MAAM,eAAe,UAAU,GACrK,CACF;UACM,KAAK;AACZ,UAAQ,KAAK,kBAAkB;EAC/B,MAAM,UAAU,eAAe,QAAQ,IAAI,UAAU,OAAO,IAAI;AAChE,UAAQ,MAAM,MAAM,IAAI,QAAQ,CAAC;AACjC,UAAQ,KAAK,EAAE;;;;;;;;;;;;;;AC9BnB,MAAM,oBACJ;;;;;;;;AAaF,SAAgB,cAAc,SAAiB,YAA6B;CAC1E,MAAM,SAAkB,EAAE;CAC1B,MAAM,QAAQ,QAAQ,MAAM,KAAK;CACjC,IAAI,iBAAiB;AAErB,MAAK,IAAI,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;EACrC,MAAM,OAAO,MAAM,GAAI,MAAM;EAC7B,MAAM,UAAU,IAAI;AAGpB,MAAI,CAAC,KAAM;AAGX,MAAI,KAAK,WAAW,OAAO,CAAE;AAG7B,MAAI,KAAK,SAAS,cAAc,CAAE;EAGlC,MAAM,UAAU,KAAK,MAAM,iBAAiB;AAC5C,MAAI,SAAS;AACX,oBAAiB,QAAQ,GAAI,MAAM;AACnC;;EAIF,MAAM,SAAS,KAAK,MAAM,gBAAgB;EAC1C,MAAM,WAAW,CAAC,SAAS,KAAK,MAAM,gBAAgB,GAAG;EACzD,MAAM,UAAU,SAAS,OAAO,KAAM,WAAW,SAAS,KAAM;AAEhE,MAAI,CAAC,QAAS;EAGd,MAAM,OAAO,QACV,QAAQ,oBAAoB,KAAK,CACjC,QAAQ,cAAc,KAAK,CAC3B,MAAM;AAGT,MAAI,KAAK,SAAS,MAAM,KAAK,SAAS,IAAK;AAG3C,MAAI,kBAAkB,KAAK,KAAK,CAAE;AAElC,SAAO,KAAK;GACV;GACA;GACA,YAAY;GACZ,eAAe;GAChB,CAAC;;AAGJ,QAAO;;;;;ACzFT,MAAa,kBAAkB;AAC/B,MAAa,mBAAmB;AAChC,MAAa,aAAa;AAmB1B,SAAS,MAAS,KAAU,
MAAqB;CAC/C,MAAM,SAAgB,EAAE;AACxB,MAAK,IAAI,IAAI,GAAG,IAAI,IAAI,QAAQ,KAAK,KACnC,QAAO,KAAK,IAAI,MAAM,GAAG,IAAI,KAAK,CAAC;AAErC,QAAO;;AAGT,SAAS,cAAc,OAA4B;AACjD,KAAI,SAAS,gBAAiB,QAAO;AACrC,KAAI,SAAS,iBAAkB,QAAO;AACtC,QAAO;;AAGT,eAAe,iBACb,OACA,QACsB;CACtB,MAAM,WAAW,MAAM,OAAO,OAAO,MAAM,MAAM,EAAE,GAAG,GAAG,CAAC;CAC1D,MAAM,WAAW,SAAS,SAAS,IAAI,SAAS,GAAI,QAAQ;AAC5D,QAAO;EACL;EACA,QAAQ,cAAc,SAAS;EAC/B,OAAO;EACP;EACD;;;;;;;;;;AAeH,eAAsB,YACpB,QACA,QACA,YACwB;CACxB,MAAM,QAAQ,OAAO;CACrB,MAAM,UAAU,MAAM,QAAQ,WAAW;CACzC,MAAM,UAAyB,EAAE;CACjC,IAAI,YAAY;AAEhB,MAAK,MAAM,SAAS,SAAS;EAC3B,MAAM,eAAe,MAAM,QAAQ,IACjC,MAAM,KAAK,UAAU,iBAAiB,OAAO,OAAO,CAAC,CACtD;AACD,UAAQ,KAAK,GAAG,aAAa;AAC7B,eAAa,MAAM;AACnB,eAAa,WAAW,MAAM;;AAGhC,QAAO;;;;;;;;;ACpET,SAAgB,mBAAmB,cAAqC;AACtE,KAAI,aAAa,WAAW,EAAG,QAAO;CACtC,MAAM,OACJ,aAAa,QAAQ,KAAK,OAAO,MAAM,GAAG,OAAO,EAAE,GAAG,aAAa;AACrE,QAAO,KAAK,MAAM,OAAO,IAAI;;;;;AAU/B,SAAgB,iBACd,YACA,cACa;AACb,QAAO;EACL;EACA,aAAa,mBAAmB,aAAa;EAC7C;EACD;;AAOH,MAAM,eAAuC;CAC3C,OAAO;CACP,QAAQ;CACR,KAAK;CACN;;;;;;;;;;;;;;;;;;;;;AAsBD,SAAgB,kBAAkB,QAA6B;CAC7D,MAAM,EAAE,YAAY,aAAa,iBAAiB;CAClD,MAAM,QAAkB,EAAE;CAE1B,MAAM,QAAQ,aAAa,QAAQ,OAAO,GAAG,WAAW,QAAQ;CAChE,MAAM,SAAS,aAAa,QAAQ,OAAO,GAAG,WAAW,SAAS;CAClE,MAAM,MAAM,aAAa,QAAQ,OAAO,GAAG,WAAW,MAAM;AAG5D,OAAM,KAAK,iBAAiB;AAC5B,OAAM,KAAK,GAAG;AACd,OAAM,KAAK,qBAAqB,YAAY,MAAM;AAClD,OAAM,KAAK,aAAa,aAAa;AACrC,OAAM,KAAK,eAAe,aAAa,SAAS;AAChD,OAAM,KAAK,GAAG;CAGd,MAAM,eAAe,OAAsB,WAAmB;AAC5D,MAAI,MAAM,WAAW,EAAG;EACxB,MAAM,QAAQ,aAAa,WAAW;AACtC,QAAM,KAAK,MAAM,MAAM,IAAI,OAAO,GAAG;AACrC,QAAM,KAAK,GAAG;AACd,OAAK,MAAM,MAAM,OAAO;AACtB,SAAM,KAAK,MAAM,GAAG,OAAO,IAAI,GAAG,MAAM,KAAK,WAAW,GAAG,MAAM,QAAQ,EAAE,CAAC,GAAG;AAE/E,OAAI,GAAG,WAAW,SAAS;IACzB,MAAM,cAAc,GAAG,SAAS,MAAM,GAAG,EAAE;AAC3C,SAAK,MAAM,MAAM,aAAa;KAC5B,MAAM,UAAU,GAAG,MAAM,QAAQ,QAAQ,IAAI,CAAC,MAAM,CAAC,MAAM,GAAG,GAAG;AACjE,WAAM,KAAK,OAAO,GAAG,KAAK,IAAI,UAAU;;;;AAI9C,QAAM,KAAK,GAAG;;AAGhB,aAAY,OAAO,QAAQ;AAC3B,aAAY,QAAQ,SAAS;AAC7B,aAAY,KAAK,MAAM;AAGvB,OAAM,KACJ,YAAY,MAAM,OAAO,cAAc,OAAO,OAAO,mBAA
mB,IAAI,OAAO,eACpF;AAED,QAAO,MAAM,KAAK,KAAK;;;;;ACtHzB,MAAM,kBAAkB;CAAC;CAAa;CAAa;CAAgB;CAAa;AAEhF,SAAS,YAAY,OAAuB;AAC1C,KAAI,SAAS,GAAI,QAAO,MAAM,MAAM,OAAO,MAAM,CAAC;AAClD,KAAI,SAAS,GAAI,QAAO,MAAM,OAAO,OAAO,MAAM,CAAC;AACnD,QAAO,MAAM,IAAI,OAAO,MAAM,CAAC;;AAGjC,eAAsB,YACpB,SACA,SACe;CACf,MAAM,cAAc,KAAK,QAAQ,QAAQ;CACzC,MAAM,UAAU,IAAI,2BAA2B,CAAC,OAAO;AAEvD,KAAI;EACF,MAAM,SAAS,MAAM,aAAa,YAAY;AAG9C,MAAI,CAAE,MAAM,OAAO,SAAS,YAAY,EAAG;AACzC,WAAQ,OAAO;AACf,SAAM,OAAO,YAAY,YAAY;AACrC,WAAQ,OAAO;;EAIjB,IAAI;AACJ,MAAI,QAAQ,KACV,aAAY,CAAC,KAAK,QAAQ,aAAa,QAAQ,KAAK,CAAC;MAErD,aAAY,gBACT,KAAK,SAAS,KAAK,KAAK,aAAa,KAAK,CAAC,CAC3C,QAAQ,MAAM,WAAW,EAAE,CAAC;AAGjC,MAAI,UAAU,WAAW,GAAG;AAC1B,WAAQ,KAAK,yBAAyB;AACtC,WAAQ,MACN,MAAM,IAAI,yFAAyF,CACpG;AACD,WAAQ,KAAK,EAAE;;EAIjB,MAAM,+BAA8D,IAAI,KAAK;AAC7E,OAAK,MAAM,YAAY,WAAW;GAEhC,MAAM,SAAS,cADC,MAAM,SAAS,UAAU,QAAQ,EACX,SAAS;AAC/C,gBAAa,IAAI,UAAU,OAAO;;EAGpC,MAAM,YAAY,CAAC,GAAG,aAAa,QAAQ,CAAC,CAAC,MAAM;AACnD,UAAQ,OAAO,aAAa,UAAU,OAAO;EAG7C,MAAM,YAAY,MAAM,YAAY,WAAW,SAAS,MAAM,UAAU;AACtE,WAAQ,OAAO,kBAAkB,KAAK,GAAG,MAAM;IAC/C;EAGF,MAAM,UAAU,UAAU,KAAK,aAAa;GAC1C,MAAM,aAAa,aAAa,IAAI,SAAS,IAAI,EAAE;AAInD,UAAO,iBAAiB,UAHC,UAAU,QAAQ,OACzC,WAAW,MAAM,MAAM,MAAM,GAAG,MAAM,CACvC,CACkD;IACnD;EAEF,MAAM,eAAe,mBAAmB,UAAU;AAClD,UAAQ,QAAQ,2CAA2C,YAAY,aAAa,CAAC,MAAM;AAE3F,UAAQ,KAAK;AACb,OAAK,MAAM,UAAU,SAAS;AAC5B,WAAQ,IAAI,kBAAkB,OAAO,CAAC;AACtC,WAAQ,KAAK;;UAER,KAAK;AACZ,UAAQ,KAAK,wBAAwB;EACrC,MAAM,UAAU,eAAe,QAAQ,IAAI,UAAU,OAAO,IAAI;AAChE,UAAQ,MAAM,MAAM,IAAI,QAAQ,CAAC;AACjC,UAAQ,KAAK,EAAE;;;;;;;;;;;;;;;;;;;;;;;;;;;ACjCnB,SAAgB,gBAAgB,SAAmC;CACjE,MAAM,WAAW,QAAQ,QAAQ,aAAa;CAC9C,MAAM,SAAS,QAAQ,QAAQ,WAAW;CAE1C,MAAM,WAAW,aAAa;CAC9B,MAAM,SAAS,WAAW;AAG1B,KAAI,CAAC,YAAY,CAAC,OAChB,QAAO;EAAE,OAAO;EAAM,MAAM;EAAU;AAIxC,KAAI,YAAY,QAAQ;AACtB,MAAI,SAAS,SACX,QAAO;GACL,OAAO;GACP,MAAM;GACN,QAAQ;GACT;AAEH,SAAO;GAAE,OAAO;GAAM,MAAM;GAAU;GAAU;GAAQ;;AAI1D,KAAI,YAAY,CAAC,OACf,QAAO;EACL,OAAO;EACP,MAAM;EACN,QAAQ;EACT;AAIH,QAAO;EACL,OAAO;EACP,MAAM;EACN,QAAQ;EACT;;;;;;;AAYH,eAAsB,WAAW,UAA0C;AAC
zE,KAAI,CAAC,WAAW,SAAS,CACvB,QAAO;CAGT,MAAM,aAAa,WAAW;AAC9B,OAAM,SAAS,UAAU,WAAW;AACpC,QAAO;;;;;;AAWT,SAAS,gBAAgB,UAAkB;CACzC,MAAM,aAAa,KAAK,UAAU,SAAS;AAC3C,MAAK,MAAM,SAAS,OAAO,OAAO,mBAAmB,CACnD,KAAI,WAAW,SAAS,KAAK,UAAU,MAAM,SAAS,CAAC,CACrD,QAAO;;;;;;;;;;;;;;;;;;;;;;;;;;AAkCb,eAAsB,WACpB,UACA,UACA,QACA,sBAA8B,IACH;AAE3B,KAAI,CAAC,WAAW,SAAS,CACvB,QAAO;EAAE;EAAU,QAAQ;EAAW,QAAQ;EAAuB;CAGvE,MAAM,cAAc,gBAAgB,SAAS;CAE7C,MAAM,WAAW,aAAa,YAAY;CAC1C,MAAM,SAAS,aAAa,UAAU,mBAAmB,OAAO;AAKhE,KAAI,aAAa,UAAU;EACzB,MAAM,aAAc,MAAM,WAAW,SAAS,IAAK;AAEnD,QAAM,UAAU,UADG,OAAO,UAAU,oBAAoB,EAClB,QAAQ;AAC9C,SAAO;GACL;GACA,QAAQ;GACR,QAAQ;GACR;GACD;;CAQH,MAAM,UAAU,MAAM,SAAS,UAAU,QAAQ;CACjD,MAAM,aAAa,gBAAgB,QAAQ;AAE3C,KAAI,CAAC,WAAW,MACd,QAAO;EAAE;EAAU,QAAQ;EAAW,QAAQ,WAAW;EAAS;AAIpE,KAAI,WAAW,SAAS,UAAU;EAChC,MAAM,SAAS,cAAc,SAAS,SAAS;AAG/C,MAAI,OAAO,WAAW,EACpB,QAAO;GAAE;GAAU,QAAQ;GAAW,QAAQ;GAAqB;AAMrE,MAAI,EAHW,MAAM,YAAY,QAAQ,OAAO,EACxB,MAAM,MAAM,EAAE,WAAW,QAAQ,CAGvD,QAAO;GAAE;GAAU,QAAQ;GAAW,QAAQ;GAAqB;;CAMvE,MAAM,aAAc,MAAM,WAAW,SAAS,IAAK;AAInD,OAAM,iBAAiB,UADJ,OAAO,UAAU,oBAAoB,CACZ;AAE5C,QAAO;EACL;EACA,QAAQ;EACR,QAAQ;EACR;EACD;;;;;ACjOH,eAAsB,aACpB,SACA,SACe;CACf,MAAM,cAAc,KAAK,QAAQ,QAAQ;CACzC,MAAM,UAAU,IAAI,wBAAwB,CAAC,OAAO;AAEpD,KAAI;EACF,MAAM,SAAS,MAAM,aAAa,YAAY;AAG9C,MAAI,CAAE,MAAM,OAAO,SAAS,YAAY,EAAG;AACzC,WAAQ,KAAK,wBAAwB;AACrC,WAAQ,MACN,MAAM,IAAI,6EAA6E,CACxF;AACD,WAAQ,KAAK,EAAE;;EAIjB,IAAI;AACJ,MAAI,QAAQ,KACV,aAAY,CAAC,KAAK,QAAQ,aAAa,QAAQ,KAAK,CAAC;MAErD,aAAY,OAAO,OAAO,mBAAmB,CAC1C,KAAK,UAAU,KAAK,KAAK,aAAa,MAAM,SAAS,CAAC,CACtD,QAAQ,MAAM,WAAW,EAAE,CAAC;AAGjC,MAAI,UAAU,WAAW,GAAG;AAC1B,WAAQ,KAAK,yBAAyB;AACtC,WAAQ,MACN,MAAM,IAAI,mGAAmG,CAC9G;AACD,WAAQ,KAAK,EAAE;;AAGjB,MAAI,QAAQ,QAAQ;AAElB,WAAQ,QAAQ,mBAAmB;AACnC,WAAQ,KAAK;AACb,WAAQ,IAAI,MAAM,KAAK,OAAO,2CAA2C,CAAC;AAC1E,WAAQ,IAAI,MAAM,KAAK,OAAO,2CAA2C,CAAC;AAC1E,WAAQ,IAAI,MAAM,KAAK,OAAO,2CAA2C,CAAC;AAC1E,WAAQ,KAAK;AAEb,QAAK,MAAM,YAAY,WAAW;IAChC,MAAM,WAAW,KAAK,SAAS,SAAS;IACxC,MAAM,EAAE,aAAa,MAAM,OAAO;IAElC,MAAM,SAAS,cADC,MAAM,SAAS,UAAU,QAAQ,EACX
,SAAS;AAE/C,QAAI,OAAO,WAAW,GAAG;AACvB,aAAQ,IAAI,KAAK,MAAM,KAAK,IAAI,CAAC,GAAG,SAAS,GAAG,MAAM,KAAK,uBAAuB,GAAG;AACrF;;AAMF,SAHe,MAAM,YAAY,QAAQ,OAAO,EACxB,MAAM,MAAM,EAAE,WAAW,QAAQ,CAGvD,SAAQ,IAAI,KAAK,MAAM,OAAO,IAAI,CAAC,gBAAgB,MAAM,KAAK,SAAS,GAAG;QAE1E,SAAQ,IAAI,KAAK,MAAM,KAAK,IAAI,CAAC,eAAe,MAAM,KAAK,SAAS,GAAG;;AAI3E;;AAIF,UAAQ,OAAO;EACf,MAAM,WAAW,MAAM,mBAAmB,YAAY;EACtD,MAAM,UAAU,EAAE;AAClB,OAAK,MAAM,YAAY,WAAW;AAEhC,WAAQ,OAAO,YADE,KAAK,SAAS,SAAS,CACJ;GACpC,MAAM,SAAS,MAAM,WAAW,UAAU,UAAU,OAAO;AAC3D,WAAQ,KAAK,OAAO;;EAItB,MAAM,UAAU,QAAQ,QAAQ,MAAM,EAAE,WAAW,UAAU;EAC7D,MAAM,UAAU,QAAQ,QAAQ,MAAM,EAAE,WAAW,UAAU;AAE7D,MAAI,QAAQ,WAAW,KAAK,QAAQ,WAAW,EAC7C,SAAQ,QAAQ,mCAAmC;WAC1C,QAAQ,SAAS,EAC1B,SAAQ,QACN,WAAW,QAAQ,OAAO,OAAO,QAAQ,WAAW,IAAI,KAAK,MAC9D;MAED,SAAQ,KAAK,sDAAsD;AAIrE,UAAQ,KAAK;AACb,OAAK,MAAM,UAAU,SAAS;GAC5B,MAAM,WAAW,KAAK,SAAS,OAAO,SAAS;AAC/C,OAAI,OAAO,WAAW,WAAW;IAC/B,MAAM,SAAS,OAAO,aAAa,aAAa,KAAK,SAAS,OAAO,WAAW,CAAC,KAAK;AACtF,YAAQ,IAAI,KAAK,MAAM,MAAM,IAAI,CAAC,GAAG,MAAM,KAAK,SAAS,GAAG,MAAM,KAAK,OAAO,GAAG;cACxE,OAAO,WAAW,UAC3B,SAAQ,IAAI,KAAK,MAAM,KAAK,IAAI,CAAC,GAAG,MAAM,KAAK,SAAS,CAAC,GAAG,MAAM,KAAK,IAAI,OAAO,OAAO,GAAG,GAAG;OAG/F,SAAQ,IAAI,KAAK,MAAM,OAAO,IAAI,CAAC,GAAG,MAAM,OAAO,SAAS,CAAC,GAAG,MAAM,OAAO,IAAI,OAAO,OAAO,GAAG,GAAG;;AAIzG,MAAI,QAAQ,SAAS,GAAG;AACtB,WAAQ,KAAK;AACb,WAAQ,IACN,MAAM,OAAO,YAAY,QAAQ,OAAO,OAAO,QAAQ,WAAW,IAAI,KAAK,IAAI,6CAA6C,CAC7H;;UAGI,KAAK;AACZ,UAAQ,KAAK,gBAAgB;EAC7B,MAAM,UAAU,eAAe,QAAQ,IAAI,UAAU,OAAO,IAAI;AAChE,UAAQ,MAAM,MAAM,IAAI,QAAQ,CAAC;AACjC,UAAQ,KAAK,EAAE;;;;;;AC7HnB,MAAM,UAAU,IAAI,SAAS;AAE7B,QACG,KAAK,aAAa,CAClB,YAAY,6CAA6C,CACzD,QAAQ,QAAQ;AAEnB,QACG,QAAQ,WAAW,CACnB,YAAY,iDAAiD,CAC7D,SAAS,UAAU,2BAA2B,IAAI,CAClD,OAAO,aAAa,gCAAgC,CACpD,OAAO,aAAa,uBAAuB,CAC3C,OAAO,kBAAkB,oBAAoB,IAAI,CACjD,OAAO,wBAAwB,4BAA4B,MAAM,CACjE,OAAO,sBAAsB,wFAAwF,gBAAgB,CACrI,OAAO,eAAe;AAEzB,QACG,QAAQ,UAAU,CAClB,YAAY,+BAA+B,CAC3C,SAAS,UAAU,2BAA2B,IAAI,CAClD,OAAO,wBAAwB,4BAA4B,MAAM,CACjE,OAAO,cAAc;AAExB,QACG,QAAQ,QAAQ,CAChB,YAAY,sDAAsD,CAClE,SAAS,UAAU,2BAA2B,IAAI,CAC
lD,OAAO,wBAAwB,iCAAiC,CAChE,OAAO,YAAY;AAEtB,QACG,QAAQ,SAAS,CACjB,YAAY,oEAAoE,CAChF,SAAS,UAAU,2BAA2B,IAAI,CAClD,OAAO,wBAAwB,kCAAkC,CACjE,OAAO,aAAa,wCAAwC,CAC5D,OAAO,aAAa,uBAAuB,CAC3C,OAAO,aAAa;AAEvB,MAAM,QAAQ,YAAY"}
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@ez-corp/ez-context",
3
- "version": "0.1.0",
3
+ "version": "0.1.2",
4
4
  "description": "Extract coding conventions and generate AI context files (CLAUDE.md, AGENTS.md, Cursor rules, Copilot instructions) with semantic drift detection",
5
5
  "type": "module",
6
6
  "license": "ISC",
@@ -29,7 +29,7 @@
29
29
  "types": "./dist/index.d.ts"
30
30
  }
31
31
  },
32
- "files": ["dist/", "README.md", "LICENSE"],
32
+ "files": ["dist/", "!dist/ez-context", "README.md", "LICENSE"],
33
33
  "scripts": {
34
34
  "build": "tsdown",
35
35
  "compile": "bun run build && bun build src/cli.ts --compile --outfile dist/ez-context",
package/dist/ez-context DELETED
Binary file