@iloom/cli 0.7.6 → 0.8.1
This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the versions as they appear in their respective public registries.
- package/LICENSE +1 -1
- package/README.md +32 -3
- package/dist/{ClaudeContextManager-Y2YJC6BU.js → ClaudeContextManager-RDP6CLK6.js} +5 -5
- package/dist/{ClaudeService-NDVFQRKC.js → ClaudeService-FKPOQRA4.js} +4 -4
- package/dist/GitHubService-ACZVNTJE.js +12 -0
- package/dist/{LoomLauncher-U2B3VHPC.js → LoomLauncher-NHZMEVTQ.js} +5 -5
- package/dist/{MetadataManager-XJ2YB762.js → MetadataManager-W3C54UYT.js} +2 -2
- package/dist/{PRManager-7F3AAY66.js → PRManager-H4TUZTZL.js} +5 -5
- package/dist/{PromptTemplateManager-7L3HJQQU.js → PromptTemplateManager-OUYDHOPI.js} +2 -2
- package/dist/README.md +32 -3
- package/dist/{SettingsManager-YU4VYPTW.js → SettingsManager-VCVLL32H.js} +4 -2
- package/dist/{SettingsMigrationManager-KZKDG66H.js → SettingsMigrationManager-LEBMJP3B.js} +3 -3
- package/dist/agents/iloom-code-reviewer.md +735 -0
- package/dist/agents/iloom-framework-detector.md +1 -1
- package/dist/agents/iloom-issue-analyze-and-plan.md +2 -2
- package/dist/agents/iloom-issue-analyzer.md +2 -2
- package/dist/agents/iloom-issue-complexity-evaluator.md +1 -1
- package/dist/agents/iloom-issue-enhancer.md +2 -2
- package/dist/agents/iloom-issue-implementer.md +3 -3
- package/dist/agents/iloom-issue-planner.md +2 -2
- package/dist/{build-HQ5HGA3T.js → build-H4DK3DMQ.js} +7 -7
- package/dist/{chunk-N7FVXZNI.js → chunk-4BSXZ5YZ.js} +31 -9
- package/dist/chunk-4BSXZ5YZ.js.map +1 -0
- package/dist/{chunk-VYKKWU36.js → chunk-4KGRPHM6.js} +3 -3
- package/dist/{chunk-CFQVOTHO.js → chunk-52MVUK5V.js} +2 -2
- package/dist/{chunk-TIYJEEVO.js → chunk-66QOCD5N.js} +1 -1
- package/dist/chunk-66QOCD5N.js.map +1 -0
- package/dist/chunk-7JDMYTFZ.js +251 -0
- package/dist/chunk-7JDMYTFZ.js.map +1 -0
- package/dist/{chunk-7LSSNB7Y.js → chunk-7ZEHSSUP.js} +2 -2
- package/dist/chunk-A4UQY3M2.js +75 -0
- package/dist/chunk-A4UQY3M2.js.map +1 -0
- package/dist/{chunk-ELJKYFSH.js → chunk-BCQDYAOJ.js} +4 -4
- package/dist/{chunk-F2PWIRV4.js → chunk-BYUMEDDD.js} +2 -2
- package/dist/{chunk-CAXFWFV6.js → chunk-ECP77QGE.js} +4 -4
- package/dist/{chunk-6YAMWLCP.js → chunk-EQOFNPEY.js} +3 -3
- package/dist/{chunk-ZA575VLF.js → chunk-GDS2HXSW.js} +4 -4
- package/dist/{chunk-UDRZY65Y.js → chunk-HSGZW3ID.js} +2 -2
- package/dist/{chunk-WFQ5CLTR.js → chunk-IWIIOFEB.js} +56 -5
- package/dist/chunk-IWIIOFEB.js.map +1 -0
- package/dist/{chunk-VWGKGNJP.js → chunk-KBEIQP4G.js} +3 -1
- package/dist/chunk-KBEIQP4G.js.map +1 -0
- package/dist/{chunk-ETY2SBW5.js → chunk-NR64HNF7.js} +17 -15
- package/dist/chunk-NR64HNF7.js.map +1 -0
- package/dist/{chunk-WT4UGBE2.js → chunk-PBSHQVCT.js} +5 -5
- package/dist/{chunk-64HCHVJM.js → chunk-PLI3JQWT.js} +2 -2
- package/dist/{chunk-USJSNHGG.js → chunk-PVW6JE7E.js} +3 -3
- package/dist/{chunk-HBJITKSZ.js → chunk-RNBIISBZ.js} +161 -3
- package/dist/chunk-RNBIISBZ.js.map +1 -0
- package/dist/{chunk-C7YW5IMS.js → chunk-RODL2HVY.js} +17 -6
- package/dist/{chunk-C7YW5IMS.js.map → chunk-RODL2HVY.js.map} +1 -1
- package/dist/{chunk-3K3WY3BN.js → chunk-SC6X5EBG.js} +4 -4
- package/dist/{chunk-NEPH2O4C.js → chunk-SSASIBDJ.js} +3 -3
- package/dist/{chunk-GCPAZSGV.js → chunk-THS5L54H.js} +150 -3
- package/dist/chunk-THS5L54H.js.map +1 -0
- package/dist/{chunk-5V74K5ZA.js → chunk-TVH67KEO.js} +25 -2
- package/dist/chunk-TVH67KEO.js.map +1 -0
- package/dist/{chunk-NPEMVE27.js → chunk-UDZCTLD6.js} +115 -3
- package/dist/chunk-UDZCTLD6.js.map +1 -0
- package/dist/{chunk-ENMTWE74.js → chunk-VZYSM7N7.js} +2 -2
- package/dist/{chunk-WZYBHD7P.js → chunk-XHNACIHO.js} +2 -2
- package/dist/{chunk-XAMBIVXE.js → chunk-XJHQVOT6.js} +2 -2
- package/dist/{chunk-O36JLYNW.js → chunk-XU5A6BWA.js} +4 -7
- package/dist/chunk-XU5A6BWA.js.map +1 -0
- package/dist/{cleanup-IO4KV2DL.js → cleanup-OGE7V7AD.js} +16 -16
- package/dist/cli.js +317 -164
- package/dist/cli.js.map +1 -1
- package/dist/{commit-3ULFKXNB.js → commit-534QIRHY.js} +10 -10
- package/dist/{compile-CT7IR7O2.js → compile-ZOAODFN2.js} +7 -7
- package/dist/{contribute-GXKOIA42.js → contribute-7USRBWRM.js} +6 -6
- package/dist/{dev-server-OAP3RZC6.js → dev-server-TYYJM3XA.js} +9 -9
- package/dist/{feedback-ZLAX3BVL.js → feedback-HZVLOTQJ.js} +9 -9
- package/dist/{git-ENLT2VNI.js → git-GUNOPP4Q.js} +4 -4
- package/dist/hooks/iloom-hook.js +75 -3
- package/dist/{ignite-HA2OJF6Z.js → ignite-ZO7SGUKP.js} +85 -25
- package/dist/ignite-ZO7SGUKP.js.map +1 -0
- package/dist/index.d.ts +85 -2
- package/dist/index.js +133 -73
- package/dist/index.js.map +1 -1
- package/dist/init-MZBIXQ7V.js +21 -0
- package/dist/{lint-HAVU4U34.js → lint-MDVUV3W2.js} +7 -7
- package/dist/mcp/issue-management-server.js +569 -2
- package/dist/mcp/issue-management-server.js.map +1 -1
- package/dist/{neon-helpers-3KBC4A3Y.js → neon-helpers-VVFFTLXE.js} +3 -3
- package/dist/{open-IN3LUZXX.js → open-2LPZ7XXW.js} +9 -9
- package/dist/plan-PIME6UNY.js +371 -0
- package/dist/plan-PIME6UNY.js.map +1 -0
- package/dist/{projects-CTRTTMSK.js → projects-325GEEGJ.js} +2 -2
- package/dist/{prompt-3SAZYRUN.js → prompt-ONNPSNKM.js} +2 -2
- package/dist/prompts/init-prompt.txt +83 -3
- package/dist/prompts/issue-prompt.txt +51 -3
- package/dist/prompts/plan-prompt.txt +435 -0
- package/dist/prompts/pr-prompt.txt +38 -0
- package/dist/prompts/regular-prompt.txt +53 -3
- package/dist/{rebase-RLEVFHWN.js → rebase-7YS3N274.js} +6 -6
- package/dist/{recap-ZKGHZCX6.js → recap-GSXFEOD6.js} +6 -6
- package/dist/{run-QEIS2EH2.js → run-XPGCMFFO.js} +9 -9
- package/dist/schema/settings.schema.json +57 -1
- package/dist/{shell-2NNSIU34.js → shell-2SPM3Z5O.js} +6 -6
- package/dist/{summary-MPOOQIOX.js → summary-C5VVSJAJ.js} +11 -11
- package/dist/{test-75WAA6DU.js → test-N2725YRI.js} +7 -7
- package/dist/{test-git-E2BLXR6M.js → test-git-ZPSPA2TP.js} +4 -4
- package/dist/{test-prefix-A7JGGYAA.js → test-prefix-6DLB2BHE.js} +4 -4
- package/dist/{test-webserver-J6SMNLU2.js → test-webserver-XLJ2TZFP.js} +6 -6
- package/package.json +1 -1
- package/dist/GitHubService-O7U4UQ7N.js +0 -12
- package/dist/agents/iloom-issue-reviewer.md +0 -139
- package/dist/chunk-5V74K5ZA.js.map +0 -1
- package/dist/chunk-ETY2SBW5.js.map +0 -1
- package/dist/chunk-GCPAZSGV.js.map +0 -1
- package/dist/chunk-HBJITKSZ.js.map +0 -1
- package/dist/chunk-N7FVXZNI.js.map +0 -1
- package/dist/chunk-NPEMVE27.js.map +0 -1
- package/dist/chunk-O36JLYNW.js.map +0 -1
- package/dist/chunk-TIYJEEVO.js.map +0 -1
- package/dist/chunk-VWGKGNJP.js.map +0 -1
- package/dist/chunk-WFQ5CLTR.js.map +0 -1
- package/dist/chunk-ZX3GTM7O.js +0 -119
- package/dist/chunk-ZX3GTM7O.js.map +0 -1
- package/dist/ignite-HA2OJF6Z.js.map +0 -1
- package/dist/init-S6IEGRSX.js +0 -21
- /package/dist/{ClaudeContextManager-Y2YJC6BU.js.map → ClaudeContextManager-RDP6CLK6.js.map} +0 -0
- /package/dist/{ClaudeService-NDVFQRKC.js.map → ClaudeService-FKPOQRA4.js.map} +0 -0
- /package/dist/{GitHubService-O7U4UQ7N.js.map → GitHubService-ACZVNTJE.js.map} +0 -0
- /package/dist/{LoomLauncher-U2B3VHPC.js.map → LoomLauncher-NHZMEVTQ.js.map} +0 -0
- /package/dist/{MetadataManager-XJ2YB762.js.map → MetadataManager-W3C54UYT.js.map} +0 -0
- /package/dist/{PRManager-7F3AAY66.js.map → PRManager-H4TUZTZL.js.map} +0 -0
- /package/dist/{PromptTemplateManager-7L3HJQQU.js.map → PromptTemplateManager-OUYDHOPI.js.map} +0 -0
- /package/dist/{SettingsManager-YU4VYPTW.js.map → SettingsManager-VCVLL32H.js.map} +0 -0
- /package/dist/{SettingsMigrationManager-KZKDG66H.js.map → SettingsMigrationManager-LEBMJP3B.js.map} +0 -0
- /package/dist/{build-HQ5HGA3T.js.map → build-H4DK3DMQ.js.map} +0 -0
- /package/dist/{chunk-VYKKWU36.js.map → chunk-4KGRPHM6.js.map} +0 -0
- /package/dist/{chunk-CFQVOTHO.js.map → chunk-52MVUK5V.js.map} +0 -0
- /package/dist/{chunk-7LSSNB7Y.js.map → chunk-7ZEHSSUP.js.map} +0 -0
- /package/dist/{chunk-ELJKYFSH.js.map → chunk-BCQDYAOJ.js.map} +0 -0
- /package/dist/{chunk-F2PWIRV4.js.map → chunk-BYUMEDDD.js.map} +0 -0
- /package/dist/{chunk-CAXFWFV6.js.map → chunk-ECP77QGE.js.map} +0 -0
- /package/dist/{chunk-6YAMWLCP.js.map → chunk-EQOFNPEY.js.map} +0 -0
- /package/dist/{chunk-ZA575VLF.js.map → chunk-GDS2HXSW.js.map} +0 -0
- /package/dist/{chunk-UDRZY65Y.js.map → chunk-HSGZW3ID.js.map} +0 -0
- /package/dist/{chunk-WT4UGBE2.js.map → chunk-PBSHQVCT.js.map} +0 -0
- /package/dist/{chunk-64HCHVJM.js.map → chunk-PLI3JQWT.js.map} +0 -0
- /package/dist/{chunk-USJSNHGG.js.map → chunk-PVW6JE7E.js.map} +0 -0
- /package/dist/{chunk-3K3WY3BN.js.map → chunk-SC6X5EBG.js.map} +0 -0
- /package/dist/{chunk-NEPH2O4C.js.map → chunk-SSASIBDJ.js.map} +0 -0
- /package/dist/{chunk-ENMTWE74.js.map → chunk-VZYSM7N7.js.map} +0 -0
- /package/dist/{chunk-WZYBHD7P.js.map → chunk-XHNACIHO.js.map} +0 -0
- /package/dist/{chunk-XAMBIVXE.js.map → chunk-XJHQVOT6.js.map} +0 -0
- /package/dist/{cleanup-IO4KV2DL.js.map → cleanup-OGE7V7AD.js.map} +0 -0
- /package/dist/{commit-3ULFKXNB.js.map → commit-534QIRHY.js.map} +0 -0
- /package/dist/{compile-CT7IR7O2.js.map → compile-ZOAODFN2.js.map} +0 -0
- /package/dist/{contribute-GXKOIA42.js.map → contribute-7USRBWRM.js.map} +0 -0
- /package/dist/{dev-server-OAP3RZC6.js.map → dev-server-TYYJM3XA.js.map} +0 -0
- /package/dist/{feedback-ZLAX3BVL.js.map → feedback-HZVLOTQJ.js.map} +0 -0
- /package/dist/{git-ENLT2VNI.js.map → git-GUNOPP4Q.js.map} +0 -0
- /package/dist/{init-S6IEGRSX.js.map → init-MZBIXQ7V.js.map} +0 -0
- /package/dist/{lint-HAVU4U34.js.map → lint-MDVUV3W2.js.map} +0 -0
- /package/dist/{neon-helpers-3KBC4A3Y.js.map → neon-helpers-VVFFTLXE.js.map} +0 -0
- /package/dist/{open-IN3LUZXX.js.map → open-2LPZ7XXW.js.map} +0 -0
- /package/dist/{projects-CTRTTMSK.js.map → projects-325GEEGJ.js.map} +0 -0
- /package/dist/{prompt-3SAZYRUN.js.map → prompt-ONNPSNKM.js.map} +0 -0
- /package/dist/{rebase-RLEVFHWN.js.map → rebase-7YS3N274.js.map} +0 -0
- /package/dist/{recap-ZKGHZCX6.js.map → recap-GSXFEOD6.js.map} +0 -0
- /package/dist/{run-QEIS2EH2.js.map → run-XPGCMFFO.js.map} +0 -0
- /package/dist/{shell-2NNSIU34.js.map → shell-2SPM3Z5O.js.map} +0 -0
- /package/dist/{summary-MPOOQIOX.js.map → summary-C5VVSJAJ.js.map} +0 -0
- /package/dist/{test-75WAA6DU.js.map → test-N2725YRI.js.map} +0 -0
- /package/dist/{test-git-E2BLXR6M.js.map → test-git-ZPSPA2TP.js.map} +0 -0
- /package/dist/{test-prefix-A7JGGYAA.js.map → test-prefix-6DLB2BHE.js.map} +0 -0
- /package/dist/{test-webserver-J6SMNLU2.js.map → test-webserver-XLJ2TZFP.js.map} +0 -0
@@ -1 +0,0 @@
- {"version":3,"sources":["../src/utils/image-processor.ts","../src/mcp/GitHubIssueManagementProvider.ts","../src/utils/linear-markup-converter.ts","../src/mcp/LinearIssueManagementProvider.ts","../src/mcp/IssueManagementProviderFactory.ts"],"sourcesContent":[…],"mappings":"…"}
QAAI,cAAc;AAElB,WAAO,iBAAiB,aAAa;AACpC,qBAAe;AACf,oBAAc,KAAK,kBAAkB,WAAW;AAAA,IACjD;AAEA,WAAO;AAAA,EACR;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,OAAe,kBAAkB,MAAsB;AAGtD,UAAM,eAAe;AAErB,WAAO,KAAK,QAAQ,cAAc,CAAC,QAAQ,SAAS,YAAY;AAE/D,YAAM,eAAe,KAAK,UAAU,OAAO;AAI3C,YAAM,eAAe,KAAK,aAAa,OAAO;AAI9C,UAAI,cAAc;AACjB,eAAO,OAAO,YAAY;AAAA;AAAA,EAAO,YAAY;AAAA;AAAA;AAAA,MAC9C,OAAO;AAEN,eAAO,OAAO,YAAY;AAAA;AAAA;AAAA,MAC3B;AAAA,IACD,CAAC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,OAAe,UAAU,MAAsB;AAC9C,WAAO,KACL,KAAK,EACL,QAAQ,SAAS,GAAG,EACpB,QAAQ,SAAS,GAAG,EACpB,QAAQ,UAAU,GAAG,EACrB,QAAQ,WAAW,GAAG,EACtB,QAAQ,UAAU,GAAG;AAAA,EACxB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,OAAe,aAAa,SAAyB;AACpD,QAAI,CAAC,SAAS;AACb,aAAO;AAAA,IACR;AAGA,QAAI,UAAU,QAAQ,KAAK;AAG3B,cAAU,QAAQ,QAAQ,WAAW,MAAM;AAE3C,WAAO;AAAA,EACR;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,OAAO,iBAAiB,MAAuB;AAC9C,QAAI,CAAC,MAAM;AACV,aAAO;AAAA,IACR;AAEA,UAAM,eAAe;AACrB,WAAO,aAAa,KAAK,IAAI;AAAA,EAC9B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,OAAO,yBAAyB,MAAsB;AACrD,QAAI,CAAC,MAAM;AACV,aAAO;AAAA,IACR;AAMA,UAAM,kBAAkB;AAExB,WAAO,KAAK,QAAQ,iBAAiB,CAAC,QAAQ,UAAU,YAAY;AAGnE,aAAO,QAAQ,KAAK;AAAA,IACrB,CAAC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,OAAO,gBAAgB,MAAsB;AAC5C,QAAI,CAAC,MAAM;AACV,aAAO;AAAA,IACR;AAGA,SAAK,cAAc,SAAS,IAAI;AAGhC,QAAI,YAAY;AAIhB,gBAAY,KAAK,yBAAyB,SAAS;AAGnD,gBAAY,KAAK,uBAAuB,SAAS;AAGjD,SAAK,cAAc,UAAU,SAAS;AAEtC,WAAO;AAAA,EACR;AAAA;AAAA;AAAA;AAAA,EAKA,OAAe,cAAc,OAAe,SAAuB;AAClE,UAAM,cAAc,QAAQ,IAAI;AAChC,QAAI,CAAC,aAAa;AACjB;AAAA,IACD;AAEA,QAAI;AACH,YAAM,kBAAkB,KAAK,sBAAsB,WAAW;AAC9D,YAAM,aAAY,oBAAI,KAAK,GAAE,YAAY;AACzC,YAAM,YAAY;AAElB,YAAM,WAAW,GAAG,SAAS;AAAA,GAAM,SAAS,gBAAgB,KAAK;AAAA,EAAK,SAAS;AAAA,EAAK,KAAK;AAAA,EAAM,OAAO;AAAA;AAAA;AAEtG,qBAAe,iBAAiB,UAAU,OAAO;AAAA,IAClD,QAAQ;AAAA,IAGR;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,OAAe,sBAAsB,aAA6B;AACjE,UAAM,MAAM,QAAQ,WAAW;AAC/B,UAAM,MAAMA,SAAQ,WAAW;AAC/B,UAAM,OAAO,SAAS,aAAa,GAAG;AAGtC,UAAM,MAAM,oBAAI,KAAK;AACrB,UAAM,YAAY;AAAA,MACjB,IAAI,YAAY;AAAA,MAChB,OAAO,IAAI,SAAS,IAAI,CAAC,EAAE,SAAS,GAAG,GAAG;AAAA,MAC1C,OAAO,IAAI,QAAQ,CAAC,EAAE,SAAS,GAAG,GAAG;AAAA,IACtC,EAAE,KAAK,EAAE,IAAI,MAAM;AAAA,MAClB,OAAO,IAAI,SAAS,CAAC,EAAE,SAAS,GAAG,GAAG;AAAA,MACtC,OAAO,IAAI,WAAW,CAAC,EAAE,SAAS,GAAG,GAAG;AAAA,MACxC,OAAO,IAAI,WAAW,CAAC,EAAE,SAAS,GAAG,GAAG;AAAA,IACzC,EAAE,KAAK,EAAE;AAET,WAAOD,MAAK,KAAK,GAAG,IAAI,IAAI,SAAS,GAAG,GAAG,EAAE;AAAA,EAC9C;AACD;;;ACzLO,IAAM,gCAAN,MAAuE;AAAA,EAAvE;AACN,SAAS,eAAe;AACxB,SAAS,cAAc;AAMvB;AAAA;AAAA;AAAA;AAAA,SAAQ,gBAAoC;AAAA;AAAA;AAAA;AAAA;AAAA,EAK5C,MAAM,SAAS,OAA4C;AAC1D,UAAM,EAAE,QAAQ,kBAAkB,KAAK,IAAI;AAI3C,UAAM,QAAQ,OAAO,MAAM,oBAAoB;AAC/C,QAAI,+BAAQ,IAAI;AACf,WAAK,gBAAgB,MAAM,CAAC,EAAE,YAAY;AAAA,IAC3C;AAGA,UAAM,MAAM,MAAM,iBAAiB,MAAM;AAGzC,UAAM,QAAQ,IAAI,UAAU,IAAI,MAAM,YAAY,EAAE,SAAS,MAAM,KAAK,IAAI,MAAM,YAAY,EAAE,SAAS,WAAW,KAAK,IAAI,MAAM,YAAY,EAAE,SAAS,UAAU,KACjK,WACA;AAGH,UAAM,SAAsB;AAAA,MAC3B,IAAI,IAAI;AAAA,MACR,OAAO,IAAI;AAAA,MACX,MAAM,IAAI,eAAe;AAAA,MACzB;AAAA,MACA,KAAK,IAAI;AAAA,MACT,UAAU;AAAA,MACV,QAAQ;AAAA;AAAA;AAAA,MAGR,aAAa,IAAI;AAAA,MACjB,WAAW,IAAI;AAAA,MACf,WAAW,IAAI;AAAA,IAChB;AAGA,QAAI,iBAAiB;AACpB,UAAI;AACH,cAAM,WAAW,MAAM,KAAK,mBAAmB,MAAM;AACrD,YAAI,UAAU;AACb,iBAAO,WAAW;AAAA,QACnB;AAAA,MACD,QAAQ;AAAA,MAER;AAAA,IACD;AAGA,WAAO,OAAO,MAAM,sBAAsB,OAAO,MAAM,QAAQ;AAC/D,QAAI,OAAO,UAAU;AACpB,iBAAW,WAAW,OAAO,UAAU;AACtC,gBAAQ,OAAO,MAAM,sBAAsB,QAAQ,MAAM,QAAQ;AAAA,MAClE;AAAA,IACD;AAEA,WAAO;AAAA,EACR;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,MAAM,QAAuC;AAClD,UAAM,IAAI,MAAM,6GAA6G;AAAA,EAC9H;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,mBAAmB,YAAsD;AACtF,QAAI;AACH,YAAM,WAAW,MAAM,yBAAyB,UAAU;AAE1D,aAAO,SAAS,IAAI,cAAY;AAAA,Q
AC/B,IAAI,QAAQ;AAAA,QACZ,MAAM,QAAQ;AAAA,QACd,WAAW,QAAQ;AAAA,QACnB,QAAQ;AAAA;AAAA,QACR,GAAI,QAAQ,aAAa,EAAE,WAAW,QAAQ,UAAU;AAAA,MACzD,EAAE;AAAA,IACH,QAAQ;AACP,aAAO,CAAC;AAAA,IACT;AAAA,EACD;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,WAAW,OAAsD;AACtE,UAAM,EAAE,UAAU,IAAI;AAEtB,UAAM,MAAM,MAAM,iBAAiB,SAAS;AAG5C,UAAM,gBAAgB,MAAM,sBAAsB,IAAI,MAAM,QAAQ;AAEpE,WAAO;AAAA,MACN,IAAI,IAAI;AAAA,MACR,MAAM;AAAA,MACN,QAAQ;AAAA;AAAA,MACR,YAAY,IAAI;AAAA,IACjB;AAAA,EACD;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,cAAc,OAAmD;AACtE,UAAM,EAAE,QAAQ,KAAK,IAAI;AAKzB,UAAM,gBAAgB,sBAAsB,gBAAgB,IAAI;AAEhE,UAAM,SAAS,MAAM,oBAAoB,QAAQ,aAAa;AAE9D,WAAO;AAAA,MACN,IAAI,OAAO;AAAA,MACX,KAAK,OAAO;AAAA,MACZ,YAAY,OAAO;AAAA,IACpB;AAAA,EACD;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,cAAc,OAAmD;AACtE,UAAM,EAAE,WAAW,KAAK,IAAI;AAG5B,UAAM,gBAAgB,sBAAsB,gBAAgB,IAAI;AAEhE,UAAM,SAAS,MAAM,oBAAoB,WAAW,aAAa;AAEjE,WAAO;AAAA,MACN,IAAI,OAAO;AAAA,MACX,KAAK,OAAO;AAAA,MACZ,YAAY,OAAO;AAAA,IACpB;AAAA,EACD;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,YAAY,OAAqD;AACtE,UAAM,EAAE,OAAO,MAAM,QAAQ,QAAQ,IAAI;AAGzC,UAAM,mBAAmB,WAAW,QAAQ,IAAI,mBAAmB,KAAK;AAExE,QAAI,CAAC,kBAAkB;AACtB,YAAM,IAAI,MAAM,0KAA0K;AAAA,IAC3L;AAEA,UAAM,SAAS,MAAM,kBAAkB,OAAO,MAAM,kBAAkB,MAAM;AAE5E,WAAO;AAAA,MACN,IAAI,OAAO;AAAA,MACX,KAAK,OAAO;AAAA,IACb;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,iBAAiB,OAA0D;AAtNlF;AAuNE,UAAM,EAAE,UAAU,OAAO,MAAM,QAAQ,QAAQ,IAAI;AAGnD,UAAM,cAAc,MAAM,iBAAiB,QAAQ;AAGnD,UAAM,QAAQ,SAAS,MAAM,oBAAoB;AACjD,UAAM,mBAAmB,aAAW,oCAAQ,OAAR,mBAAY,kBAAiB,QAAQ,IAAI,mBAAmB,KAAK;AAErG,QAAI,CAAC,kBAAkB;AACtB,YAAM,IAAI,MAAM,6HAA6H;AAAA,IAC9I;AAGA,UAAM,SAAS,MAAM;AAAA,MACpB;AAAA,MACA;AAAA,MACA;AAAA,MACA,YAAY;AAAA;AAAA,MACZ;AAAA,IACD;AAEA,WAAO;AAAA,MACN,IAAI,OAAO;AAAA,MACX,KAAK,OAAO;AAAA,IACb;AAAA,EACD;AACD;;;ACvOO,IAAM,iCAAN,MAAqC;AAAA;AAAA;AAAA;AAAA,EAI3C,OAAO,OAAO,UAAkD;AAC/D,YAAQ,UAAU;AAAA,MACjB,KAAK;AACJ,eAAO,IAAI,8BAA8B;AAAA,MAC1C,KAAK;AACJ,eAAO,IAAI,8BAA8B;AAAA,MAC1C;AACC,cAAM,IAAI,MAAM,0CAA0C,QAAQ,EAAE;AAAA,IACtE;AAAA,EACD;AACD;","names":["join","extname"]}
@@ -1 +0,0 @@
-
{"version":3,"sources":["../src/lib/ClaudeService.ts"],"sourcesContent":["import { detectClaudeCli, launchClaude, launchClaudeInNewTerminalWindow, ClaudeCliOptions } from '../utils/claude.js'\nimport { PromptTemplateManager, TemplateVariables } from './PromptTemplateManager.js'\nimport { SettingsManager, IloomSettings } from './SettingsManager.js'\nimport { logger } from '../utils/logger.js'\n\nexport interface ClaudeWorkflowOptions {\n\ttype: 'issue' | 'pr' | 'regular'\n\tissueNumber?: string | number\n\tprNumber?: number\n\ttitle?: string\n\tworkspacePath: string\n\tport?: number\n\theadless?: boolean\n\tbranchName?: string\n\toneShot?: import('../types/index.js').OneShotMode\n\tsetArguments?: string[] // Raw --set arguments to forward\n\texecutablePath?: string // Executable path to use for spin command\n}\n\nexport class ClaudeService {\n\tprivate templateManager: PromptTemplateManager\n\tprivate settingsManager: SettingsManager\n\tprivate settings?: IloomSettings\n\n\tconstructor(templateManager?: PromptTemplateManager, settingsManager?: SettingsManager) {\n\t\tthis.templateManager = templateManager ?? new PromptTemplateManager()\n\t\tthis.settingsManager = settingsManager ?? new SettingsManager()\n\t}\n\n\t/**\n\t * Check if Claude CLI is available\n\t */\n\tasync isAvailable(): Promise<boolean> {\n\t\treturn detectClaudeCli()\n\t}\n\n\t/**\n\t * Get the appropriate permission mode for a workflow type\n\t */\n\tprivate getPermissionModeForWorkflow(\n\t\ttype: 'issue' | 'pr' | 'regular'\n\t): ClaudeCliOptions['permissionMode'] {\n\t\t// Check settings for configured permission mode\n\t\tif (this.settings?.workflows) {\n\t\t\tconst workflowConfig =\n\t\t\t\ttype === 'issue'\n\t\t\t\t\t? this.settings.workflows.issue\n\t\t\t\t\t: type === 'pr'\n\t\t\t\t\t\t? 
this.settings.workflows.pr\n\t\t\t\t\t\t: this.settings.workflows.regular\n\n\t\t\tif (workflowConfig?.permissionMode) {\n\t\t\t\treturn workflowConfig.permissionMode\n\t\t\t}\n\t\t}\n\n\t\t// Fall back to current defaults\n\t\tif (type === 'issue') {\n\t\t\treturn 'acceptEdits'\n\t\t}\n\t\t// For PR and regular workflows, use default permissions\n\t\treturn 'default'\n\t}\n\n\t/**\n\t * Launch Claude for a specific workflow\n\t */\n\tasync launchForWorkflow(options: ClaudeWorkflowOptions): Promise<string | void> {\n\t\tconst { type, issueNumber, prNumber, title, workspacePath, port, headless = false, branchName, oneShot = 'default', setArguments, executablePath } = options\n\n\t\ttry {\n\t\t\t// Load settings if not already cached\n\t\t\t// Settings are pre-validated at CLI startup, so no error handling needed here\n\t\t\tthis.settings ??= await this.settingsManager.loadSettings()\n\n\t\t\t// Build template variables\n\t\t\tconst variables: TemplateVariables = {\n\t\t\t\tWORKSPACE_PATH: workspacePath,\n\t\t\t}\n\n\t\t\tif (issueNumber !== undefined) {\n\t\t\t\tvariables.ISSUE_NUMBER = issueNumber\n\t\t\t}\n\n\t\t\tif (prNumber !== undefined) {\n\t\t\t\tvariables.PR_NUMBER = prNumber\n\t\t\t}\n\n\t\t\tif (title !== undefined) {\n\t\t\t\tif (type === 'issue') {\n\t\t\t\t\tvariables.ISSUE_TITLE = title\n\t\t\t\t} else if (type === 'pr') {\n\t\t\t\t\tvariables.PR_TITLE = title\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tif (port !== undefined) {\n\t\t\t\tvariables.PORT = port\n\t\t\t}\n\n\t\t\t// Get the prompt from template manager\n\t\t\tconst prompt = await this.templateManager.getPrompt(type, variables)\n\n\t\t\t// Determine permission mode (model uses Claude's default for start command)\n\t\t\tconst permissionMode = this.getPermissionModeForWorkflow(type)\n\n\t\t\t// Display warning if bypassPermissions mode is used\n\t\t\tif (permissionMode === 'bypassPermissions') {\n\t\t\t\tlogger.warn(\n\t\t\t\t\t'⚠️ WARNING: Using bypassPermissions mode - Claude will execute all tool calls without confirmation. ' +\n\t\t\t\t\t\t'This can be dangerous. 
Use with caution.'\n\t\t\t\t)\n\t\t\t}\n\n\t\t\t// Build Claude CLI options\n\t\t\tconst claudeOptions: ClaudeCliOptions = {\n\t\t\t\taddDir: workspacePath,\n\t\t\t\theadless,\n\t\t\t}\n\n\t\t\t// Add permission mode if not default\n\t\t\tif (permissionMode !== undefined && permissionMode !== 'default') {\n\t\t\t\tclaudeOptions.permissionMode = permissionMode\n\t\t\t}\n\n\t\t\t// Add optional branch name for terminal coloring\n\t\t\tif (branchName !== undefined) {\n\t\t\t\tclaudeOptions.branchName = branchName\n\t\t\t}\n\n\t\t\t// Add optional port for terminal window export\n\t\t\tif (port !== undefined) {\n\t\t\t\tclaudeOptions.port = port\n\t\t\t}\n\n\t\t\t// Add optional setArguments for forwarding\n\t\t\tif (setArguments !== undefined) {\n\t\t\t\tclaudeOptions.setArguments = setArguments\n\t\t\t}\n\n\t\t\t// Add optional executablePath for spin command\n\t\t\tif (executablePath !== undefined) {\n\t\t\t\tclaudeOptions.executablePath = executablePath\n\t\t\t}\n\n\t\t\tlogger.debug('Launching Claude for workflow', {\n\t\t\t\ttype,\n\t\t\t\tpermissionMode,\n\t\t\t\theadless,\n\t\t\t\tworkspacePath,\n\t\t\t})\n\n\t\t\t// Launch Claude\n\t\t\tif (headless) {\n\t\t\t\t// Headless mode: use simple launchClaude\n\t\t\t\treturn await launchClaude(prompt, claudeOptions)\n\t\t\t} else {\n\t\t\t\t// Interactive workflow mode: use terminal window launcher\n\t\t\t\t// This is the \"end of il start\" behavior\n\t\t\t\tif (!claudeOptions.addDir) {\n\t\t\t\t\tthrow new Error('workspacePath required for interactive workflow launch')\n\t\t\t\t}\n\n\t\t\t\treturn await launchClaudeInNewTerminalWindow(prompt, {\n\t\t\t\t\t...claudeOptions,\n\t\t\t\t\tworkspacePath: claudeOptions.addDir,\n\t\t\t\t\toneShot,\n\t\t\t\t})\n\t\t\t}\n\t\t} catch (error) {\n\t\t\tlogger.error('Failed to launch Claude for workflow', { error, options })\n\t\t\tthrow 
error\n\t\t}\n\t}\n\n}\n"],"mappings":";;;;;;;;;;;;;;;;;AAmBO,IAAM,gBAAN,MAAoB;AAAA,EAK1B,YAAY,iBAAyC,iBAAmC;AACvF,SAAK,kBAAkB,mBAAmB,IAAI,sBAAsB;AACpE,SAAK,kBAAkB,mBAAmB,IAAI,gBAAgB;AAAA,EAC/D;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,cAAgC;AACrC,WAAO,gBAAgB;AAAA,EACxB;AAAA;AAAA;AAAA;AAAA,EAKQ,6BACP,MACqC;AAzCvC;AA2CE,SAAI,UAAK,aAAL,mBAAe,WAAW;AAC7B,YAAM,iBACL,SAAS,UACN,KAAK,SAAS,UAAU,QACxB,SAAS,OACR,KAAK,SAAS,UAAU,KACxB,KAAK,SAAS,UAAU;AAE7B,UAAI,iDAAgB,gBAAgB;AACnC,eAAO,eAAe;AAAA,MACvB;AAAA,IACD;AAGA,QAAI,SAAS,SAAS;AACrB,aAAO;AAAA,IACR;AAEA,WAAO;AAAA,EACR;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,kBAAkB,SAAwD;AAC/E,UAAM,EAAE,MAAM,aAAa,UAAU,OAAO,eAAe,MAAM,WAAW,OAAO,YAAY,UAAU,WAAW,cAAc,eAAe,IAAI;AAErJ,QAAI;AAGH,WAAK,aAAa,MAAM,KAAK,gBAAgB,aAAa;AAG1D,YAAM,YAA+B;AAAA,QACpC,gBAAgB;AAAA,MACjB;AAEA,UAAI,gBAAgB,QAAW;AAC9B,kBAAU,eAAe;AAAA,MAC1B;AAEA,UAAI,aAAa,QAAW;AAC3B,kBAAU,YAAY;AAAA,MACvB;AAEA,UAAI,UAAU,QAAW;AACxB,YAAI,SAAS,SAAS;AACrB,oBAAU,cAAc;AAAA,QACzB,WAAW,SAAS,MAAM;AACzB,oBAAU,WAAW;AAAA,QACtB;AAAA,MACD;AAEA,UAAI,SAAS,QAAW;AACvB,kBAAU,OAAO;AAAA,MAClB;AAGA,YAAM,SAAS,MAAM,KAAK,gBAAgB,UAAU,MAAM,SAAS;AAGnE,YAAM,iBAAiB,KAAK,6BAA6B,IAAI;AAG7D,UAAI,mBAAmB,qBAAqB;AAC3C,eAAO;AAAA,UACN;AAAA,QAED;AAAA,MACD;AAGA,YAAM,gBAAkC;AAAA,QACvC,QAAQ;AAAA,QACR;AAAA,MACD;AAGA,UAAI,mBAAmB,UAAa,mBAAmB,WAAW;AACjE,sBAAc,iBAAiB;AAAA,MAChC;AAGA,UAAI,eAAe,QAAW;AAC7B,sBAAc,aAAa;AAAA,MAC5B;AAGA,UAAI,SAAS,QAAW;AACvB,sBAAc,OAAO;AAAA,MACtB;AAGA,UAAI,iBAAiB,QAAW;AAC/B,sBAAc,eAAe;AAAA,MAC9B;AAGA,UAAI,mBAAmB,QAAW;AACjC,sBAAc,iBAAiB;AAAA,MAChC;AAEA,aAAO,MAAM,iCAAiC;AAAA,QAC7C;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACD,CAAC;AAGD,UAAI,UAAU;AAEb,eAAO,MAAM,aAAa,QAAQ,aAAa;AAAA,MAChD,OAAO;AAGN,YAAI,CAAC,cAAc,QAAQ;AAC1B,gBAAM,IAAI,MAAM,wDAAwD;AAAA,QACzE;AAEA,eAAO,MAAM,gCAAgC,QAAQ;AAAA,UACpD,GAAG;AAAA,UACH,eAAe,cAAc;AAAA,UAC7B;AAAA,QACD,CAAC;AAAA,MACF;AAAA,IACD,SAAS,OAAO;AACf,aAAO,MAAM,wCAAwC,EAAE,OAAO,QAAQ,CAAC;AACvE,YAAM;AAAA,IACP;AAAA,EACD;AAED;","names":[]}
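For readers skimming the removed source map above: the permission-mode fallback in the embedded ClaudeService.ts source reduces to the small function below. This is an illustrative sketch only, not code shipped in the package; the type names are trimmed to just the fields the function reads.

```ts
// Sketch of the getPermissionModeForWorkflow() logic from the embedded ClaudeService.ts
// source above. Types are pared down to the fields actually consulted.
type PermissionMode = 'plan' | 'acceptEdits' | 'bypassPermissions' | 'default'
type WorkflowType = 'issue' | 'pr' | 'regular'

interface IloomSettingsSubset {
	workflows?: Partial<Record<WorkflowType, { permissionMode?: PermissionMode }>>
}

function resolvePermissionMode(
	settings: IloomSettingsSubset | undefined,
	type: WorkflowType
): PermissionMode {
	// A per-workflow permissionMode configured in settings wins outright
	const configured = settings?.workflows?.[type]?.permissionMode
	if (configured) {
		return configured
	}
	// Otherwise fall back to the current defaults: issue looms accept edits,
	// PR and regular looms keep Claude's default permission prompts
	return type === 'issue' ? 'acceptEdits' : 'default'
}

// resolvePermissionMode(undefined, 'issue')                                      -> 'acceptEdits'
// resolvePermissionMode({ workflows: { pr: { permissionMode: 'plan' } } }, 'pr') -> 'plan'
```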
@@ -1 +0,0 @@
-
{"version":3,"sources":["../src/lib/PromptTemplateManager.ts"],"sourcesContent":["import { readFile } from 'fs/promises'\nimport { accessSync } from 'fs'\nimport path from 'path'\nimport { fileURLToPath } from 'url'\nimport Handlebars from 'handlebars'\nimport { logger } from '../utils/logger.js'\n\n// Register raw helper to handle content with curly braces (e.g., JSON)\n// Usage: {{{{raw}}}}{{VARIABLE}}{{{{/raw}}}}\n// This outputs the variable content as-is without Handlebars parsing its curly braces\nHandlebars.registerHelper('raw', function (this: unknown, options: Handlebars.HelperOptions) {\n\treturn options.fn(this)\n})\n\nexport interface TemplateVariables {\n\tISSUE_NUMBER?: string | number\n\tPR_NUMBER?: number\n\tISSUE_TITLE?: string\n\tPR_TITLE?: string\n\tWORKSPACE_PATH?: string\n\tPORT?: number\n\tONE_SHOT_MODE?: boolean\n\tINTERACTIVE_MODE?: boolean\n\tSETTINGS_SCHEMA?: string\n\tSETTINGS_GLOBAL_JSON?: string\n\tSETTINGS_JSON?: string\n\tSETTINGS_LOCAL_JSON?: string\n\tSHELL_TYPE?: string\n\tSHELL_CONFIG_PATH?: string\n\tSHELL_CONFIG_CONTENT?: string\n\tREMOTES_INFO?: string\n\tMULTIPLE_REMOTES?: string\n\tSINGLE_REMOTE?: string\n\tSINGLE_REMOTE_NAME?: string\n\tSINGLE_REMOTE_URL?: string\n\tNO_REMOTES?: string\n\tREADME_CONTENT?: string\n\tSETTINGS_SCHEMA_CONTENT?: string\n\tFIRST_TIME_USER?: boolean\n\tVSCODE_SETTINGS_GITIGNORED?: string\n\t// Session summary template variables\n\tSESSION_CONTEXT?: string // Session ID for Claude to reference its conversation\n\tBRANCH_NAME?: string // Branch being finished\n\tLOOM_TYPE?: string // 'issue' or 'pr'\n\tCOMPACT_SUMMARIES?: string // Extracted compact summaries from session transcript\n\tRECAP_DATA?: string // Formatted recap data (goal, complexity, entries, artifacts)\n\t// Draft PR mode variables - mutually exclusive with standard issue mode\n\tDRAFT_PR_NUMBER?: number // PR number for draft PR workflow\n\tDRAFT_PR_MODE?: boolean // True when using github-draft-pr merge mode\n\tSTANDARD_ISSUE_MODE?: boolean // True when using standard issue commenting (not draft PR)\n\t// Multi-language support variables - mutually exclusive\n\tHAS_PACKAGE_JSON?: boolean // True when project has package.json\n\tNO_PACKAGE_JSON?: boolean // True when project does not have package.json (non-Node.js projects)\n}\n\nexport class PromptTemplateManager {\n\tprivate templateDir: string\n\n\tconstructor(templateDir?: string) {\n\t\tif (templateDir) {\n\t\t\tthis.templateDir = templateDir\n\t\t} else {\n\t\t\t// Find templates relative to the package installation\n\t\t\t// When running from dist/, templates are copied to dist/prompts/\n\t\t\tconst currentFileUrl = import.meta.url\n\t\t\tconst currentFilePath = fileURLToPath(currentFileUrl)\n\t\t\tconst distDir = path.dirname(currentFilePath) // dist directory (may be chunked file location)\n\n\t\t\t// Walk up to find the dist directory (in case of chunked files)\n\t\t\tlet templateDir = path.join(distDir, 'prompts')\n\t\t\tlet currentDir = distDir\n\n\t\t\t// Try to find the prompts directory by walking up\n\t\t\twhile (currentDir !== path.dirname(currentDir)) {\n\t\t\t\tconst candidatePath = path.join(currentDir, 'prompts')\n\t\t\t\ttry {\n\t\t\t\t\t// Check if this directory exists (sync check for constructor)\n\t\t\t\t\taccessSync(candidatePath)\n\t\t\t\t\ttemplateDir = candidatePath\n\t\t\t\t\tbreak\n\t\t\t\t} catch {\n\t\t\t\t\tcurrentDir = path.dirname(currentDir)\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tthis.templateDir = templateDir\n\t\t\tlogger.debug('PromptTemplateManager initialized', 
{\n\t\t\t\tcurrentFilePath,\n\t\t\t\tdistDir,\n\t\t\t\ttemplateDir: this.templateDir\n\t\t\t})\n\t\t}\n\t}\n\n\t/**\n\t * Load a template file by name\n\t */\n\tasync loadTemplate(templateName: 'issue' | 'pr' | 'regular' | 'init' | 'session-summary'): Promise<string> {\n\t\tconst templatePath = path.join(this.templateDir, `${templateName}-prompt.txt`)\n\n\t\tlogger.debug('Loading template', {\n\t\t\ttemplateName,\n\t\t\ttemplateDir: this.templateDir,\n\t\t\ttemplatePath\n\t\t})\n\n\t\ttry {\n\t\t\treturn await readFile(templatePath, 'utf-8')\n\t\t} catch (error) {\n\t\t\tlogger.error('Failed to load template', { templateName, templatePath, error })\n\t\t\tthrow new Error(`Template not found: ${templatePath}`)\n\t\t}\n\t}\n\n\t/**\n\t * Substitute variables in a template string using Handlebars\n\t */\n\tsubstituteVariables(template: string, variables: TemplateVariables): string {\n\t\tconst compiled = Handlebars.compile(template, { noEscape: true })\n\t\treturn compiled(variables)\n\t}\n\n\t/**\n\t * Get a fully processed prompt for a workflow type\n\t */\n\tasync getPrompt(\n\t\ttype: 'issue' | 'pr' | 'regular' | 'init' | 'session-summary',\n\t\tvariables: TemplateVariables\n\t): Promise<string> {\n\t\tconst template = await this.loadTemplate(type)\n\t\treturn this.substituteVariables(template, variables)\n\t}\n}\n"],"mappings":";;;;;;AAAA,SAAS,gBAAgB;AACzB,SAAS,kBAAkB;AAC3B,OAAO,UAAU;AACjB,SAAS,qBAAqB;AAC9B,OAAO,gBAAgB;AAMvB,WAAW,eAAe,OAAO,SAAyB,SAAmC;AAC5F,SAAO,QAAQ,GAAG,IAAI;AACvB,CAAC;AA2CM,IAAM,wBAAN,MAA4B;AAAA,EAGlC,YAAY,aAAsB;AACjC,QAAI,aAAa;AAChB,WAAK,cAAc;AAAA,IACpB,OAAO;AAGN,YAAM,iBAAiB,YAAY;AACnC,YAAM,kBAAkB,cAAc,cAAc;AACpD,YAAM,UAAU,KAAK,QAAQ,eAAe;AAG5C,UAAIA,eAAc,KAAK,KAAK,SAAS,SAAS;AAC9C,UAAI,aAAa;AAGjB,aAAO,eAAe,KAAK,QAAQ,UAAU,GAAG;AAC/C,cAAM,gBAAgB,KAAK,KAAK,YAAY,SAAS;AACrD,YAAI;AAEH,qBAAW,aAAa;AACxB,UAAAA,eAAc;AACd;AAAA,QACD,QAAQ;AACP,uBAAa,KAAK,QAAQ,UAAU;AAAA,QACrC;AAAA,MACD;AAEA,WAAK,cAAcA;AACnB,aAAO,MAAM,qCAAqC;AAAA,QACjD;AAAA,QACA;AAAA,QACA,aAAa,KAAK;AAAA,MACnB,CAAC;AAAA,IACF;AAAA,EACD;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,aAAa,cAAwF;AAC1G,UAAM,eAAe,KAAK,KAAK,KAAK,aAAa,GAAG,YAAY,aAAa;AAE7E,WAAO,MAAM,oBAAoB;AAAA,MAChC;AAAA,MACA,aAAa,KAAK;AAAA,MAClB;AAAA,IACD,CAAC;AAED,QAAI;AACH,aAAO,MAAM,SAAS,cAAc,OAAO;AAAA,IAC5C,SAAS,OAAO;AACf,aAAO,MAAM,2BAA2B,EAAE,cAAc,cAAc,MAAM,CAAC;AAC7E,YAAM,IAAI,MAAM,uBAAuB,YAAY,EAAE;AAAA,IACtD;AAAA,EACD;AAAA;AAAA;AAAA;AAAA,EAKA,oBAAoB,UAAkB,WAAsC;AAC3E,UAAM,WAAW,WAAW,QAAQ,UAAU,EAAE,UAAU,KAAK,CAAC;AAChE,WAAO,SAAS,SAAS;AAAA,EAC1B;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,UACL,MACA,WACkB;AAClB,UAAM,WAAW,MAAM,KAAK,aAAa,IAAI;AAC7C,WAAO,KAAK,oBAAoB,UAAU,SAAS;AAAA,EACpD;AACD;","names":["templateDir"]}
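As a quick illustration of the `raw` block helper and `noEscape` compile option documented in the embedded PromptTemplateManager.ts source above, the following standalone sketch shows what the helper does to mustache-like content. The template text here is invented for the example; the behaviour shown is standard Handlebars.

```ts
import Handlebars from 'handlebars'

// Same registration as in the embedded source: a raw block helper that returns
// its body untouched.
Handlebars.registerHelper('raw', function (this: unknown, options: Handlebars.HelperOptions) {
	return options.fn(this)
})

// noEscape mirrors substituteVariables() in the embedded source; the template
// string itself is made up for this example.
const compiled = Handlebars.compile(
	'Workspace: {{WORKSPACE_PATH}}\n{{{{raw}}}}{{LEFT_FOR_LATER}}{{{{/raw}}}}',
	{ noEscape: true }
)

console.log(compiled({ WORKSPACE_PATH: '/tmp/loom' }))
// Prints "Workspace: /tmp/loom" followed by the literal text "{{LEFT_FOR_LATER}}":
// the raw block body is passed through without Handlebars interpreting its braces.
```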
@@ -1 +0,0 @@
-
{"version":3,"sources":["../src/lib/MetadataManager.ts"],"sourcesContent":["import path from 'path'\nimport os from 'os'\nimport fs from 'fs-extra'\nimport { getLogger } from '../utils/logger-context.js'\nimport type { ProjectCapability } from '../types/loom.js'\n\n/**\n * Schema for metadata JSON file\n * Stored in ~/.config/iloom-ai/looms/\n */\nexport interface MetadataFile {\n description: string\n created_at?: string\n version: number\n // Additional metadata fields (v2)\n branchName?: string\n worktreePath?: string\n issueType?: 'branch' | 'issue' | 'pr'\n issue_numbers?: string[]\n pr_numbers?: string[]\n issueTracker?: string\n colorHex?: string // Stored hex color (e.g., \"#dcebff\") - robust against palette changes\n sessionId?: string // Claude Code session ID for resume support\n projectPath?: string // Main worktree path (project root) - enables project identification\n issueUrls?: Record<string, string> // Map of issue ID to URL in the issue tracker\n prUrls?: Record<string, string> // Map of PR number to URL in the issue tracker\n draftPrNumber?: number // Draft PR number if github-draft-pr mode was used\n capabilities?: ProjectCapability[] // Detected project capabilities\n parentLoom?: {\n type: 'issue' | 'pr' | 'branch'\n identifier: string | number\n branchName: string\n worktreePath: string\n databaseBranch?: string\n }\n}\n\n/**\n * Input for writing metadata (all fields except version and created_at)\n * Note: issueTracker is required because every loom should have an associated\n * issue tracker provider (defaults to 'github' via IssueTrackerFactory)\n */\nexport interface WriteMetadataInput {\n description: string\n branchName: string\n worktreePath: string\n issueType: 'branch' | 'issue' | 'pr'\n issue_numbers: string[]\n pr_numbers: string[]\n issueTracker: string\n colorHex: string // Hex color (e.g., \"#dcebff\") - robust against palette changes\n sessionId: string // Claude Code session ID for resume support (required for new looms)\n projectPath: string // Main worktree path (project root) - required for new looms\n issueUrls: Record<string, string> // Map of issue ID to URL in the issue tracker\n prUrls: Record<string, string> // Map of PR number to URL in the issue tracker\n draftPrNumber?: number // Draft PR number for github-draft-pr mode\n capabilities: ProjectCapability[] // Detected project capabilities (required for new looms)\n parentLoom?: {\n type: 'issue' | 'pr' | 'branch'\n identifier: string | number\n branchName: string\n worktreePath: string\n databaseBranch?: string\n }\n}\n\n/**\n * Result of reading metadata for a worktree\n */\nexport interface LoomMetadata {\n status?: 'active' | 'finished'\n finishedAt?: string | null\n description: string\n created_at: string | null\n branchName: string | null\n worktreePath: string | null\n issueType: 'branch' | 'issue' | 'pr' | null\n issue_numbers: string[]\n pr_numbers: string[]\n issueTracker: string | null\n colorHex: string | null // Hex color (e.g., \"#dcebff\") - robust against palette changes\n sessionId: string | null // Claude Code session ID (null for legacy looms)\n projectPath: string | null // Main worktree path (null for legacy looms)\n issueUrls: Record<string, string> // Map of issue ID to URL ({} for legacy looms)\n prUrls: Record<string, string> // Map of PR number to URL ({} for legacy looms)\n draftPrNumber: number | null // Draft PR number (null if not draft mode)\n capabilities: ProjectCapability[] // Detected project capabilities (empty for legacy looms)\n parentLoom: {\n type: 
'issue' | 'pr' | 'branch'\n identifier: string | number\n branchName: string\n worktreePath: string\n databaseBranch?: string\n } | null\n}\n\n/**\n * MetadataManager: Manage loom metadata persistence\n *\n * Stores loom metadata in ~/.config/iloom-ai/looms/ directory.\n * Each worktree gets a JSON file named by slugifying its absolute path.\n *\n * Per spec section 2.2:\n * - Filename derived from worktree absolute path\n * - Path separators replaced with double underscores\n * - Non-alphanumeric chars (except _ and -) replaced with hyphens\n */\nexport class MetadataManager {\n private readonly loomsDir: string\n private readonly finishedDir: string\n\n constructor() {\n this.loomsDir = path.join(os.homedir(), '.config', 'iloom-ai', 'looms')\n this.finishedDir = path.join(this.loomsDir, 'finished')\n }\n\n /**\n * Convert MetadataFile to LoomMetadata with default values for optional fields\n */\n private toMetadata(data: MetadataFile): LoomMetadata {\n return {\n description: data.description,\n created_at: data.created_at ?? null,\n branchName: data.branchName ?? null,\n worktreePath: data.worktreePath ?? null,\n issueType: data.issueType ?? null,\n issue_numbers: data.issue_numbers ?? [],\n pr_numbers: data.pr_numbers ?? [],\n issueTracker: data.issueTracker ?? null,\n colorHex: data.colorHex ?? null,\n sessionId: data.sessionId ?? null,\n projectPath: data.projectPath ?? null,\n issueUrls: data.issueUrls ?? {},\n prUrls: data.prUrls ?? {},\n draftPrNumber: data.draftPrNumber ?? null,\n capabilities: data.capabilities ?? [],\n parentLoom: data.parentLoom ?? null,\n }\n }\n\n /**\n * Convert worktree path to filename slug per spec section 2.2\n *\n * Algorithm:\n * 1. Trim trailing slashes\n * 2. Replace all path separators (/ or \\) with __ (double underscore)\n * 3. Replace any other non-alphanumeric characters (except _ and -) with -\n * 4. Append .json\n *\n * Example:\n * - Worktree: /Users/jane/dev/repo\n * - Filename: _Users__jane__dev__repo.json\n */\n slugifyPath(worktreePath: string): string {\n // 1. Trim trailing slashes\n let slug = worktreePath.replace(/[/\\\\]+$/, '')\n\n // 2. Replace path separators with double underscores\n slug = slug.replace(/[/\\\\]/g, '___')\n\n // 3. Replace non-alphanumeric chars (except _ and -) with hyphens\n slug = slug.replace(/[^a-zA-Z0-9_-]/g, '-')\n\n // 4. Append .json\n return `${slug}.json`\n }\n\n /**\n * Get the full path to the metadata file for a worktree\n */\n private getFilePath(worktreePath: string): string {\n const filename = this.slugifyPath(worktreePath)\n return path.join(this.loomsDir, filename)\n }\n\n /**\n * Get the full path to the metadata file for a worktree (public API)\n * Used by other services that need to reference the metadata file location\n * (e.g., MCP servers that need to read loom context)\n */\n getMetadataFilePath(worktreePath: string): string {\n return this.getFilePath(worktreePath)\n }\n\n /**\n * Write metadata for a worktree (spec section 3.1)\n *\n * @param worktreePath - Absolute path to the worktree (used for file naming)\n * @param input - Metadata to write (description plus additional fields)\n */\n async writeMetadata(worktreePath: string, input: WriteMetadataInput): Promise<void> {\n try {\n // 1. Ensure looms directory exists\n await fs.ensureDir(this.loomsDir, { mode: 0o755 })\n\n // 2. 
Create JSON content\n const content: MetadataFile = {\n description: input.description,\n created_at: new Date().toISOString(),\n version: 1,\n branchName: input.branchName,\n worktreePath: input.worktreePath,\n issueType: input.issueType,\n issue_numbers: input.issue_numbers,\n pr_numbers: input.pr_numbers,\n issueTracker: input.issueTracker,\n colorHex: input.colorHex,\n sessionId: input.sessionId,\n projectPath: input.projectPath,\n issueUrls: input.issueUrls,\n prUrls: input.prUrls,\n capabilities: input.capabilities,\n ...(input.draftPrNumber && { draftPrNumber: input.draftPrNumber }),\n ...(input.parentLoom && { parentLoom: input.parentLoom }),\n }\n\n // 3. Write to slugified filename\n const filePath = this.getFilePath(worktreePath)\n await fs.writeFile(filePath, JSON.stringify(content, null, 2), { mode: 0o644 })\n\n getLogger().debug(`Metadata written for worktree: ${worktreePath}`)\n } catch (error) {\n // Log warning but don't throw - metadata is supplementary\n getLogger().warn(\n `Failed to write metadata for worktree: ${error instanceof Error ? error.message : String(error)}`\n )\n }\n }\n\n /**\n * Read metadata for a worktree (spec section 3.2)\n *\n * @param worktreePath - Absolute path to the worktree\n * @returns The metadata object with all fields, or null if not found/invalid\n */\n async readMetadata(worktreePath: string): Promise<LoomMetadata | null> {\n try {\n const filePath = this.getFilePath(worktreePath)\n\n // Check if file exists\n if (!(await fs.pathExists(filePath))) {\n return null\n }\n\n // Read and parse JSON\n const content = await fs.readFile(filePath, 'utf8')\n const data: MetadataFile = JSON.parse(content)\n\n if (!data.description) {\n return null\n }\n\n return this.toMetadata(data)\n } catch (error) {\n // Return null on any error (graceful degradation per spec)\n getLogger().debug(\n `Could not read metadata for worktree ${worktreePath}: ${error instanceof Error ? error.message : String(error)}`\n )\n return null\n }\n }\n\n /**\n * List all stored loom metadata files\n *\n * Returns an array of LoomMetadata objects for all valid metadata files\n * in the looms directory. Invalid or unreadable files are skipped.\n *\n * @returns Array of LoomMetadata objects from all stored files\n */\n async listAllMetadata(): Promise<LoomMetadata[]> {\n const results: LoomMetadata[] = []\n\n try {\n // Check if looms directory exists\n if (!(await fs.pathExists(this.loomsDir))) {\n return results\n }\n\n // Read all files in looms directory\n const files = await fs.readdir(this.loomsDir)\n\n // Filter to only .json files and read each\n for (const file of files) {\n if (!file.endsWith('.json')) {\n continue\n }\n\n try {\n const filePath = path.join(this.loomsDir, file)\n const content = await fs.readFile(filePath, 'utf8')\n const data: MetadataFile = JSON.parse(content)\n\n // Skip files without required description field\n if (!data.description) {\n continue\n }\n\n results.push(this.toMetadata(data))\n } catch (error) {\n // Skip individual files that fail to parse (graceful degradation)\n getLogger().debug(\n `Skipping metadata file ${file}: ${error instanceof Error ? error.message : String(error)}`\n )\n }\n }\n } catch (error) {\n // Log error but return empty array (graceful degradation)\n getLogger().debug(\n `Could not list metadata files: ${error instanceof Error ? 
error.message : String(error)}`\n )\n }\n\n return results\n }\n\n /**\n * Delete metadata for a worktree (spec section 3.3)\n *\n * Idempotent: silently succeeds if file doesn't exist\n * Non-fatal: logs warning on permission errors but doesn't throw\n *\n * @param worktreePath - Absolute path to the worktree\n */\n async deleteMetadata(worktreePath: string): Promise<void> {\n try {\n const filePath = this.getFilePath(worktreePath)\n\n // Check if file exists - silently return if not\n if (!(await fs.pathExists(filePath))) {\n getLogger().debug(`No metadata file to delete for worktree: ${worktreePath}`)\n return\n }\n\n // Delete the file\n await fs.unlink(filePath)\n getLogger().debug(`Metadata deleted for worktree: ${worktreePath}`)\n } catch (error) {\n // Log warning on permission error but don't throw (per spec section 3.3)\n getLogger().warn(\n `Failed to delete metadata for worktree: ${error instanceof Error ? error.message : String(error)}`\n )\n }\n }\n\n /**\n * Archive metadata for a finished worktree\n *\n * Moves the metadata file to the finished/ subdirectory and adds\n * status: 'finished' and finishedAt timestamp fields.\n *\n * Idempotent: silently succeeds if source file doesn't exist\n * Non-fatal: logs warning on errors but doesn't throw\n *\n * @param worktreePath - Absolute path to the worktree\n */\n async archiveMetadata(worktreePath: string): Promise<void> {\n try {\n const filename = this.slugifyPath(worktreePath)\n const sourcePath = path.join(this.loomsDir, filename)\n\n // Check if source file exists - silently return if not (idempotent)\n if (!(await fs.pathExists(sourcePath))) {\n getLogger().debug(`No metadata file to archive for worktree: ${worktreePath}`)\n return\n }\n\n // Read existing metadata\n const content = await fs.readFile(sourcePath, 'utf8')\n const data: MetadataFile = JSON.parse(content)\n\n // Add finished status and timestamp\n const finishedData = {\n ...data,\n status: 'finished' as const,\n finishedAt: new Date().toISOString(),\n }\n\n // Ensure finished directory exists\n await fs.ensureDir(this.finishedDir, { mode: 0o755 })\n\n // Write to finished subdirectory\n const destPath = path.join(this.finishedDir, filename)\n await fs.writeFile(destPath, JSON.stringify(finishedData, null, 2), { mode: 0o644 })\n\n // Delete original file\n await fs.unlink(sourcePath)\n\n getLogger().debug(`Metadata archived for worktree: ${worktreePath}`)\n } catch (error) {\n // Log warning but don't throw - archiving is supplementary\n getLogger().warn(\n `Failed to archive metadata for worktree: ${error instanceof Error ? 
error.message : String(error)}`\n )\n }\n }\n\n /**\n * List all finished loom metadata files\n *\n * Returns an array of LoomMetadata objects for all finished looms\n * in the finished/ subdirectory, sorted by finishedAt in descending order\n * (most recently finished first).\n *\n * @returns Array of LoomMetadata objects from finished files, sorted by finishedAt desc\n */\n async listFinishedMetadata(): Promise<LoomMetadata[]> {\n const results: LoomMetadata[] = []\n\n try {\n // Check if finished directory exists\n if (!(await fs.pathExists(this.finishedDir))) {\n return results\n }\n\n // Read all files in finished directory\n const files = await fs.readdir(this.finishedDir)\n\n // Filter to only .json files and read each\n for (const file of files) {\n if (!file.endsWith('.json')) {\n continue\n }\n\n try {\n const filePath = path.join(this.finishedDir, file)\n const content = await fs.readFile(filePath, 'utf8')\n const data = JSON.parse(content) as MetadataFile & { status?: string; finishedAt?: string }\n\n // Skip files without required description field\n if (!data.description) {\n continue\n }\n\n const metadata = this.toMetadata(data)\n // Add finished-specific fields\n metadata.status = (data.status as 'active' | 'finished') ?? 'finished'\n metadata.finishedAt = data.finishedAt ?? null\n\n results.push(metadata)\n } catch (error) {\n // Skip individual files that fail to parse (graceful degradation)\n getLogger().warn(\n `Skipping finished metadata file ${file}: ${error instanceof Error ? error.message : String(error)}`\n )\n }\n }\n\n // Sort by finishedAt descending (most recently finished first)\n results.sort((a, b) => {\n const aTime = a.finishedAt ? new Date(a.finishedAt).getTime() : 0\n const bTime = b.finishedAt ? new Date(b.finishedAt).getTime() : 0\n return bTime - aTime\n })\n } catch (error) {\n // Log error but return empty array (graceful degradation)\n getLogger().warn(\n `Could not list finished metadata files: ${error instanceof Error ? 
error.message : String(error)}`\n )\n }\n\n return results\n }\n}\n"],"mappings":";;;;;;AAAA,OAAO,UAAU;AACjB,OAAO,QAAQ;AACf,OAAO,QAAQ;AAyGR,IAAM,kBAAN,MAAsB;AAAA,EAI3B,cAAc;AACZ,SAAK,WAAW,KAAK,KAAK,GAAG,QAAQ,GAAG,WAAW,YAAY,OAAO;AACtE,SAAK,cAAc,KAAK,KAAK,KAAK,UAAU,UAAU;AAAA,EACxD;AAAA;AAAA;AAAA;AAAA,EAKQ,WAAW,MAAkC;AACnD,WAAO;AAAA,MACL,aAAa,KAAK;AAAA,MAClB,YAAY,KAAK,cAAc;AAAA,MAC/B,YAAY,KAAK,cAAc;AAAA,MAC/B,cAAc,KAAK,gBAAgB;AAAA,MACnC,WAAW,KAAK,aAAa;AAAA,MAC7B,eAAe,KAAK,iBAAiB,CAAC;AAAA,MACtC,YAAY,KAAK,cAAc,CAAC;AAAA,MAChC,cAAc,KAAK,gBAAgB;AAAA,MACnC,UAAU,KAAK,YAAY;AAAA,MAC3B,WAAW,KAAK,aAAa;AAAA,MAC7B,aAAa,KAAK,eAAe;AAAA,MACjC,WAAW,KAAK,aAAa,CAAC;AAAA,MAC9B,QAAQ,KAAK,UAAU,CAAC;AAAA,MACxB,eAAe,KAAK,iBAAiB;AAAA,MACrC,cAAc,KAAK,gBAAgB,CAAC;AAAA,MACpC,YAAY,KAAK,cAAc;AAAA,IACjC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAeA,YAAY,cAA8B;AAExC,QAAI,OAAO,aAAa,QAAQ,WAAW,EAAE;AAG7C,WAAO,KAAK,QAAQ,UAAU,KAAK;AAGnC,WAAO,KAAK,QAAQ,mBAAmB,GAAG;AAG1C,WAAO,GAAG,IAAI;AAAA,EAChB;AAAA;AAAA;AAAA;AAAA,EAKQ,YAAY,cAA8B;AAChD,UAAM,WAAW,KAAK,YAAY,YAAY;AAC9C,WAAO,KAAK,KAAK,KAAK,UAAU,QAAQ;AAAA,EAC1C;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,oBAAoB,cAA8B;AAChD,WAAO,KAAK,YAAY,YAAY;AAAA,EACtC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAM,cAAc,cAAsB,OAA0C;AAClF,QAAI;AAEF,YAAM,GAAG,UAAU,KAAK,UAAU,EAAE,MAAM,IAAM,CAAC;AAGjD,YAAM,UAAwB;AAAA,QAC5B,aAAa,MAAM;AAAA,QACnB,aAAY,oBAAI,KAAK,GAAE,YAAY;AAAA,QACnC,SAAS;AAAA,QACT,YAAY,MAAM;AAAA,QAClB,cAAc,MAAM;AAAA,QACpB,WAAW,MAAM;AAAA,QACjB,eAAe,MAAM;AAAA,QACrB,YAAY,MAAM;AAAA,QAClB,cAAc,MAAM;AAAA,QACpB,UAAU,MAAM;AAAA,QAChB,WAAW,MAAM;AAAA,QACjB,aAAa,MAAM;AAAA,QACnB,WAAW,MAAM;AAAA,QACjB,QAAQ,MAAM;AAAA,QACd,cAAc,MAAM;AAAA,QACpB,GAAI,MAAM,iBAAiB,EAAE,eAAe,MAAM,cAAc;AAAA,QAChE,GAAI,MAAM,cAAc,EAAE,YAAY,MAAM,WAAW;AAAA,MACzD;AAGA,YAAM,WAAW,KAAK,YAAY,YAAY;AAC9C,YAAM,GAAG,UAAU,UAAU,KAAK,UAAU,SAAS,MAAM,CAAC,GAAG,EAAE,MAAM,IAAM,CAAC;AAE9E,gBAAU,EAAE,MAAM,kCAAkC,YAAY,EAAE;AAAA,IACpE,SAAS,OAAO;AAEd,gBAAU,EAAE;AAAA,QACV,0CAA0C,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC;AAAA,MAClG;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAM,aAAa,cAAoD;AACrE,QAAI;AACF,YAAM,WAAW,KAAK,YAAY,YAAY;AAG9C,UAAI,CAAE,MAAM,GAAG,WAAW,QAAQ,GAAI;AACpC,eAAO;AAAA,MACT;AAGA,YAAM,UAAU,MAAM,GAAG,SAAS,UAAU,MAAM;AAClD,YAAM,OAAqB,KAAK,MAAM,OAAO;AAE7C,UAAI,CAAC,KAAK,aAAa;AACrB,eAAO;AAAA,MACT;AAEA,aAAO,KAAK,WAAW,IAAI;AAAA,IAC7B,SAAS,OAAO;AAEd,gBAAU,EAAE;AAAA,QACV,wCAAwC,YAAY,KAAK,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC;AAAA,MACjH;AACA,aAAO;AAAA,IACT;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,MAAM,kBAA2C;AAC/C,UAAM,UAA0B,CAAC;AAEjC,QAAI;AAEF,UAAI,CAAE,MAAM,GAAG,WAAW,KAAK,QAAQ,GAAI;AACzC,eAAO;AAAA,MACT;AAGA,YAAM,QAAQ,MAAM,GAAG,QAAQ,KAAK,QAAQ;AAG5C,iBAAW,QAAQ,OAAO;AACxB,YAAI,CAAC,KAAK,SAAS,OAAO,GAAG;AAC3B;AAAA,QACF;AAEA,YAAI;AACF,gBAAM,WAAW,KAAK,KAAK,KAAK,UAAU,IAAI;AAC9C,gBAAM,UAAU,MAAM,GAAG,SAAS,UAAU,MAAM;AAClD,gBAAM,OAAqB,KAAK,MAAM,OAAO;AAG7C,cAAI,CAAC,KAAK,aAAa;AACrB;AAAA,UACF;AAEA,kBAAQ,KAAK,KAAK,WAAW,IAAI,CAAC;AAAA,QACpC,SAAS,OAAO;AAEd,oBAAU,EAAE;AAAA,YACV,0BAA0B,IAAI,KAAK,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC;AAAA,UAC3F;AAAA,QACF;AAAA,MACF;AAAA,IACF,SAAS,OAAO;AAEd,gBAAU,EAAE;AAAA,QACV,kCAAkC,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC;AAAA,MAC1F;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,MAAM,eAAe,cAAqC;AACxD,QAAI;AACF,YAAM,WAAW,KAAK,YAAY,YAAY;AAG9C,UAAI,CAAE,MAAM,GAAG,WAAW,QAAQ,GAAI;AACpC,kBAAU,EAAE,MAAM,4CAA4C,YAAY,EAAE;AAC5E;AAAA,MACF;AAGA,YAAM,GAAG,OAAO,QAAQ;AACxB,gBAAU,EAAE,MAAM,kCAAkC,YAAY,EAAE;AAAA,IACpE,SAAS,OAAO;AAEd,gBAAU,EAAE;AAAA,QACV,2CAA2C,i
BAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC;AAAA,MACnG;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAaA,MAAM,gBAAgB,cAAqC;AACzD,QAAI;AACF,YAAM,WAAW,KAAK,YAAY,YAAY;AAC9C,YAAM,aAAa,KAAK,KAAK,KAAK,UAAU,QAAQ;AAGpD,UAAI,CAAE,MAAM,GAAG,WAAW,UAAU,GAAI;AACtC,kBAAU,EAAE,MAAM,6CAA6C,YAAY,EAAE;AAC7E;AAAA,MACF;AAGA,YAAM,UAAU,MAAM,GAAG,SAAS,YAAY,MAAM;AACpD,YAAM,OAAqB,KAAK,MAAM,OAAO;AAG7C,YAAM,eAAe;AAAA,QACnB,GAAG;AAAA,QACH,QAAQ;AAAA,QACR,aAAY,oBAAI,KAAK,GAAE,YAAY;AAAA,MACrC;AAGA,YAAM,GAAG,UAAU,KAAK,aAAa,EAAE,MAAM,IAAM,CAAC;AAGpD,YAAM,WAAW,KAAK,KAAK,KAAK,aAAa,QAAQ;AACrD,YAAM,GAAG,UAAU,UAAU,KAAK,UAAU,cAAc,MAAM,CAAC,GAAG,EAAE,MAAM,IAAM,CAAC;AAGnF,YAAM,GAAG,OAAO,UAAU;AAE1B,gBAAU,EAAE,MAAM,mCAAmC,YAAY,EAAE;AAAA,IACrE,SAAS,OAAO;AAEd,gBAAU,EAAE;AAAA,QACV,4CAA4C,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC;AAAA,MACpG;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWA,MAAM,uBAAgD;AACpD,UAAM,UAA0B,CAAC;AAEjC,QAAI;AAEF,UAAI,CAAE,MAAM,GAAG,WAAW,KAAK,WAAW,GAAI;AAC5C,eAAO;AAAA,MACT;AAGA,YAAM,QAAQ,MAAM,GAAG,QAAQ,KAAK,WAAW;AAG/C,iBAAW,QAAQ,OAAO;AACxB,YAAI,CAAC,KAAK,SAAS,OAAO,GAAG;AAC3B;AAAA,QACF;AAEA,YAAI;AACF,gBAAM,WAAW,KAAK,KAAK,KAAK,aAAa,IAAI;AACjD,gBAAM,UAAU,MAAM,GAAG,SAAS,UAAU,MAAM;AAClD,gBAAM,OAAO,KAAK,MAAM,OAAO;AAG/B,cAAI,CAAC,KAAK,aAAa;AACrB;AAAA,UACF;AAEA,gBAAM,WAAW,KAAK,WAAW,IAAI;AAErC,mBAAS,SAAU,KAAK,UAAoC;AAC5D,mBAAS,aAAa,KAAK,cAAc;AAEzC,kBAAQ,KAAK,QAAQ;AAAA,QACvB,SAAS,OAAO;AAEd,oBAAU,EAAE;AAAA,YACV,mCAAmC,IAAI,KAAK,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC;AAAA,UACpG;AAAA,QACF;AAAA,MACF;AAGA,cAAQ,KAAK,CAAC,GAAG,MAAM;AACrB,cAAM,QAAQ,EAAE,aAAa,IAAI,KAAK,EAAE,UAAU,EAAE,QAAQ,IAAI;AAChE,cAAM,QAAQ,EAAE,aAAa,IAAI,KAAK,EAAE,UAAU,EAAE,QAAQ,IAAI;AAChE,eAAO,QAAQ;AAAA,MACjB,CAAC;AAAA,IACH,SAAS,OAAO;AAEd,gBAAU,EAAE;AAAA,QACV,2CAA2C,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC;AAAA,MACnG;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AACF;","names":[]}
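The slugging scheme spelled out in the embedded MetadataManager.ts doc comments above boils down to the sketch below. It mirrors the documented steps (trim trailing slashes, path separators to underscores, other special characters to hyphens, append `.json`); the published implementation remains authoritative for exact output, and the example path is invented.

```ts
// Illustrative sketch of the worktree-path slugging described in the embedded
// MetadataManager.ts source above; not part of the published bundle.
import path from 'path'
import os from 'os'

function slugifyWorktreePath(worktreePath: string): string {
	let slug = worktreePath.replace(/[/\\]+$/, '') // 1. trim trailing slashes
	slug = slug.replace(/[/\\]/g, '__')            // 2. path separators -> underscores
	slug = slug.replace(/[^a-zA-Z0-9_-]/g, '-')    // 3. everything else -> hyphens
	return `${slug}.json`                          // 4. append .json
}

// Metadata files live under ~/.config/iloom-ai/looms/ per the embedded source.
const metadataPath = path.join(
	os.homedir(), '.config', 'iloom-ai', 'looms',
	slugifyWorktreePath('/Users/jane/dev/repo')
)
console.log(metadataPath)
```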
@@ -1 +0,0 @@
-
{"version":3,"sources":["../src/lib/SettingsManager.ts"],"sourcesContent":["import { readFile } from 'fs/promises'\nimport path from 'path'\nimport os from 'os'\nimport { z } from 'zod'\nimport deepmerge from 'deepmerge'\nimport { logger } from '../utils/logger.js'\n\n/**\n * Valid project capability values for Zod enum validation.\n * When updating this constant, also update ProjectCapability type in src/types/loom.ts\n */\nconst PROJECT_CAPABILITIES = ['cli', 'web'] as const\n\n/**\n * Zod schema for agent settings\n */\nexport const AgentSettingsSchema = z.object({\n\tmodel: z\n\t\t.enum(['sonnet', 'opus', 'haiku'])\n\t\t.optional()\n\t\t.describe('Claude model shorthand: sonnet, opus, or haiku'),\n\t// Future: could add other per-agent overrides\n})\n\n/**\n * Zod schema for spin agent settings with default model\n * Used for the spin orchestrator configuration\n */\nexport const SpinAgentSettingsSchema = z.object({\n\tmodel: z\n\t\t.enum(['sonnet', 'opus', 'haiku'])\n\t\t.default('opus')\n\t\t.describe('Claude model shorthand for spin orchestrator'),\n})\n\n/**\n * Zod schema for summary settings with default model\n * Used for session summary generation configuration\n */\nexport const SummarySettingsSchema = z.object({\n\tmodel: z\n\t\t.enum(['sonnet', 'opus', 'haiku'])\n\t\t.default('sonnet')\n\t\t.describe('Claude model shorthand for session summary generation'),\n})\n\n/**\n * Zod schema for workflow permission configuration\n */\nexport const WorkflowPermissionSchema = z.object({\n\tpermissionMode: z\n\t\t.enum(['plan', 'acceptEdits', 'bypassPermissions', 'default'])\n\t\t.optional()\n\t\t.describe('Permission mode for Claude CLI in this workflow type'),\n\tnoVerify: z\n\t\t.boolean()\n\t\t.optional()\n\t\t.describe('Skip pre-commit hooks (--no-verify) when committing during commit and finish workflows'),\n\tstartIde: z\n\t\t.boolean()\n\t\t.default(true)\n\t\t.describe('Launch IDE (code) when starting this workflow type'),\n\tstartDevServer: z\n\t\t.boolean()\n\t\t.default(true)\n\t\t.describe('Launch development server when starting this workflow type'),\n\tstartAiAgent: z\n\t\t.boolean()\n\t\t.default(true)\n\t\t.describe('Launch Claude Code agent when starting this workflow type'),\n\tstartTerminal: z\n\t\t.boolean()\n\t\t.default(false)\n\t\t.describe('Launch terminal window without dev server when starting this workflow type'),\n\tgenerateSummary: z\n\t\t.boolean()\n\t\t.default(true)\n\t\t.describe('Generate and post Claude session summary when finishing this workflow type'),\n})\n\n/**\n * Non-defaulting variant for pre-merge validation\n * This prevents Zod from polluting partial settings with default values before merge\n */\nexport const WorkflowPermissionSchemaNoDefaults = z.object({\n\tpermissionMode: z\n\t\t.enum(['plan', 'acceptEdits', 'bypassPermissions', 'default'])\n\t\t.optional()\n\t\t.describe('Permission mode for Claude CLI in this workflow type'),\n\tnoVerify: z\n\t\t.boolean()\n\t\t.optional()\n\t\t.describe('Skip pre-commit hooks (--no-verify) when committing during commit and finish workflows'),\n\tstartIde: z\n\t\t.boolean()\n\t\t.optional()\n\t\t.describe('Launch IDE (code) when starting this workflow type'),\n\tstartDevServer: z\n\t\t.boolean()\n\t\t.optional()\n\t\t.describe('Launch development server when starting this workflow type'),\n\tstartAiAgent: z\n\t\t.boolean()\n\t\t.optional()\n\t\t.describe('Launch Claude Code agent when starting this workflow type'),\n\tstartTerminal: z\n\t\t.boolean()\n\t\t.optional()\n\t\t.describe('Launch terminal 
window without dev server when starting this workflow type'),\n\tgenerateSummary: z\n\t\t.boolean()\n\t\t.optional()\n\t\t.describe('Generate and post Claude session summary when finishing this workflow type'),\n})\n\n/**\n * Zod schema for workflows settings\n */\nexport const WorkflowsSettingsSchema = z\n\t.object({\n\t\tissue: WorkflowPermissionSchema.optional(),\n\t\tpr: WorkflowPermissionSchema.optional(),\n\t\tregular: WorkflowPermissionSchema.optional(),\n\t})\n\t.optional()\n\n/**\n * Non-defaulting variant for pre-merge validation\n */\nexport const WorkflowsSettingsSchemaNoDefaults = z\n\t.object({\n\t\tissue: WorkflowPermissionSchemaNoDefaults.optional(),\n\t\tpr: WorkflowPermissionSchemaNoDefaults.optional(),\n\t\tregular: WorkflowPermissionSchemaNoDefaults.optional(),\n\t})\n\t.optional()\n\n/**\n * Zod schema for capabilities settings\n */\nexport const CapabilitiesSettingsSchema = z\n\t.object({\n\t\tcapabilities: z\n\t\t\t.array(z.enum(PROJECT_CAPABILITIES))\n\t\t\t.optional()\n\t\t\t.describe('Explicitly declared project capabilities (auto-detected if not specified)'),\n\t\tweb: z\n\t\t\t.object({\n\t\t\t\tbasePort: z\n\t\t\t\t\t.number()\n\t\t\t\t\t.min(1, 'Base port must be >= 1')\n\t\t\t\t\t.max(65535, 'Base port must be <= 65535')\n\t\t\t\t\t.optional()\n\t\t\t\t\t.describe('Base port for web workspace port calculations (default: 3000)'),\n\t\t\t})\n\t\t\t.optional(),\n\t\tdatabase: z\n\t\t\t.object({\n\t\t\t\tdatabaseUrlEnvVarName: z\n\t\t\t\t\t.string()\n\t\t\t\t\t.min(1, 'Database URL variable name cannot be empty')\n\t\t\t\t\t.regex(/^[A-Z_][A-Z0-9_]*$/, 'Must be valid env var name (uppercase, underscores)')\n\t\t\t\t\t.optional()\n\t\t\t\t\t.default('DATABASE_URL')\n\t\t\t\t\t.describe('Name of environment variable for database connection URL'),\n\t\t\t})\n\t\t\t.optional(),\n\t})\n\t.optional()\n\n/**\n * Non-defaulting variant for pre-merge validation\n */\nexport const CapabilitiesSettingsSchemaNoDefaults = z\n\t.object({\n\t\tcapabilities: z\n\t\t\t.array(z.enum(PROJECT_CAPABILITIES))\n\t\t\t.optional()\n\t\t\t.describe('Explicitly declared project capabilities (auto-detected if not specified)'),\n\t\tweb: z\n\t\t\t.object({\n\t\t\t\tbasePort: z\n\t\t\t\t\t.number()\n\t\t\t\t\t.min(1, 'Base port must be >= 1')\n\t\t\t\t\t.max(65535, 'Base port must be <= 65535')\n\t\t\t\t\t.optional()\n\t\t\t\t\t.describe('Base port for web workspace port calculations (default: 3000)'),\n\t\t\t})\n\t\t\t.optional(),\n\t\tdatabase: z\n\t\t\t.object({\n\t\t\t\tdatabaseUrlEnvVarName: z\n\t\t\t\t\t.string()\n\t\t\t\t\t.min(1, 'Database URL variable name cannot be empty')\n\t\t\t\t\t.regex(/^[A-Z_][A-Z0-9_]*$/, 'Must be valid env var name (uppercase, underscores)')\n\t\t\t\t\t.optional()\n\t\t\t\t\t.describe('Name of environment variable for database connection URL'),\n\t\t\t})\n\t\t\t.optional(),\n\t})\n\t.optional()\n\n/**\n * Zod schema for Neon database provider settings\n */\nexport const NeonSettingsSchema = z.object({\n\tprojectId: z\n\t\t.string()\n\t\t.min(1)\n\t\t.regex(/^[a-zA-Z0-9-]+$/, 'Neon project ID must contain only letters, numbers, and hyphens')\n\t\t.describe('Neon project ID found in your project URL (e.g., \"fantastic-fox-3566354\")'),\n\tparentBranch: z\n\t\t.string()\n\t\t.min(1)\n\t\t.describe('Branch from which new database branches are created'),\n})\n\n/**\n * Zod schema for database provider settings\n */\nexport const DatabaseProvidersSettingsSchema = z\n\t.object({\n\t\tneon: NeonSettingsSchema.optional().describe(\n\t\t\t'Neon database configuration. 
Requires Neon CLI installed and authenticated for database branching.',\n\t\t),\n\t})\n\t.optional()\n\n/**\n * Zod schema for iloom settings\n */\nexport const IloomSettingsSchema = z.object({\n\tmainBranch: z\n\t\t.string()\n\t\t.min(1, \"Settings 'mainBranch' cannot be empty\")\n\t\t.optional()\n\t\t.describe('Name of the main/primary branch for the repository'),\n\tsourceEnvOnStart: z\n\t\t.boolean()\n\t\t.default(false)\n\t\t.describe(\n\t\t\t'Source all dotenv-flow files (.env, .env.local, .env.development, .env.development.local) when launching terminal processes (Claude, dev server, terminal). ' +\n\t\t\t\t'Files are sourced in precedence order so later files override earlier ones. ' +\n\t\t\t\t'NOTE TO AI AGENTS: Before making changes to settings files, YOU MUST OFFER to check .env file compatibility. ' +\n\t\t\t\t'WARNING: Unquoted values in .env files may break sourcing in bash/zsh shells. ' +\n\t\t\t\t'Before enabling, verify ALL your .env.* files do not contain unquoted special characters ' +\n\t\t\t\t'(e.g., database URLs with ?, &, or other shell metacharacters). ' +\n\t\t\t\t'Shell compatibility issues may cause processes to fail or behave unexpectedly.',\n\t\t),\n\tworktreePrefix: z\n\t\t.string()\n\t\t.optional()\n\t\t.refine(\n\t\t\t(val) => {\n\t\t\t\tif (val === undefined) return true // undefined = use default calculation\n\t\t\t\tif (val === '') return true // empty string = no prefix mode\n\n\t\t\t\t// Allowlist: only alphanumeric, hyphens, underscores, and forward slashes\n\t\t\t\tconst allowedChars = /^[a-zA-Z0-9\\-_/]+$/\n\t\t\t\tif (!allowedChars.test(val)) return false\n\n\t\t\t\t// Reject if only special characters (no alphanumeric content)\n\t\t\t\tif (/^[-_/]+$/.test(val)) return false\n\n\t\t\t\t// Check each segment (split by /) contains at least one alphanumeric character\n\t\t\t\tconst segments = val.split('/')\n\t\t\t\tfor (const segment of segments) {\n\t\t\t\t\tif (segment && /^[-_]+$/.test(segment)) {\n\t\t\t\t\t\t// Segment exists but contains only hyphens/underscores\n\t\t\t\t\t\treturn false\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\treturn true\n\t\t\t},\n\t\t\t{\n\t\t\t\tmessage:\n\t\t\t\t\t\"worktreePrefix contains invalid characters. Only alphanumeric characters, hyphens (-), underscores (_), and forward slashes (/) are allowed. Use forward slashes for nested directories.\",\n\t\t\t},\n\t\t)\n\t\t.describe(\n\t\t\t'Prefix for worktree directories. Empty string disables prefix. Defaults to <repo-name>-looms if not set.',\n\t\t),\n\tprotectedBranches: z\n\t\t.array(z.string().min(1, 'Protected branch name cannot be empty'))\n\t\t.optional()\n\t\t.describe('List of branches that cannot be deleted (defaults to [mainBranch, \"main\", \"master\", \"develop\"])'),\n\tcopyGitIgnoredPatterns: z\n\t\t.array(z.string().min(1, 'Pattern cannot be empty'))\n\t\t.optional()\n\t\t.describe('Glob patterns for gitignored files to copy to looms (e.g., [\"*.db\", \"data/*.sqlite\"]). Great for local dbs and large test data files that are too big to commit to git. Note: .env (dotenv-flow) files, iloom\\'s and claude\\'s local settings are automatically copied and do not need to be specified here.'),\n\tworkflows: WorkflowsSettingsSchema.describe('Per-workflow-type permission configurations'),\n\tagents: z\n\t\t.record(z.string(), AgentSettingsSchema)\n\t\t.optional()\n\t\t.nullable()\n\t\t.describe(\n\t\t\t'Per-agent configuration overrides. 
Available agents: ' +\n\t\t\t\t'iloom-issue-analyzer (analyzes issues), ' +\n\t\t\t\t'iloom-issue-planner (creates implementation plans), ' +\n\t\t\t\t'iloom-issue-analyze-and-plan (combined analysis and planning), ' +\n\t\t\t\t'iloom-issue-complexity-evaluator (evaluates complexity), ' +\n\t\t\t\t'iloom-issue-enhancer (enhances issue descriptions), ' +\n\t\t\t\t'iloom-issue-implementer (implements code changes), ' +\n\t\t\t\t'iloom-issue-reviewer (reviews code changes against requirements)',\n\t\t),\n\tspin: SpinAgentSettingsSchema.optional().describe(\n\t\t'Spin orchestrator configuration. Model defaults to opus when not configured.',\n\t),\n\tsummary: SummarySettingsSchema.optional().describe(\n\t\t'Session summary generation configuration. Model defaults to sonnet when not configured.',\n\t),\n\tcapabilities: CapabilitiesSettingsSchema.describe('Project capability configurations'),\n\tdatabaseProviders: DatabaseProvidersSettingsSchema.describe('Database provider configurations'),\n\tissueManagement: z\n\t\t.object({\n\t\t\tprovider: z.enum(['github', 'linear']).optional().default('github').describe('Issue tracker provider (github, linear)'),\n\t\t\tgithub: z\n\t\t\t\t.object({\n\t\t\t\t\tremote: z\n\t\t\t\t\t\t.string()\n\t\t\t\t\t\t.min(1, 'Remote name cannot be empty')\n\t\t\t\t\t\t.describe('Git remote name to use for GitHub operations'),\n\t\t\t\t})\n\t\t\t\t.optional(),\n\t\t\tlinear: z\n\t\t\t\t.object({\n\t\t\t\t\tteamId: z\n\t\t\t\t\t\t.string()\n\t\t\t\t\t\t.min(1, 'Team ID cannot be empty')\n\t\t\t\t\t\t.describe('Linear team identifier (e.g., \"ENG\", \"PLAT\")'),\n\t\t\t\t\tbranchFormat: z\n\t\t\t\t\t\t.string()\n\t\t\t\t\t\t.optional()\n\t\t\t\t\t\t.describe('Branch naming template for Linear issues'),\n\t\t\t\t\tapiToken: z\n\t\t\t\t\t\t.string()\n\t\t\t\t\t\t.optional()\n\t\t\t\t\t\t.describe('Linear API token (lin_api_...). SECURITY: Store in settings.local.json only, never commit to source control.'),\n\t\t\t\t})\n\t\t\t\t.optional(),\n\t\t})\n\t\t.optional()\n\t\t.describe('Issue management configuration'),\n\tmergeBehavior: z\n\t\t.object({\n\t\t\tmode: z.enum(['local', 'github-pr', 'github-draft-pr']).default('local'),\n\t\t\tremote: z.string().optional(),\n\t\t})\n\t\t.optional()\n\t\t.describe('Merge behavior configuration: local (merge locally), github-pr (create PR), or github-draft-pr (create draft PR at start, mark ready on finish)'),\n\tide: z\n\t\t.object({\n\t\t\ttype: z\n\t\t\t\t.enum(['vscode', 'cursor', 'webstorm', 'sublime', 'intellij', 'windsurf', 'antigravity'])\n\t\t\t\t.default('vscode')\n\t\t\t\t.describe(\n\t\t\t\t\t'IDE to launch when starting a loom. Options: vscode (Visual Studio Code), cursor (Cursor AI editor), ' +\n\t\t\t\t\t\t'webstorm (JetBrains WebStorm), sublime (Sublime Text), intellij (JetBrains IntelliJ IDEA), ' +\n\t\t\t\t\t\t'windsurf (Windsurf editor), antigravity (Antigravity IDE).'\n\t\t\t\t),\n\t\t})\n\t\t.optional()\n\t\t.describe(\n\t\t\t'IDE configuration for workspace launches. Controls which editor opens when you start a loom. ' +\n\t\t\t\t'Supports VSCode, Cursor, WebStorm, Sublime Text, IntelliJ, Windsurf, and Antigravity. 
' +\n\t\t\t\t'Note: Color synchronization (title bar colors) only works with VSCode-compatible editors (vscode, cursor, windsurf, antigravity).'\n\t\t),\n\tcolors: z\n\t\t.object({\n\t\t\tterminal: z\n\t\t\t\t.boolean()\n\t\t\t\t.default(true)\n\t\t\t\t.describe('Apply terminal background colors based on branch name (macOS only)'),\n\t\t\tvscode: z\n\t\t\t\t.boolean()\n\t\t\t\t.default(false)\n\t\t\t\t.describe(\n\t\t\t\t\t'Apply VSCode/Cursor title bar colors based on branch name. ' +\n\t\t\t\t\t\t'Note: This modifies .vscode/settings.json which may be in source control. ' +\n\t\t\t\t\t\t'Default is false for safety; enable via init or explicitly if .vscode is gitignored.'\n\t\t\t\t),\n\t\t})\n\t\t.optional()\n\t\t.describe('Color synchronization settings for workspace identification'),\n\tattribution: z\n\t\t.enum(['off', 'upstreamOnly', 'on'])\n\t\t.default('upstreamOnly')\n\t\t.describe(\n\t\t\t'Controls when iloom attribution appears in session summaries. ' +\n\t\t\t\t'\"off\" - never show attribution. ' +\n\t\t\t\t'\"upstreamOnly\" - only show for contributions to external repositories (e.g., open source). ' +\n\t\t\t\t'\"on\" - always show attribution.'\n\t\t),\n})\n\n/**\n * Non-defaulting variant for pre-merge validation\n * This prevents Zod from polluting partial settings with default values before merge\n */\nexport const IloomSettingsSchemaNoDefaults = z.object({\n\tmainBranch: z\n\t\t.string()\n\t\t.min(1, \"Settings 'mainBranch' cannot be empty\")\n\t\t.optional()\n\t\t.describe('Name of the main/primary branch for the repository'),\n\tsourceEnvOnStart: z\n\t\t.boolean()\n\t\t.optional()\n\t\t.describe(\n\t\t\t'Source all dotenv-flow files (.env, .env.local, .env.development, .env.development.local) when launching terminal processes (Claude, dev server, terminal). ' +\n\t\t\t\t'Files are sourced in precedence order so later files override earlier ones. ' +\n\t\t\t\t'NOTE TO AI AGENTS: Before making changes to settings files, YOU MUST OFFER to check .env compatibility. ' +\n\t\t\t\t'WARNING: Unquoted values in .env files may break sourcing in bash/zsh shells. ' +\n\t\t\t\t'Before enabling, verify ALL your .env.* files do not contain unquoted special characters ' +\n\t\t\t\t'(e.g., database URLs with ?, &, or other shell metacharacters). ' +\n\t\t\t\t'Shell compatibility issues may cause processes to fail or behave unexpectedly.',\n\t\t),\n\tworktreePrefix: z\n\t\t.string()\n\t\t.optional()\n\t\t.refine(\n\t\t\t(val) => {\n\t\t\t\tif (val === undefined) return true // undefined = use default calculation\n\t\t\t\tif (val === '') return true // empty string = no prefix mode\n\n\t\t\t\t// Allowlist: only alphanumeric, hyphens, underscores, and forward slashes\n\t\t\t\tconst allowedChars = /^[a-zA-Z0-9\\-_/]+$/\n\t\t\t\tif (!allowedChars.test(val)) return false\n\n\t\t\t\t// Reject if only special characters (no alphanumeric content)\n\t\t\t\tif (/^[-_/]+$/.test(val)) return false\n\n\t\t\t\t// Check each segment (split by /) contains at least one alphanumeric character\n\t\t\t\tconst segments = val.split('/')\n\t\t\t\tfor (const segment of segments) {\n\t\t\t\t\tif (segment && /^[-_]+$/.test(segment)) {\n\t\t\t\t\t\t// Segment exists but contains only hyphens/underscores\n\t\t\t\t\t\treturn false\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\treturn true\n\t\t\t},\n\t\t\t{\n\t\t\t\tmessage:\n\t\t\t\t\t\"worktreePrefix contains invalid characters. Only alphanumeric characters, hyphens (-), underscores (_), and forward slashes (/) are allowed. 
Use forward slashes for nested directories.\",\n\t\t\t},\n\t\t)\n\t\t.describe(\n\t\t\t'Prefix for worktree directories. Empty string disables prefix. Defaults to <repo-name>-looms if not set.',\n\t\t),\n\tprotectedBranches: z\n\t\t.array(z.string().min(1, 'Protected branch name cannot be empty'))\n\t\t.optional()\n\t\t.describe('List of branches that cannot be deleted (defaults to [mainBranch, \"main\", \"master\", \"develop\"])'),\n\tcopyGitIgnoredPatterns: z\n\t\t.array(z.string().min(1, 'Pattern cannot be empty'))\n\t\t.optional()\n\t\t.describe('Glob patterns for gitignored files to copy to looms (e.g., [\"*.db\", \"data/*.sqlite\"]). Great for local dbs and large test data files that are too big to commit to git. Note: .env (dotenv-flow) files, iloom\\'s and claude\\'s local settings are automatically copied and do not need to be specified here.'),\n\tworkflows: WorkflowsSettingsSchemaNoDefaults.describe('Per-workflow-type permission configurations'),\n\tagents: z\n\t\t.record(z.string(), AgentSettingsSchema)\n\t\t.optional()\n\t\t.nullable()\n\t\t.describe(\n\t\t\t'Per-agent configuration overrides. Available agents: ' +\n\t\t\t\t'iloom-issue-analyzer (analyzes issues), ' +\n\t\t\t\t'iloom-issue-planner (creates implementation plans), ' +\n\t\t\t\t'iloom-issue-analyze-and-plan (combined analysis and planning), ' +\n\t\t\t\t'iloom-issue-complexity-evaluator (evaluates complexity), ' +\n\t\t\t\t'iloom-issue-enhancer (enhances issue descriptions), ' +\n\t\t\t\t'iloom-issue-implementer (implements code changes), ' +\n\t\t\t\t'iloom-issue-reviewer (reviews code changes against requirements)',\n\t\t),\n\tspin: z\n\t\t.object({\n\t\t\tmodel: z.enum(['sonnet', 'opus', 'haiku']).optional(),\n\t\t})\n\t\t.optional()\n\t\t.describe('Spin orchestrator configuration'),\n\tsummary: z\n\t\t.object({\n\t\t\tmodel: z.enum(['sonnet', 'opus', 'haiku']).optional(),\n\t\t})\n\t\t.optional()\n\t\t.describe('Session summary generation configuration'),\n\tcapabilities: CapabilitiesSettingsSchemaNoDefaults.describe('Project capability configurations'),\n\tdatabaseProviders: DatabaseProvidersSettingsSchema.describe('Database provider configurations'),\n\tissueManagement: z\n\t\t.object({\n\t\t\tprovider: z.enum(['github', 'linear']).optional().describe('Issue tracker provider (github, linear)'),\n\t\t\tgithub: z\n\t\t\t\t.object({\n\t\t\t\t\tremote: z\n\t\t\t\t\t\t.string()\n\t\t\t\t\t\t.min(1, 'Remote name cannot be empty')\n\t\t\t\t\t\t.describe('Git remote name to use for GitHub operations'),\n\t\t\t\t})\n\t\t\t\t.optional(),\n\t\t\tlinear: z\n\t\t\t\t.object({\n\t\t\t\t\tteamId: z\n\t\t\t\t\t\t.string()\n\t\t\t\t\t\t.min(1, 'Team ID cannot be empty')\n\t\t\t\t\t\t.describe('Linear team identifier (e.g., \"ENG\", \"PLAT\")'),\n\t\t\t\t\tbranchFormat: z\n\t\t\t\t\t\t.string()\n\t\t\t\t\t\t.optional()\n\t\t\t\t\t\t.describe('Branch naming template for Linear issues'),\n\t\t\t\t\tapiToken: z\n\t\t\t\t\t\t.string()\n\t\t\t\t\t\t.optional()\n\t\t\t\t\t\t.describe('Linear API token (lin_api_...). 
SECURITY: Store in settings.local.json only, never commit to source control.'),\n\t\t\t\t})\n\t\t\t\t.optional(),\n\t\t})\n\t\t.optional()\n\t\t.describe('Issue management configuration'),\n\tmergeBehavior: z\n\t\t.object({\n\t\t\tmode: z.enum(['local', 'github-pr', 'github-draft-pr']).optional(),\n\t\t\tremote: z.string().optional(),\n\t\t})\n\t\t.optional()\n\t\t.describe('Merge behavior configuration: local (merge locally), github-pr (create PR), or github-draft-pr (create draft PR at start, mark ready on finish)'),\n\tide: z\n\t\t.object({\n\t\t\ttype: z\n\t\t\t\t.enum(['vscode', 'cursor', 'webstorm', 'sublime', 'intellij', 'windsurf', 'antigravity'])\n\t\t\t\t.optional()\n\t\t\t\t.describe(\n\t\t\t\t\t'IDE to launch when starting a loom. Options: vscode (Visual Studio Code), cursor (Cursor AI editor), ' +\n\t\t\t\t\t\t'webstorm (JetBrains WebStorm), sublime (Sublime Text), intellij (JetBrains IntelliJ IDEA), ' +\n\t\t\t\t\t\t'windsurf (Windsurf editor), antigravity (Antigravity IDE).'\n\t\t\t\t),\n\t\t})\n\t\t.optional()\n\t\t.describe(\n\t\t\t'IDE configuration for workspace launches. Controls which editor opens when you start a loom. ' +\n\t\t\t\t'Supports VSCode, Cursor, WebStorm, Sublime Text, IntelliJ, Windsurf, and Antigravity. ' +\n\t\t\t\t'Note: Color synchronization (title bar colors) only works with VSCode-compatible editors (vscode, cursor, windsurf, antigravity).'\n\t\t),\n\tcolors: z\n\t\t.object({\n\t\t\tterminal: z\n\t\t\t\t.boolean()\n\t\t\t\t.optional()\n\t\t\t\t.describe('Apply terminal background colors based on branch name (macOS only)'),\n\t\t\tvscode: z\n\t\t\t\t.boolean()\n\t\t\t\t.optional()\n\t\t\t\t.describe(\n\t\t\t\t\t'Apply VSCode/Cursor title bar colors based on branch name. ' +\n\t\t\t\t\t\t'Note: This modifies .vscode/settings.json which may be in source control.'\n\t\t\t\t),\n\t\t})\n\t\t.optional()\n\t\t.describe('Color synchronization settings for workspace identification'),\n\tattribution: z\n\t\t.enum(['off', 'upstreamOnly', 'on'])\n\t\t.optional()\n\t\t.describe(\n\t\t\t'Controls when iloom attribution appears in session summaries. ' +\n\t\t\t\t'\"off\" - never show attribution. ' +\n\t\t\t\t'\"upstreamOnly\" - only show for contributions to external repositories (e.g., open source). 
' +\n\t\t\t\t'\"on\" - always show attribution.'\n\t\t),\n})\n\n/**\n * TypeScript type for Neon settings derived from Zod schema\n */\nexport type NeonSettings = z.infer<typeof NeonSettingsSchema>\n\n/**\n * TypeScript type for database providers settings derived from Zod schema\n */\nexport type DatabaseProvidersSettings = z.infer<typeof DatabaseProvidersSettingsSchema>\n\n/**\n * TypeScript type for agent settings derived from Zod schema\n */\nexport type AgentSettings = z.infer<typeof AgentSettingsSchema>\n\n/**\n * TypeScript type for spin agent settings derived from Zod schema\n */\nexport type SpinAgentSettings = z.infer<typeof SpinAgentSettingsSchema>\n\n/**\n * TypeScript type for summary settings derived from Zod schema\n */\nexport type SummarySettings = z.infer<typeof SummarySettingsSchema>\n\n/**\n * TypeScript type for workflow permission configuration derived from Zod schema\n */\nexport type WorkflowPermission = z.infer<typeof WorkflowPermissionSchema>\n\n/**\n * TypeScript type for workflows settings derived from Zod schema\n */\nexport type WorkflowsSettings = z.infer<typeof WorkflowsSettingsSchema>\n\n/**\n * TypeScript type for capabilities settings derived from Zod schema\n */\nexport type CapabilitiesSettings = z.infer<typeof CapabilitiesSettingsSchema>\n\n/**\n * TypeScript type for IDE settings derived from Zod schema\n */\nexport type IdeSettings = z.infer<typeof IloomSettingsSchema>['ide']\n\n/**\n * TypeScript type for iloom settings derived from Zod schema\n */\nexport type IloomSettings = z.infer<typeof IloomSettingsSchema>\n\n/**\n * Manages project-level settings from .iloom/settings.json\n */\nexport class SettingsManager {\n\t/**\n\t * Load settings from global, project, and local sources with proper precedence\n\t * Merge hierarchy (lowest to highest priority):\n\t * 1. Global settings (~/.config/iloom-ai/settings.json)\n\t * 2. Project settings (<PROJECT_ROOT>/.iloom/settings.json)\n\t * 3. Local settings (<PROJECT_ROOT>/.iloom/settings.local.json)\n\t * 4. 
CLI overrides (--set flags)\n\t * Returns empty object if all files don't exist (not an error)\n\t */\n\tasync loadSettings(\n\t\tprojectRoot?: string,\n\t\tcliOverrides?: Partial<IloomSettings>,\n\t): Promise<IloomSettings> {\n\t\tconst root = this.getProjectRoot(projectRoot)\n\n\t\t// Load global settings (lowest priority)\n\t\tconst globalSettings = await this.loadGlobalSettingsFile()\n\t\tconst globalSettingsPath = this.getGlobalSettingsPath()\n\t\tlogger.debug(`🌍 Global settings from ${globalSettingsPath}:`, JSON.stringify(globalSettings, null, 2))\n\n\t\t// Load base settings from settings.json\n\t\tconst baseSettings = await this.loadSettingsFile(root, 'settings.json')\n\t\tconst baseSettingsPath = path.join(root, '.iloom', 'settings.json')\n\t\tlogger.debug(`📄 Base settings from ${baseSettingsPath}:`, JSON.stringify(baseSettings, null, 2))\n\n\t\t// Load local overrides from settings.local.json\n\t\tconst localSettings = await this.loadSettingsFile(root, 'settings.local.json')\n\t\tconst localSettingsPath = path.join(root, '.iloom', 'settings.local.json')\n\t\tlogger.debug(`📄 Local settings from ${localSettingsPath}:`, JSON.stringify(localSettings, null, 2))\n\n\t\t// Deep merge with priority: cliOverrides > localSettings > baseSettings > globalSettings\n\t\tlet merged = this.mergeSettings(this.mergeSettings(globalSettings, baseSettings), localSettings)\n\t\tlogger.debug('🔄 After merging global + base + local settings:', JSON.stringify(merged, null, 2))\n\n\t\tif (cliOverrides && Object.keys(cliOverrides).length > 0) {\n\t\t\tlogger.debug('⚙️ CLI overrides to apply:', JSON.stringify(cliOverrides, null, 2))\n\t\t\tmerged = this.mergeSettings(merged, cliOverrides)\n\t\t\tlogger.debug('🔄 After applying CLI overrides:', JSON.stringify(merged, null, 2))\n\t\t}\n\n\t\t// Validate merged result\n\t\ttry {\n\t\t\tconst finalSettings = IloomSettingsSchema.parse(merged)\n\n\t\t\t// Debug: Log final merged configuration\n\t\t\tthis.logFinalConfiguration(finalSettings)\n\n\t\t\treturn finalSettings\n\t\t} catch (error) {\n\t\t\t// Show all Zod validation errors\n\t\t\tif (error instanceof z.ZodError) {\n\t\t\t\tconst errorMsg = this.formatAllZodErrors(error, '<merged settings>')\n\t\t\t\t// Enhance error message if CLI overrides were applied\n\t\t\t\tif (cliOverrides && Object.keys(cliOverrides).length > 0) {\n\t\t\t\t\tthrow new Error(`${errorMsg.message}\\n\\nNote: CLI overrides were applied. Check your --set arguments.`)\n\t\t\t\t}\n\t\t\t\tthrow errorMsg\n\t\t\t}\n\t\t\tthrow error\n\t\t}\n\t}\n\n\t/**\n\t * Log the final merged configuration for debugging\n\t */\n\tprivate logFinalConfiguration(settings: IloomSettings): void {\n\t\tlogger.debug('📋 Final merged configuration:', JSON.stringify(settings, null, 2))\n\t}\n\n\t/**\n\t * Load and parse a single settings file\n\t * Returns empty object if file doesn't exist (not an error)\n\t * Uses non-defaulting schema to prevent polluting partial settings with defaults before merge\n\t */\n\tprivate async loadSettingsFile(\n\t\tprojectRoot: string,\n\t\tfilename: string,\n\t): Promise<z.infer<typeof IloomSettingsSchemaNoDefaults>> {\n\t\tconst settingsPath = path.join(projectRoot, '.iloom', filename)\n\n\t\ttry {\n\t\t\tconst content = await readFile(settingsPath, 'utf-8')\n\t\t\tlet parsed: unknown\n\n\t\t\ttry {\n\t\t\t\tparsed = JSON.parse(content)\n\t\t\t} catch (error) {\n\t\t\t\tthrow new Error(\n\t\t\t\t\t`Failed to parse settings file at ${settingsPath}: ${error instanceof Error ? 
error.message : 'Invalid JSON'}`,\n\t\t\t\t)\n\t\t\t}\n\n\t\t\t// Basic type checking - ensure it's an object, but don't validate schema completeness\n\t\t\t// Individual files may be incomplete (e.g., Linear config split between files)\n\t\t\t// Final validation will happen on the merged result in loadSettings()\n\t\t\tif (typeof parsed !== 'object' || parsed === null || Array.isArray(parsed)) {\n\t\t\t\tthrow new Error(\n\t\t\t\t\t`Settings validation failed at ${filename}:\\n - root: Expected object, received ${typeof parsed}`\n\t\t\t\t)\n\t\t\t}\n\t\t\treturn parsed as z.infer<typeof IloomSettingsSchemaNoDefaults>\n\t\t} catch (error) {\n\t\t\t// File not found is not an error - return empty settings\n\t\t\tif ((error as { code?: string }).code === 'ENOENT') {\n\t\t\t\tlogger.debug(`No settings file found at ${settingsPath}, using defaults`)\n\t\t\t\treturn {}\n\t\t\t}\n\n\t\t\t// Re-throw parsing errors\n\t\t\tthrow error\n\t\t}\n\t}\n\n\t/**\n\t * Deep merge two settings objects with priority to override\n\t * Uses deepmerge library with array replacement strategy\n\t */\n\tprivate mergeSettings(\n\t\tbase: Partial<IloomSettings> | z.infer<typeof IloomSettingsSchemaNoDefaults>,\n\t\toverride: Partial<IloomSettings> | z.infer<typeof IloomSettingsSchemaNoDefaults>,\n\t): IloomSettings {\n\t\t// Use deepmerge with array replacement (not concatenation)\n\t\t// Type assertion is safe because the merged result will be validated with IloomSettingsSchema\n\t\t// which applies all the defaults after merging\n\t\treturn deepmerge(base as Record<string, unknown>, override as Record<string, unknown>, {\n\t\t\t// Replace arrays instead of concatenating them\n\t\t\tarrayMerge: (_destinationArray, sourceArray) => sourceArray,\n\t\t}) as IloomSettings\n\t}\n\n\t/**\n\t * Format all Zod validation errors into a single error message\n\t */\n\tprivate formatAllZodErrors(error: z.ZodError, settingsPath: string): Error {\n\t\tconst errorMessages = error.issues.map(issue => {\n\t\t\tconst path = issue.path.length > 0 ? issue.path.join('.') : 'root'\n\t\t\treturn ` - ${path}: ${issue.message}`\n\t\t})\n\n\t\treturn new Error(\n\t\t\t`Settings validation failed at ${settingsPath}:\\n${errorMessages.join('\\n')}`,\n\t\t)\n\t}\n\n\t/**\n\t * Validate settings structure and model names using Zod schema\n\t * This method is kept for testing purposes but uses Zod internally\n\t * @internal - Only used in tests via bracket notation\n\t */\n\t// @ts-expect-error - Used in tests via bracket notation, TypeScript can't detect this usage\n\tprivate validateSettings(settings: IloomSettings): void {\n\t\ttry {\n\t\t\tIloomSettingsSchema.parse(settings)\n\t\t} catch (error) {\n\t\t\tif (error instanceof z.ZodError) {\n\t\t\t\tthrow this.formatAllZodErrors(error, '<validation>')\n\t\t\t}\n\t\t\tthrow error\n\t\t}\n\t}\n\n\t/**\n\t * Get project root (defaults to process.cwd())\n\t */\n\tprivate getProjectRoot(projectRoot?: string): string {\n\t\treturn projectRoot ?? 
process.cwd()\n\t}\n\n\t/**\n\t * Get global config directory path (~/.config/iloom-ai)\n\t */\n\tprivate getGlobalConfigDir(): string {\n\t\treturn path.join(os.homedir(), '.config', 'iloom-ai')\n\t}\n\n\t/**\n\t * Get global settings file path (~/.config/iloom-ai/settings.json)\n\t */\n\tprivate getGlobalSettingsPath(): string {\n\t\treturn path.join(this.getGlobalConfigDir(), 'settings.json')\n\t}\n\n\t/**\n\t * Load and parse global settings file\n\t * Returns empty object if file doesn't exist (not an error)\n\t * Warns but returns empty object on validation/parse errors (graceful degradation)\n\t */\n\tprivate async loadGlobalSettingsFile(): Promise<z.infer<typeof IloomSettingsSchemaNoDefaults>> {\n\t\tconst settingsPath = this.getGlobalSettingsPath()\n\n\t\ttry {\n\t\t\tconst content = await readFile(settingsPath, 'utf-8')\n\t\t\tlet parsed: unknown\n\n\t\t\ttry {\n\t\t\t\tparsed = JSON.parse(content)\n\t\t\t} catch (error) {\n\t\t\t\tlogger.warn(\n\t\t\t\t\t`Failed to parse global settings file at ${settingsPath}: ${error instanceof Error ? error.message : 'Invalid JSON'}. Ignoring global settings.`,\n\t\t\t\t)\n\t\t\t\treturn {}\n\t\t\t}\n\n\t\t\t// Validate with non-defaulting schema\n\t\t\ttry {\n\t\t\t\tconst validated = IloomSettingsSchemaNoDefaults.strict().parse(parsed)\n\t\t\t\treturn validated\n\t\t\t} catch (error) {\n\t\t\t\tif (error instanceof z.ZodError) {\n\t\t\t\t\tconst errorMsg = this.formatAllZodErrors(error, 'global settings')\n\t\t\t\t\tlogger.warn(`${errorMsg.message}. Ignoring global settings.`)\n\t\t\t\t} else {\n\t\t\t\t\tlogger.warn(`Validation error in global settings: ${error instanceof Error ? error.message : 'Unknown error'}. Ignoring global settings.`)\n\t\t\t\t}\n\t\t\t\treturn {}\n\t\t\t}\n\t\t} catch (error) {\n\t\t\t// File not found is not an error - return empty settings\n\t\t\tif ((error as { code?: string }).code === 'ENOENT') {\n\t\t\t\tlogger.debug(`No global settings file found at ${settingsPath}`)\n\t\t\t\treturn {}\n\t\t\t}\n\n\t\t\t// Other file system errors - warn and continue\n\t\t\tlogger.warn(`Error reading global settings file at ${settingsPath}: ${error instanceof Error ? error.message : 'Unknown error'}. Ignoring global settings.`)\n\t\t\treturn {}\n\t\t}\n\t}\n\n\t/**\n\t * Get effective protected branches list with mainBranch always included\n\t *\n\t * This method provides a single source of truth for protected branches logic:\n\t * 1. Use configured protectedBranches if provided\n\t * 2. Otherwise use defaults: [mainBranch, 'main', 'master', 'develop']\n\t * 3. ALWAYS ensure mainBranch is included even if user configured custom list\n\t *\n\t * @param projectRoot - Optional project root directory (defaults to process.cwd())\n\t * @returns Array of protected branch names with mainBranch guaranteed to be included\n\t */\n\tasync getProtectedBranches(projectRoot?: string): Promise<string[]> {\n\t\tconst settings = await this.loadSettings(projectRoot)\n\t\tconst mainBranch = settings.mainBranch ?? 'main'\n\n\t\t// Build protected branches list:\n\t\t// 1. Use configured protectedBranches if provided\n\t\t// 2. Otherwise use defaults: [mainBranch, 'main', 'master', 'develop']\n\t\t// 3. ALWAYS ensure mainBranch is included even if user configured custom list\n\t\tlet protectedBranches: string[]\n\t\tif (settings.protectedBranches) {\n\t\t\t// Use configured list but ensure mainBranch is always included\n\t\t\tprotectedBranches = settings.protectedBranches.includes(mainBranch)\n\t\t\t\t? 
settings.protectedBranches\n\t\t\t\t: [mainBranch, ...settings.protectedBranches]\n\t\t} else {\n\t\t\t// Use defaults with current mainBranch\n\t\t\tprotectedBranches = [mainBranch, 'main', 'master', 'develop']\n\t\t}\n\n\t\treturn protectedBranches\n\t}\n\n\t/**\n\t * Get the spin orchestrator model with default applied\n\t * Default is defined in SpinAgentSettingsSchema\n\t *\n\t * @param settings - Pre-loaded settings object\n\t * @returns Model shorthand ('opus', 'sonnet', or 'haiku')\n\t */\n\tgetSpinModel(settings?: IloomSettings): 'sonnet' | 'opus' | 'haiku' {\n\t\treturn settings?.spin?.model ?? SpinAgentSettingsSchema.parse({}).model\n\t}\n\n\t/**\n\t * Get the session summary model with default applied\n\t * Default is defined in SummarySettingsSchema\n\t *\n\t * @param settings - Pre-loaded settings object\n\t * @returns Model shorthand ('opus', 'sonnet', or 'haiku')\n\t */\n\tgetSummaryModel(settings?: IloomSettings): 'sonnet' | 'opus' | 'haiku' {\n\t\treturn settings?.summary?.model ?? SummarySettingsSchema.parse({}).model\n\t}\n}\n"],"mappings":";;;;;;AAAA,SAAS,gBAAgB;AACzB,OAAO,UAAU;AACjB,OAAO,QAAQ;AACf,SAAS,SAAS;AAClB,OAAO,eAAe;AAOtB,IAAM,uBAAuB,CAAC,OAAO,KAAK;AAKnC,IAAM,sBAAsB,EAAE,OAAO;AAAA,EAC3C,OAAO,EACL,KAAK,CAAC,UAAU,QAAQ,OAAO,CAAC,EAChC,SAAS,EACT,SAAS,gDAAgD;AAAA;AAE5D,CAAC;AAMM,IAAM,0BAA0B,EAAE,OAAO;AAAA,EAC/C,OAAO,EACL,KAAK,CAAC,UAAU,QAAQ,OAAO,CAAC,EAChC,QAAQ,MAAM,EACd,SAAS,8CAA8C;AAC1D,CAAC;AAMM,IAAM,wBAAwB,EAAE,OAAO;AAAA,EAC7C,OAAO,EACL,KAAK,CAAC,UAAU,QAAQ,OAAO,CAAC,EAChC,QAAQ,QAAQ,EAChB,SAAS,uDAAuD;AACnE,CAAC;AAKM,IAAM,2BAA2B,EAAE,OAAO;AAAA,EAChD,gBAAgB,EACd,KAAK,CAAC,QAAQ,eAAe,qBAAqB,SAAS,CAAC,EAC5D,SAAS,EACT,SAAS,sDAAsD;AAAA,EACjE,UAAU,EACR,QAAQ,EACR,SAAS,EACT,SAAS,wFAAwF;AAAA,EACnG,UAAU,EACR,QAAQ,EACR,QAAQ,IAAI,EACZ,SAAS,oDAAoD;AAAA,EAC/D,gBAAgB,EACd,QAAQ,EACR,QAAQ,IAAI,EACZ,SAAS,4DAA4D;AAAA,EACvE,cAAc,EACZ,QAAQ,EACR,QAAQ,IAAI,EACZ,SAAS,2DAA2D;AAAA,EACtE,eAAe,EACb,QAAQ,EACR,QAAQ,KAAK,EACb,SAAS,4EAA4E;AAAA,EACvF,iBAAiB,EACf,QAAQ,EACR,QAAQ,IAAI,EACZ,SAAS,4EAA4E;AACxF,CAAC;AAMM,IAAM,qCAAqC,EAAE,OAAO;AAAA,EAC1D,gBAAgB,EACd,KAAK,CAAC,QAAQ,eAAe,qBAAqB,SAAS,CAAC,EAC5D,SAAS,EACT,SAAS,sDAAsD;AAAA,EACjE,UAAU,EACR,QAAQ,EACR,SAAS,EACT,SAAS,wFAAwF;AAAA,EACnG,UAAU,EACR,QAAQ,EACR,SAAS,EACT,SAAS,oDAAoD;AAAA,EAC/D,gBAAgB,EACd,QAAQ,EACR,SAAS,EACT,SAAS,4DAA4D;AAAA,EACvE,cAAc,EACZ,QAAQ,EACR,SAAS,EACT,SAAS,2DAA2D;AAAA,EACtE,eAAe,EACb,QAAQ,EACR,SAAS,EACT,SAAS,4EAA4E;AAAA,EACvF,iBAAiB,EACf,QAAQ,EACR,SAAS,EACT,SAAS,4EAA4E;AACxF,CAAC;AAKM,IAAM,0BAA0B,EACrC,OAAO;AAAA,EACP,OAAO,yBAAyB,SAAS;AAAA,EACzC,IAAI,yBAAyB,SAAS;AAAA,EACtC,SAAS,yBAAyB,SAAS;AAC5C,CAAC,EACA,SAAS;AAKJ,IAAM,oCAAoC,EAC/C,OAAO;AAAA,EACP,OAAO,mCAAmC,SAAS;AAAA,EACnD,IAAI,mCAAmC,SAAS;AAAA,EAChD,SAAS,mCAAmC,SAAS;AACtD,CAAC,EACA,SAAS;AAKJ,IAAM,6BAA6B,EACxC,OAAO;AAAA,EACP,cAAc,EACZ,MAAM,EAAE,KAAK,oBAAoB,CAAC,EAClC,SAAS,EACT,SAAS,2EAA2E;AAAA,EACtF,KAAK,EACH,OAAO;AAAA,IACP,UAAU,EACR,OAAO,EACP,IAAI,GAAG,wBAAwB,EAC/B,IAAI,OAAO,4BAA4B,EACvC,SAAS,EACT,SAAS,+DAA+D;AAAA,EAC3E,CAAC,EACA,SAAS;AAAA,EACX,UAAU,EACR,OAAO;AAAA,IACP,uBAAuB,EACrB,OAAO,EACP,IAAI,GAAG,4CAA4C,EACnD,MAAM,sBAAsB,qDAAqD,EACjF,SAAS,EACT,QAAQ,cAAc,EACtB,SAAS,0DAA0D;AAAA,EACtE,CAAC,EACA,SAAS;AACZ,CAAC,EACA,SAAS;AAKJ,IAAM,uCAAuC,EAClD,OAAO;AAAA,EACP,cAAc,EACZ,MAAM,EAAE,KAAK,oBAAoB,CAAC,EAClC,SAAS,EACT,SAAS,2EAA2E;AAAA,EACtF,KAAK,EACH,OAAO;AAAA,IACP,UAAU,EACR,OAAO,EACP,IAAI,GAAG,wBAAwB,EAC/B,IAAI,OAAO,4BAA4B,EACvC,SAAS,EACT,SAAS,+DAA+D;AAAA,EAC3E,CAAC,EACA,SAAS;AAAA,EACX,UAAU,EACR,OAAO;AAAA,IACP,uBAAuB,EACrB,OAAO,EACP,IAAI,GAAG,4CAA4C,EACnD,MAAM,sBAAsB,qDAAqD,EACjF,SAAS
,EACT,SAAS,0DAA0D;AAAA,EACtE,CAAC,EACA,SAAS;AACZ,CAAC,EACA,SAAS;AAKJ,IAAM,qBAAqB,EAAE,OAAO;AAAA,EAC1C,WAAW,EACT,OAAO,EACP,IAAI,CAAC,EACL,MAAM,mBAAmB,iEAAiE,EAC1F,SAAS,2EAA2E;AAAA,EACtF,cAAc,EACZ,OAAO,EACP,IAAI,CAAC,EACL,SAAS,qDAAqD;AACjE,CAAC;AAKM,IAAM,kCAAkC,EAC7C,OAAO;AAAA,EACP,MAAM,mBAAmB,SAAS,EAAE;AAAA,IACnC;AAAA,EACD;AACD,CAAC,EACA,SAAS;AAKJ,IAAM,sBAAsB,EAAE,OAAO;AAAA,EAC3C,YAAY,EACV,OAAO,EACP,IAAI,GAAG,uCAAuC,EAC9C,SAAS,EACT,SAAS,oDAAoD;AAAA,EAC/D,kBAAkB,EAChB,QAAQ,EACR,QAAQ,KAAK,EACb;AAAA,IACA;AAAA,EAOD;AAAA,EACD,gBAAgB,EACd,OAAO,EACP,SAAS,EACT;AAAA,IACA,CAAC,QAAQ;AACR,UAAI,QAAQ,OAAW,QAAO;AAC9B,UAAI,QAAQ,GAAI,QAAO;AAGvB,YAAM,eAAe;AACrB,UAAI,CAAC,aAAa,KAAK,GAAG,EAAG,QAAO;AAGpC,UAAI,WAAW,KAAK,GAAG,EAAG,QAAO;AAGjC,YAAM,WAAW,IAAI,MAAM,GAAG;AAC9B,iBAAW,WAAW,UAAU;AAC/B,YAAI,WAAW,UAAU,KAAK,OAAO,GAAG;AAEvC,iBAAO;AAAA,QACR;AAAA,MACD;AAEA,aAAO;AAAA,IACR;AAAA,IACA;AAAA,MACC,SACC;AAAA,IACF;AAAA,EACD,EACC;AAAA,IACA;AAAA,EACD;AAAA,EACD,mBAAmB,EACjB,MAAM,EAAE,OAAO,EAAE,IAAI,GAAG,uCAAuC,CAAC,EAChE,SAAS,EACT,SAAS,iGAAiG;AAAA,EAC5G,wBAAwB,EACtB,MAAM,EAAE,OAAO,EAAE,IAAI,GAAG,yBAAyB,CAAC,EAClD,SAAS,EACT,SAAS,4SAA8S;AAAA,EACzT,WAAW,wBAAwB,SAAS,6CAA6C;AAAA,EACzF,QAAQ,EACN,OAAO,EAAE,OAAO,GAAG,mBAAmB,EACtC,SAAS,EACT,SAAS,EACT;AAAA,IACA;AAAA,EAQD;AAAA,EACD,MAAM,wBAAwB,SAAS,EAAE;AAAA,IACxC;AAAA,EACD;AAAA,EACA,SAAS,sBAAsB,SAAS,EAAE;AAAA,IACzC;AAAA,EACD;AAAA,EACA,cAAc,2BAA2B,SAAS,mCAAmC;AAAA,EACrF,mBAAmB,gCAAgC,SAAS,kCAAkC;AAAA,EAC9F,iBAAiB,EACf,OAAO;AAAA,IACP,UAAU,EAAE,KAAK,CAAC,UAAU,QAAQ,CAAC,EAAE,SAAS,EAAE,QAAQ,QAAQ,EAAE,SAAS,yCAAyC;AAAA,IACtH,QAAQ,EACN,OAAO;AAAA,MACP,QAAQ,EACN,OAAO,EACP,IAAI,GAAG,6BAA6B,EACpC,SAAS,8CAA8C;AAAA,IAC1D,CAAC,EACA,SAAS;AAAA,IACX,QAAQ,EACN,OAAO;AAAA,MACP,QAAQ,EACN,OAAO,EACP,IAAI,GAAG,yBAAyB,EAChC,SAAS,8CAA8C;AAAA,MACzD,cAAc,EACZ,OAAO,EACP,SAAS,EACT,SAAS,0CAA0C;AAAA,MACrD,UAAU,EACR,OAAO,EACP,SAAS,EACT,SAAS,8GAA8G;AAAA,IAC1H,CAAC,EACA,SAAS;AAAA,EACZ,CAAC,EACA,SAAS,EACT,SAAS,gCAAgC;AAAA,EAC3C,eAAe,EACb,OAAO;AAAA,IACP,MAAM,EAAE,KAAK,CAAC,SAAS,aAAa,iBAAiB,CAAC,EAAE,QAAQ,OAAO;AAAA,IACvE,QAAQ,EAAE,OAAO,EAAE,SAAS;AAAA,EAC7B,CAAC,EACA,SAAS,EACT,SAAS,iJAAiJ;AAAA,EAC5J,KAAK,EACH,OAAO;AAAA,IACP,MAAM,EACJ,KAAK,CAAC,UAAU,UAAU,YAAY,WAAW,YAAY,YAAY,aAAa,CAAC,EACvF,QAAQ,QAAQ,EAChB;AAAA,MACA;AAAA,IAGD;AAAA,EACF,CAAC,EACA,SAAS,EACT;AAAA,IACA;AAAA,EAGD;AAAA,EACD,QAAQ,EACN,OAAO;AAAA,IACP,UAAU,EACR,QAAQ,EACR,QAAQ,IAAI,EACZ,SAAS,oEAAoE;AAAA,IAC/E,QAAQ,EACN,QAAQ,EACR,QAAQ,KAAK,EACb;AAAA,MACA;AAAA,IAGD;AAAA,EACF,CAAC,EACA,SAAS,EACT,SAAS,6DAA6D;AAAA,EACxE,aAAa,EACX,KAAK,CAAC,OAAO,gBAAgB,IAAI,CAAC,EAClC,QAAQ,cAAc,EACtB;AAAA,IACA;AAAA,EAID;AACF,CAAC;AAMM,IAAM,gCAAgC,EAAE,OAAO;AAAA,EACrD,YAAY,EACV,OAAO,EACP,IAAI,GAAG,uCAAuC,EAC9C,SAAS,EACT,SAAS,oDAAoD;AAAA,EAC/D,kBAAkB,EAChB,QAAQ,EACR,SAAS,EACT;AAAA,IACA;AAAA,EAOD;AAAA,EACD,gBAAgB,EACd,OAAO,EACP,SAAS,EACT;AAAA,IACA,CAAC,QAAQ;AACR,UAAI,QAAQ,OAAW,QAAO;AAC9B,UAAI,QAAQ,GAAI,QAAO;AAGvB,YAAM,eAAe;AACrB,UAAI,CAAC,aAAa,KAAK,GAAG,EAAG,QAAO;AAGpC,UAAI,WAAW,KAAK,GAAG,EAAG,QAAO;AAGjC,YAAM,WAAW,IAAI,MAAM,GAAG;AAC9B,iBAAW,WAAW,UAAU;AAC/B,YAAI,WAAW,UAAU,KAAK,OAAO,GAAG;AAEvC,iBAAO;AAAA,QACR;AAAA,MACD;AAEA,aAAO;AAAA,IACR;AAAA,IACA;AAAA,MACC,SACC;AAAA,IACF;AAAA,EACD,EACC;AAAA,IACA;AAAA,EACD;AAAA,EACD,mBAAmB,EACjB,MAAM,EAAE,OAAO,EAAE,IAAI,GAAG,uCAAuC,CAAC,EAChE,SAAS,EACT,SAAS,iGAAiG;AAAA,EAC5G,wBAAwB,EACtB,MAAM,EAAE,OAAO,EAAE,IAAI,GAAG,yBAAyB,CAAC,EAClD,SAAS,EACT,SAAS,4SAA8S;AAAA,EACzT,WAAW,kCAAkC,SAAS,6CAA6C;AAAA,EACnG,QAAQ,EACN,OAAO,EAAE,OAAO,GAAG,mBAAmB,EACtC,SAAS,EACT,SAAS,EACT;AAAA,IACA;AAAA,EAQD;AAAA,EACD,MAAM,EACJ,OAAO;AAAA,IACP,OAAO,EAAE,KAAK,CAAC,UAAU,QAAQ,OAAO,CAAC,EAA
E,SAAS;AAAA,EACrD,CAAC,EACA,SAAS,EACT,SAAS,iCAAiC;AAAA,EAC5C,SAAS,EACP,OAAO;AAAA,IACP,OAAO,EAAE,KAAK,CAAC,UAAU,QAAQ,OAAO,CAAC,EAAE,SAAS;AAAA,EACrD,CAAC,EACA,SAAS,EACT,SAAS,0CAA0C;AAAA,EACrD,cAAc,qCAAqC,SAAS,mCAAmC;AAAA,EAC/F,mBAAmB,gCAAgC,SAAS,kCAAkC;AAAA,EAC9F,iBAAiB,EACf,OAAO;AAAA,IACP,UAAU,EAAE,KAAK,CAAC,UAAU,QAAQ,CAAC,EAAE,SAAS,EAAE,SAAS,yCAAyC;AAAA,IACpG,QAAQ,EACN,OAAO;AAAA,MACP,QAAQ,EACN,OAAO,EACP,IAAI,GAAG,6BAA6B,EACpC,SAAS,8CAA8C;AAAA,IAC1D,CAAC,EACA,SAAS;AAAA,IACX,QAAQ,EACN,OAAO;AAAA,MACP,QAAQ,EACN,OAAO,EACP,IAAI,GAAG,yBAAyB,EAChC,SAAS,8CAA8C;AAAA,MACzD,cAAc,EACZ,OAAO,EACP,SAAS,EACT,SAAS,0CAA0C;AAAA,MACrD,UAAU,EACR,OAAO,EACP,SAAS,EACT,SAAS,8GAA8G;AAAA,IAC1H,CAAC,EACA,SAAS;AAAA,EACZ,CAAC,EACA,SAAS,EACT,SAAS,gCAAgC;AAAA,EAC3C,eAAe,EACb,OAAO;AAAA,IACP,MAAM,EAAE,KAAK,CAAC,SAAS,aAAa,iBAAiB,CAAC,EAAE,SAAS;AAAA,IACjE,QAAQ,EAAE,OAAO,EAAE,SAAS;AAAA,EAC7B,CAAC,EACA,SAAS,EACT,SAAS,iJAAiJ;AAAA,EAC5J,KAAK,EACH,OAAO;AAAA,IACP,MAAM,EACJ,KAAK,CAAC,UAAU,UAAU,YAAY,WAAW,YAAY,YAAY,aAAa,CAAC,EACvF,SAAS,EACT;AAAA,MACA;AAAA,IAGD;AAAA,EACF,CAAC,EACA,SAAS,EACT;AAAA,IACA;AAAA,EAGD;AAAA,EACD,QAAQ,EACN,OAAO;AAAA,IACP,UAAU,EACR,QAAQ,EACR,SAAS,EACT,SAAS,oEAAoE;AAAA,IAC/E,QAAQ,EACN,QAAQ,EACR,SAAS,EACT;AAAA,MACA;AAAA,IAED;AAAA,EACF,CAAC,EACA,SAAS,EACT,SAAS,6DAA6D;AAAA,EACxE,aAAa,EACX,KAAK,CAAC,OAAO,gBAAgB,IAAI,CAAC,EAClC,SAAS,EACT;AAAA,IACA;AAAA,EAID;AACF,CAAC;AAuDM,IAAM,kBAAN,MAAsB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAU5B,MAAM,aACL,aACA,cACyB;AACzB,UAAM,OAAO,KAAK,eAAe,WAAW;AAG5C,UAAM,iBAAiB,MAAM,KAAK,uBAAuB;AACzD,UAAM,qBAAqB,KAAK,sBAAsB;AACtD,WAAO,MAAM,kCAA2B,kBAAkB,KAAK,KAAK,UAAU,gBAAgB,MAAM,CAAC,CAAC;AAGtG,UAAM,eAAe,MAAM,KAAK,iBAAiB,MAAM,eAAe;AACtE,UAAM,mBAAmB,KAAK,KAAK,MAAM,UAAU,eAAe;AAClE,WAAO,MAAM,gCAAyB,gBAAgB,KAAK,KAAK,UAAU,cAAc,MAAM,CAAC,CAAC;AAGhG,UAAM,gBAAgB,MAAM,KAAK,iBAAiB,MAAM,qBAAqB;AAC7E,UAAM,oBAAoB,KAAK,KAAK,MAAM,UAAU,qBAAqB;AACzE,WAAO,MAAM,iCAA0B,iBAAiB,KAAK,KAAK,UAAU,eAAe,MAAM,CAAC,CAAC;AAGnG,QAAI,SAAS,KAAK,cAAc,KAAK,cAAc,gBAAgB,YAAY,GAAG,aAAa;AAC/F,WAAO,MAAM,2DAAoD,KAAK,UAAU,QAAQ,MAAM,CAAC,CAAC;AAEhG,QAAI,gBAAgB,OAAO,KAAK,YAAY,EAAE,SAAS,GAAG;AACzD,aAAO,MAAM,wCAA8B,KAAK,UAAU,cAAc,MAAM,CAAC,CAAC;AAChF,eAAS,KAAK,cAAc,QAAQ,YAAY;AAChD,aAAO,MAAM,2CAAoC,KAAK,UAAU,QAAQ,MAAM,CAAC,CAAC;AAAA,IACjF;AAGA,QAAI;AACH,YAAM,gBAAgB,oBAAoB,MAAM,MAAM;AAGtD,WAAK,sBAAsB,aAAa;AAExC,aAAO;AAAA,IACR,SAAS,OAAO;AAEf,UAAI,iBAAiB,EAAE,UAAU;AAChC,cAAM,WAAW,KAAK,mBAAmB,OAAO,mBAAmB;AAEnE,YAAI,gBAAgB,OAAO,KAAK,YAAY,EAAE,SAAS,GAAG;AACzD,gBAAM,IAAI,MAAM,GAAG,SAAS,OAAO;AAAA;AAAA,8DAAmE;AAAA,QACvG;AACA,cAAM;AAAA,MACP;AACA,YAAM;AAAA,IACP;AAAA,EACD;AAAA;AAAA;AAAA;AAAA,EAKQ,sBAAsB,UAA+B;AAC5D,WAAO,MAAM,yCAAkC,KAAK,UAAU,UAAU,MAAM,CAAC,CAAC;AAAA,EACjF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,MAAc,iBACb,aACA,UACyD;AACzD,UAAM,eAAe,KAAK,KAAK,aAAa,UAAU,QAAQ;AAE9D,QAAI;AACH,YAAM,UAAU,MAAM,SAAS,cAAc,OAAO;AACpD,UAAI;AAEJ,UAAI;AACH,iBAAS,KAAK,MAAM,OAAO;AAAA,MAC5B,SAAS,OAAO;AACf,cAAM,IAAI;AAAA,UACT,oCAAoC,YAAY,KAAK,iBAAiB,QAAQ,MAAM,UAAU,cAAc;AAAA,QAC7G;AAAA,MACD;AAKA,UAAI,OAAO,WAAW,YAAY,WAAW,QAAQ,MAAM,QAAQ,MAAM,GAAG;AAC3E,cAAM,IAAI;AAAA,UACT,iCAAiC,QAAQ;AAAA,sCAA0C,OAAO,MAAM;AAAA,QACjG;AAAA,MACD;AACA,aAAO;AAAA,IACR,SAAS,OAAO;AAEf,UAAK,MAA4B,SAAS,UAAU;AACnD,eAAO,MAAM,6BAA6B,YAAY,kBAAkB;AACxE,eAAO,CAAC;AAAA,MACT;AAGA,YAAM;AAAA,IACP;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA,EAMQ,cACP,MACA,UACgB;AAIhB,WAAO,UAAU,MAAiC,UAAqC;AAAA;AAAA,MAEtF,YAAY,CAAC,mBAAmB,gBAAgB;AAAA,IACjD,CAAC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,mBAAmB,OAAmB,cAA6B;AAC1E,UAAM,gBAAgB,MAAM,OAAO,IAAI,WAAS;AAC/C,YAAMA,QAAO,MAAM,KAAK,SAAS,IAAI,MAAM,KAAK,KAAK,GAAG,IAAI;AAC5D,aAAO,OAAOA,K
AAI,KAAK,MAAM,OAAO;AAAA,IACrC,CAAC;AAED,WAAO,IAAI;AAAA,MACV,iCAAiC,YAAY;AAAA,EAAM,cAAc,KAAK,IAAI,CAAC;AAAA,IAC5E;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQQ,iBAAiB,UAA+B;AACvD,QAAI;AACH,0BAAoB,MAAM,QAAQ;AAAA,IACnC,SAAS,OAAO;AACf,UAAI,iBAAiB,EAAE,UAAU;AAChC,cAAM,KAAK,mBAAmB,OAAO,cAAc;AAAA,MACpD;AACA,YAAM;AAAA,IACP;AAAA,EACD;AAAA;AAAA;AAAA;AAAA,EAKQ,eAAe,aAA8B;AACpD,WAAO,eAAe,QAAQ,IAAI;AAAA,EACnC;AAAA;AAAA;AAAA;AAAA,EAKQ,qBAA6B;AACpC,WAAO,KAAK,KAAK,GAAG,QAAQ,GAAG,WAAW,UAAU;AAAA,EACrD;AAAA;AAAA;AAAA;AAAA,EAKQ,wBAAgC;AACvC,WAAO,KAAK,KAAK,KAAK,mBAAmB,GAAG,eAAe;AAAA,EAC5D;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,MAAc,yBAAiF;AAC9F,UAAM,eAAe,KAAK,sBAAsB;AAEhD,QAAI;AACH,YAAM,UAAU,MAAM,SAAS,cAAc,OAAO;AACpD,UAAI;AAEJ,UAAI;AACH,iBAAS,KAAK,MAAM,OAAO;AAAA,MAC5B,SAAS,OAAO;AACf,eAAO;AAAA,UACN,2CAA2C,YAAY,KAAK,iBAAiB,QAAQ,MAAM,UAAU,cAAc;AAAA,QACpH;AACA,eAAO,CAAC;AAAA,MACT;AAGA,UAAI;AACH,cAAM,YAAY,8BAA8B,OAAO,EAAE,MAAM,MAAM;AACrE,eAAO;AAAA,MACR,SAAS,OAAO;AACf,YAAI,iBAAiB,EAAE,UAAU;AAChC,gBAAM,WAAW,KAAK,mBAAmB,OAAO,iBAAiB;AACjE,iBAAO,KAAK,GAAG,SAAS,OAAO,6BAA6B;AAAA,QAC7D,OAAO;AACN,iBAAO,KAAK,wCAAwC,iBAAiB,QAAQ,MAAM,UAAU,eAAe,6BAA6B;AAAA,QAC1I;AACA,eAAO,CAAC;AAAA,MACT;AAAA,IACD,SAAS,OAAO;AAEf,UAAK,MAA4B,SAAS,UAAU;AACnD,eAAO,MAAM,oCAAoC,YAAY,EAAE;AAC/D,eAAO,CAAC;AAAA,MACT;AAGA,aAAO,KAAK,yCAAyC,YAAY,KAAK,iBAAiB,QAAQ,MAAM,UAAU,eAAe,6BAA6B;AAC3J,aAAO,CAAC;AAAA,IACT;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAaA,MAAM,qBAAqB,aAAyC;AACnE,UAAM,WAAW,MAAM,KAAK,aAAa,WAAW;AACpD,UAAM,aAAa,SAAS,cAAc;AAM1C,QAAI;AACJ,QAAI,SAAS,mBAAmB;AAE/B,0BAAoB,SAAS,kBAAkB,SAAS,UAAU,IAC/D,SAAS,oBACT,CAAC,YAAY,GAAG,SAAS,iBAAiB;AAAA,IAC9C,OAAO;AAEN,0BAAoB,CAAC,YAAY,QAAQ,UAAU,SAAS;AAAA,IAC7D;AAEA,WAAO;AAAA,EACR;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,aAAa,UAAuD;AA73BrE;AA83BE,aAAO,0CAAU,SAAV,mBAAgB,UAAS,wBAAwB,MAAM,CAAC,CAAC,EAAE;AAAA,EACnE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,gBAAgB,UAAuD;AAx4BxE;AAy4BE,aAAO,0CAAU,YAAV,mBAAmB,UAAS,sBAAsB,MAAM,CAAC,CAAC,EAAE;AAAA,EACpE;AACD;","names":["path"]}
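For readers skimming the SettingsManager source embedded in the map above: settings are resolved global → project (.iloom/settings.json) → local (.iloom/settings.local.json) → CLI --set overrides, with deepmerge configured to replace arrays rather than concatenate them. A minimal TypeScript sketch of that precedence follows; it is not part of the package, and the mergeInOrder helper name is hypothetical.

import deepmerge from 'deepmerge'

type PartialSettings = Record<string, unknown>

// Hypothetical helper: fold layers from lowest to highest priority, replacing
// arrays wholesale (the same arrayMerge override shown in SettingsManager.mergeSettings).
function mergeInOrder(...layers: PartialSettings[]): PartialSettings {
	return layers.reduce(
		(acc, layer) =>
			deepmerge(acc, layer, {
				arrayMerge: (_destinationArray, sourceArray) => sourceArray,
			}),
		{} as PartialSettings,
	)
}

// Example: a local protectedBranches list replaces the project-level list entirely.
const projectSettings = { mainBranch: 'main', protectedBranches: ['main', 'develop'] }
const localSettings = { protectedBranches: ['main', 'release'] }
console.log(mergeInOrder(projectSettings, localSettings))
// => { mainBranch: 'main', protectedBranches: ['main', 'release'] }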
package/dist/chunk-ZX3GTM7O.js
DELETED
@@ -1,119 +0,0 @@
-#!/usr/bin/env node
-import {
-  logger
-} from "./chunk-VT4PDUYT.js";
-
-// src/utils/prompt.ts
-import * as readline from "readline";
-async function promptConfirmation(message, defaultValue = false) {
-  const suffix = defaultValue ? "[Y/n]" : "[y/N]";
-  const fullMessage = `${message} ${suffix}: `;
-  while (true) {
-    const rl = readline.createInterface({
-      input: process.stdin,
-      output: process.stdout
-    });
-    const answer = await new Promise((resolve) => {
-      rl.question(fullMessage, (ans) => {
-        rl.close();
-        resolve(ans);
-      });
-    });
-    const normalized = answer.trim().toLowerCase();
-    if (normalized === "") {
-      return defaultValue;
-    }
-    if (normalized === "y" || normalized === "yes") {
-      return true;
-    }
-    if (normalized === "n" || normalized === "no") {
-      return false;
-    }
-    logger.warn("Invalid input. Please enter y/yes or n/no.");
-  }
-}
-async function promptInput(message, defaultValue) {
-  const rl = readline.createInterface({
-    input: process.stdin,
-    output: process.stdout
-  });
-  const suffix = defaultValue ? ` [${defaultValue}]` : "";
-  const fullMessage = `${message}${suffix}: `;
-  return new Promise((resolve) => {
-    rl.question(fullMessage, (answer) => {
-      rl.close();
-      const trimmed = answer.trim();
-      if (trimmed === "" && defaultValue !== void 0) {
-        resolve(defaultValue);
-        return;
-      }
-      resolve(trimmed);
-    });
-  });
-}
-async function waitForKeypress(message = "Press any key to continue...") {
-  if (!process.stdin.isTTY || typeof process.stdin.setRawMode !== "function") {
-    return "";
-  }
-  process.stdout.write(message);
-  return new Promise((resolve) => {
-    process.stdin.setRawMode(true);
-    process.stdin.resume();
-    process.stdin.once("data", (chunk) => {
-      const key = chunk.toString("utf8");
-      process.stdin.setRawMode(false);
-      process.stdin.pause();
-      if (key === "\x03") {
-        process.stdout.write("\n");
-        process.exit(130);
-      }
-      process.stdout.write("\n");
-      resolve(key);
-    });
-  });
-}
-function isInteractiveEnvironment() {
-  return process.stdin.isTTY === true && process.env.CI !== "true";
-}
-async function promptCommitAction(message) {
-  if (!isInteractiveEnvironment()) {
-    return "accept";
-  }
-  process.stdout.write("\n" + "=".repeat(60) + "\n");
-  process.stdout.write("COMMIT MESSAGE:\n");
-  process.stdout.write("=".repeat(60) + "\n");
-  process.stdout.write(message + "\n");
-  process.stdout.write("=".repeat(60) + "\n\n");
-  while (true) {
-    const rl = readline.createInterface({
-      input: process.stdin,
-      output: process.stdout
-    });
-    const answer = await new Promise((resolve) => {
-      rl.question("[A]ccept as-is, [E]dit in editor, A[b]ort? [A/e/b]: ", (ans) => {
-        rl.close();
-        resolve(ans);
-      });
-    });
-    const normalized = answer.trim().toLowerCase();
-    if (normalized === "" || normalized === "a" || normalized === "accept") {
-      return "accept";
-    }
-    if (normalized === "e" || normalized === "edit") {
-      return "edit";
-    }
-    if (normalized === "b" || normalized === "abort") {
-      return "abort";
-    }
-    logger.warn("Invalid input. Please enter A (accept), E (edit), or B (abort).");
-  }
-}
-
-export {
-  promptConfirmation,
-  promptInput,
-  waitForKeypress,
-  isInteractiveEnvironment,
-  promptCommitAction
-};
-//# sourceMappingURL=chunk-ZX3GTM7O.js.map
package/dist/chunk-ZX3GTM7O.js.map
DELETED
@@ -1 +0,0 @@
-
{"version":3,"sources":["../src/utils/prompt.ts"],"sourcesContent":["import * as readline from 'node:readline'\nimport { logger } from './logger.js'\n\n/**\n * Prompt user for confirmation (yes/no)\n * @param message The question to ask the user\n * @param defaultValue Default value if user just presses enter (default: false)\n * @returns Promise<boolean> - true if user confirms, false otherwise\n */\nexport async function promptConfirmation(\n\tmessage: string,\n\tdefaultValue = false\n): Promise<boolean> {\n\tconst suffix = defaultValue ? '[Y/n]' : '[y/N]'\n\tconst fullMessage = `${message} ${suffix}: `\n\n\t// Loop until valid input is received\n\twhile (true) {\n\t\tconst rl = readline.createInterface({\n\t\t\tinput: process.stdin,\n\t\t\toutput: process.stdout,\n\t\t})\n\n\t\tconst answer = await new Promise<string>((resolve) => {\n\t\t\trl.question(fullMessage, (ans) => {\n\t\t\t\trl.close()\n\t\t\t\tresolve(ans)\n\t\t\t})\n\t\t})\n\n\t\tconst normalized = answer.trim().toLowerCase()\n\n\t\tif (normalized === '') {\n\t\t\treturn defaultValue\n\t\t}\n\n\t\tif (normalized === 'y' || normalized === 'yes') {\n\t\t\treturn true\n\t\t}\n\n\t\tif (normalized === 'n' || normalized === 'no') {\n\t\t\treturn false\n\t\t}\n\n\t\t// Invalid input - show warning and re-prompt\n\t\tlogger.warn('Invalid input. Please enter y/yes or n/no.')\n\t}\n}\n\n/**\n * Prompt user for text input\n * @param message The prompt message\n * @param defaultValue Optional default value\n * @returns Promise<string> - the user's input\n */\nexport async function promptInput(\n\tmessage: string,\n\tdefaultValue?: string\n): Promise<string> {\n\tconst rl = readline.createInterface({\n\t\tinput: process.stdin,\n\t\toutput: process.stdout,\n\t})\n\n\tconst suffix = defaultValue ? ` [${defaultValue}]` : ''\n\tconst fullMessage = `${message}${suffix}: `\n\n\treturn new Promise((resolve) => {\n\t\trl.question(fullMessage, (answer) => {\n\t\t\trl.close()\n\n\t\t\tconst trimmed = answer.trim()\n\n\t\t\tif (trimmed === '' && defaultValue !== undefined) {\n\t\t\t\tresolve(defaultValue)\n\t\t\t\treturn\n\t\t\t}\n\n\t\t\tresolve(trimmed)\n\t\t})\n\t})\n}\n\n/**\n * Wait for the user to press any key\n * @param message Optional message to display (default: \"Press any key to continue...\")\n * @returns Promise<string> - resolves with the key that was pressed, or empty string in non-interactive environments\n */\nexport async function waitForKeypress(\n\tmessage = 'Press any key to continue...'\n): Promise<string> {\n\t// Check if we can use raw mode (only available in TTY)\n\tif (!process.stdin.isTTY || typeof process.stdin.setRawMode !== 'function') {\n\t\t// Non-interactive environment - skip keypress wait\n\t\treturn ''\n\t}\n\n\t// Display message first\n\tprocess.stdout.write(message)\n\n\treturn new Promise((resolve) => {\n\t\t// Enable raw mode to capture single keypresses\n\t\tprocess.stdin.setRawMode(true)\n\t\tprocess.stdin.resume()\n\n\t\t// Listen for single data event\n\t\tprocess.stdin.once('data', (chunk: Buffer) => {\n\t\t\tconst key = chunk.toString('utf8')\n\n\t\t\t// Restore normal mode first (cleanup before any exit)\n\t\t\tprocess.stdin.setRawMode(false)\n\t\t\tprocess.stdin.pause()\n\n\t\t\t// Handle Ctrl+C (ETX character \\x03)\n\t\t\tif (key === '\\x03') {\n\t\t\t\tprocess.stdout.write('\\n')\n\t\t\t\tprocess.exit(130) // Standard exit code for SIGINT (128 + 2)\n\t\t\t}\n\n\t\t\t// Add newline after keypress for clean output\n\t\t\tprocess.stdout.write('\\n')\n\t\t\tresolve(key)\n\t\t})\n\t})\n}\n\n/**\n * 
Check if running in an interactive environment\n * Returns false if CI environment or no TTY\n */\nexport function isInteractiveEnvironment(): boolean {\n\treturn process.stdin.isTTY === true && process.env.CI !== 'true'\n}\n\n// Commit action type for type safety\nexport type CommitAction = 'accept' | 'edit' | 'abort'\n\n/**\n * Display commit message and prompt for action\n * @param message The commit message to display\n * @returns Promise<CommitAction> - 'accept', 'edit', or 'abort'\n */\nexport async function promptCommitAction(message: string): Promise<CommitAction> {\n\t// Check for non-interactive environment first\n\tif (!isInteractiveEnvironment()) {\n\t\treturn 'accept'\n\t}\n\n\t// Display the commit message with clear demarcation\n\tprocess.stdout.write('\\n' + '='.repeat(60) + '\\n')\n\tprocess.stdout.write('COMMIT MESSAGE:\\n')\n\tprocess.stdout.write('='.repeat(60) + '\\n')\n\tprocess.stdout.write(message + '\\n')\n\tprocess.stdout.write('='.repeat(60) + '\\n\\n')\n\n\t// Loop until valid input is received\n\twhile (true) {\n\t\tconst rl = readline.createInterface({\n\t\t\tinput: process.stdin,\n\t\t\toutput: process.stdout,\n\t\t})\n\n\t\tconst answer = await new Promise<string>((resolve) => {\n\t\t\trl.question('[A]ccept as-is, [E]dit in editor, A[b]ort? [A/e/b]: ', (ans) => {\n\t\t\t\trl.close()\n\t\t\t\tresolve(ans)\n\t\t\t})\n\t\t})\n\n\t\tconst normalized = answer.trim().toLowerCase()\n\n\t\tif (normalized === '' || normalized === 'a' || normalized === 'accept') {\n\t\t\treturn 'accept'\n\t\t}\n\n\t\tif (normalized === 'e' || normalized === 'edit') {\n\t\t\treturn 'edit'\n\t\t}\n\n\t\tif (normalized === 'b' || normalized === 'abort') {\n\t\t\treturn 'abort'\n\t\t}\n\n\t\t// Invalid input - show warning and re-prompt\n\t\tlogger.warn('Invalid input. 
Please enter A (accept), E (edit), or B (abort).')\n\t}\n}\n"],"mappings":";;;;;;AAAA,YAAY,cAAc;AAS1B,eAAsB,mBACrB,SACA,eAAe,OACI;AACnB,QAAM,SAAS,eAAe,UAAU;AACxC,QAAM,cAAc,GAAG,OAAO,IAAI,MAAM;AAGxC,SAAO,MAAM;AACZ,UAAM,KAAc,yBAAgB;AAAA,MACnC,OAAO,QAAQ;AAAA,MACf,QAAQ,QAAQ;AAAA,IACjB,CAAC;AAED,UAAM,SAAS,MAAM,IAAI,QAAgB,CAAC,YAAY;AACrD,SAAG,SAAS,aAAa,CAAC,QAAQ;AACjC,WAAG,MAAM;AACT,gBAAQ,GAAG;AAAA,MACZ,CAAC;AAAA,IACF,CAAC;AAED,UAAM,aAAa,OAAO,KAAK,EAAE,YAAY;AAE7C,QAAI,eAAe,IAAI;AACtB,aAAO;AAAA,IACR;AAEA,QAAI,eAAe,OAAO,eAAe,OAAO;AAC/C,aAAO;AAAA,IACR;AAEA,QAAI,eAAe,OAAO,eAAe,MAAM;AAC9C,aAAO;AAAA,IACR;AAGA,WAAO,KAAK,4CAA4C;AAAA,EACzD;AACD;AAQA,eAAsB,YACrB,SACA,cACkB;AAClB,QAAM,KAAc,yBAAgB;AAAA,IACnC,OAAO,QAAQ;AAAA,IACf,QAAQ,QAAQ;AAAA,EACjB,CAAC;AAED,QAAM,SAAS,eAAe,KAAK,YAAY,MAAM;AACrD,QAAM,cAAc,GAAG,OAAO,GAAG,MAAM;AAEvC,SAAO,IAAI,QAAQ,CAAC,YAAY;AAC/B,OAAG,SAAS,aAAa,CAAC,WAAW;AACpC,SAAG,MAAM;AAET,YAAM,UAAU,OAAO,KAAK;AAE5B,UAAI,YAAY,MAAM,iBAAiB,QAAW;AACjD,gBAAQ,YAAY;AACpB;AAAA,MACD;AAEA,cAAQ,OAAO;AAAA,IAChB,CAAC;AAAA,EACF,CAAC;AACF;AAOA,eAAsB,gBACrB,UAAU,gCACQ;AAElB,MAAI,CAAC,QAAQ,MAAM,SAAS,OAAO,QAAQ,MAAM,eAAe,YAAY;AAE3E,WAAO;AAAA,EACR;AAGA,UAAQ,OAAO,MAAM,OAAO;AAE5B,SAAO,IAAI,QAAQ,CAAC,YAAY;AAE/B,YAAQ,MAAM,WAAW,IAAI;AAC7B,YAAQ,MAAM,OAAO;AAGrB,YAAQ,MAAM,KAAK,QAAQ,CAAC,UAAkB;AAC7C,YAAM,MAAM,MAAM,SAAS,MAAM;AAGjC,cAAQ,MAAM,WAAW,KAAK;AAC9B,cAAQ,MAAM,MAAM;AAGpB,UAAI,QAAQ,KAAQ;AACnB,gBAAQ,OAAO,MAAM,IAAI;AACzB,gBAAQ,KAAK,GAAG;AAAA,MACjB;AAGA,cAAQ,OAAO,MAAM,IAAI;AACzB,cAAQ,GAAG;AAAA,IACZ,CAAC;AAAA,EACF,CAAC;AACF;AAMO,SAAS,2BAAoC;AACnD,SAAO,QAAQ,MAAM,UAAU,QAAQ,QAAQ,IAAI,OAAO;AAC3D;AAUA,eAAsB,mBAAmB,SAAwC;AAEhF,MAAI,CAAC,yBAAyB,GAAG;AAChC,WAAO;AAAA,EACR;AAGA,UAAQ,OAAO,MAAM,OAAO,IAAI,OAAO,EAAE,IAAI,IAAI;AACjD,UAAQ,OAAO,MAAM,mBAAmB;AACxC,UAAQ,OAAO,MAAM,IAAI,OAAO,EAAE,IAAI,IAAI;AAC1C,UAAQ,OAAO,MAAM,UAAU,IAAI;AACnC,UAAQ,OAAO,MAAM,IAAI,OAAO,EAAE,IAAI,MAAM;AAG5C,SAAO,MAAM;AACZ,UAAM,KAAc,yBAAgB;AAAA,MACnC,OAAO,QAAQ;AAAA,MACf,QAAQ,QAAQ;AAAA,IACjB,CAAC;AAED,UAAM,SAAS,MAAM,IAAI,QAAgB,CAAC,YAAY;AACrD,SAAG,SAAS,wDAAwD,CAAC,QAAQ;AAC5E,WAAG,MAAM;AACT,gBAAQ,GAAG;AAAA,MACZ,CAAC;AAAA,IACF,CAAC;AAED,UAAM,aAAa,OAAO,KAAK,EAAE,YAAY;AAE7C,QAAI,eAAe,MAAM,eAAe,OAAO,eAAe,UAAU;AACvE,aAAO;AAAA,IACR;AAEA,QAAI,eAAe,OAAO,eAAe,QAAQ;AAChD,aAAO;AAAA,IACR;AAEA,QAAI,eAAe,OAAO,eAAe,SAAS;AACjD,aAAO;AAAA,IACR;AAGA,WAAO,KAAK,iEAAiE;AAAA,EAC9E;AACD;","names":[]}