panopticon-cli 0.4.32 → 0.5.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +96 -210
- package/dist/{agents-BDFHF4T3.js → agents-E43Y3HNU.js} +10 -7
- package/dist/chunk-7SN4L4PH.js +150 -0
- package/dist/chunk-7SN4L4PH.js.map +1 -0
- package/dist/{chunk-2NIAOCIC.js → chunk-AAFQANKW.js} +358 -97
- package/dist/chunk-AAFQANKW.js.map +1 -0
- package/dist/chunk-AQXETQHW.js +113 -0
- package/dist/chunk-AQXETQHW.js.map +1 -0
- package/dist/chunk-B3PF6JPQ.js +212 -0
- package/dist/chunk-B3PF6JPQ.js.map +1 -0
- package/dist/chunk-CFCUOV3Q.js +669 -0
- package/dist/chunk-CFCUOV3Q.js.map +1 -0
- package/dist/chunk-CWELWPWQ.js +32 -0
- package/dist/chunk-CWELWPWQ.js.map +1 -0
- package/dist/chunk-DI7ABPNQ.js +352 -0
- package/dist/chunk-DI7ABPNQ.js.map +1 -0
- package/dist/{chunk-VU4FLXV5.js → chunk-FQ66DECN.js} +31 -4
- package/dist/chunk-FQ66DECN.js.map +1 -0
- package/dist/{chunk-VIWUCJ4V.js → chunk-FTCPTHIJ.js} +57 -432
- package/dist/chunk-FTCPTHIJ.js.map +1 -0
- package/dist/{review-status-GWQYY77L.js → chunk-GFP3PIPB.js} +14 -7
- package/dist/chunk-GFP3PIPB.js.map +1 -0
- package/dist/chunk-GR6ZZMCX.js +816 -0
- package/dist/chunk-GR6ZZMCX.js.map +1 -0
- package/dist/chunk-HJSM6E6U.js +1038 -0
- package/dist/chunk-HJSM6E6U.js.map +1 -0
- package/dist/{chunk-XP2DXWYP.js → chunk-HZT2AOPN.js} +164 -39
- package/dist/chunk-HZT2AOPN.js.map +1 -0
- package/dist/chunk-JQBV3Q2W.js +29 -0
- package/dist/chunk-JQBV3Q2W.js.map +1 -0
- package/dist/{chunk-BWGFN44T.js → chunk-JT4O4YVM.js} +28 -16
- package/dist/chunk-JT4O4YVM.js.map +1 -0
- package/dist/chunk-NTO3EDB3.js +600 -0
- package/dist/chunk-NTO3EDB3.js.map +1 -0
- package/dist/{chunk-JY7R7V4G.js → chunk-OMNXYPXC.js} +2 -2
- package/dist/chunk-OMNXYPXC.js.map +1 -0
- package/dist/chunk-PELXV435.js +215 -0
- package/dist/chunk-PELXV435.js.map +1 -0
- package/dist/chunk-PPRFKTVC.js +154 -0
- package/dist/chunk-PPRFKTVC.js.map +1 -0
- package/dist/chunk-WQG2TYCB.js +677 -0
- package/dist/chunk-WQG2TYCB.js.map +1 -0
- package/dist/{chunk-HCTJFIJJ.js → chunk-YLPSQAM2.js} +2 -2
- package/dist/{chunk-HCTJFIJJ.js.map → chunk-YLPSQAM2.js.map} +1 -1
- package/dist/{chunk-6HXKTOD7.js → chunk-ZTFNYOC7.js} +53 -38
- package/dist/chunk-ZTFNYOC7.js.map +1 -0
- package/dist/cli/index.js +5103 -3165
- package/dist/cli/index.js.map +1 -1
- package/dist/{config-BOAMSKTF.js → config-4CJNUE3O.js} +7 -3
- package/dist/dashboard/prompts/merge-agent.md +217 -0
- package/dist/dashboard/prompts/review-agent.md +409 -0
- package/dist/dashboard/prompts/sync-main.md +84 -0
- package/dist/dashboard/prompts/test-agent.md +283 -0
- package/dist/dashboard/prompts/work-agent.md +249 -0
- package/dist/dashboard/public/assets/index-BxpjweAL.css +32 -0
- package/dist/dashboard/public/assets/index-DQHkwvvJ.js +743 -0
- package/dist/dashboard/public/index.html +2 -2
- package/dist/dashboard/server.js +17619 -4044
- package/dist/{dns-L3L2BB27.js → dns-7BDJSD3E.js} +4 -2
- package/dist/{feedback-writer-AAKF5BTK.js → feedback-writer-LVZ5TFYZ.js} +8 -4
- package/dist/feedback-writer-LVZ5TFYZ.js.map +1 -0
- package/dist/hume-WMAUBBV2.js +13 -0
- package/dist/index.d.ts +162 -40
- package/dist/index.js +67 -23
- package/dist/index.js.map +1 -1
- package/dist/{projects-VXRUCMLM.js → projects-JEIVIYC6.js} +3 -3
- package/dist/rally-RKFSWC7E.js +10 -0
- package/dist/{remote-agents-Z3R2A5BN.js → remote-agents-TFSMW7GN.js} +2 -2
- package/dist/{remote-workspace-2G6V2KNP.js → remote-workspace-AHVHQEES.js} +8 -8
- package/dist/review-status-EPFG4XM7.js +19 -0
- package/dist/shadow-state-5MDP6YXH.js +30 -0
- package/dist/shadow-state-5MDP6YXH.js.map +1 -0
- package/dist/{specialist-context-N32QBNNQ.js → specialist-context-ZC6A4M3I.js} +8 -7
- package/dist/{specialist-context-N32QBNNQ.js.map → specialist-context-ZC6A4M3I.js.map} +1 -1
- package/dist/{specialist-logs-GF3YV4KL.js → specialist-logs-KLGJCEUL.js} +7 -6
- package/dist/specialist-logs-KLGJCEUL.js.map +1 -0
- package/dist/{specialists-JBIW6MP4.js → specialists-O4HWDJL5.js} +7 -6
- package/dist/specialists-O4HWDJL5.js.map +1 -0
- package/dist/tldr-daemon-T3THOUGT.js +21 -0
- package/dist/tldr-daemon-T3THOUGT.js.map +1 -0
- package/dist/traefik-QN7R5I6V.js +19 -0
- package/dist/traefik-QN7R5I6V.js.map +1 -0
- package/dist/tunnel-W2GZBLEV.js +13 -0
- package/dist/tunnel-W2GZBLEV.js.map +1 -0
- package/dist/workspace-manager-IE4JL2JP.js +22 -0
- package/dist/workspace-manager-IE4JL2JP.js.map +1 -0
- package/package.json +2 -2
- package/scripts/heartbeat-hook +37 -10
- package/scripts/patches/llm-tldr-tsx-support.py +109 -0
- package/scripts/pre-tool-hook +26 -15
- package/scripts/record-cost-event.js +177 -43
- package/scripts/record-cost-event.ts +87 -3
- package/scripts/statusline.sh +169 -0
- package/scripts/stop-hook +21 -11
- package/scripts/tldr-post-edit +72 -0
- package/scripts/tldr-read-enforcer +275 -0
- package/scripts/work-agent-stop-hook +137 -0
- package/skills/check-merged/SKILL.md +143 -0
- package/skills/crash-investigation/SKILL.md +301 -0
- package/skills/github-cli/SKILL.md +185 -0
- package/skills/myn-standards/SKILL.md +351 -0
- package/skills/pan-reopen/SKILL.md +65 -0
- package/skills/pan-sync-main/SKILL.md +87 -0
- package/skills/pan-tldr/SKILL.md +149 -0
- package/skills/react-best-practices/SKILL.md +125 -0
- package/skills/spec-readiness/REPORT-TEMPLATE.md +158 -0
- package/skills/spec-readiness/SCORING-REFERENCE.md +369 -0
- package/skills/spec-readiness/SKILL.md +400 -0
- package/skills/spec-readiness-setup/SKILL.md +361 -0
- package/skills/workspace-status/SKILL.md +56 -0
- package/skills/write-spec/SKILL.md +138 -0
- package/templates/traefik/dynamic/panopticon.yml.template +0 -5
- package/templates/traefik/traefik.yml +0 -8
- package/dist/chunk-2NIAOCIC.js.map +0 -1
- package/dist/chunk-3XAB4IXF.js +0 -51
- package/dist/chunk-3XAB4IXF.js.map +0 -1
- package/dist/chunk-6HXKTOD7.js.map +0 -1
- package/dist/chunk-BBCUK6N2.js +0 -241
- package/dist/chunk-BBCUK6N2.js.map +0 -1
- package/dist/chunk-BWGFN44T.js.map +0 -1
- package/dist/chunk-ELK6Q7QI.js +0 -545
- package/dist/chunk-ELK6Q7QI.js.map +0 -1
- package/dist/chunk-JY7R7V4G.js.map +0 -1
- package/dist/chunk-LYSBSZYV.js +0 -1523
- package/dist/chunk-LYSBSZYV.js.map +0 -1
- package/dist/chunk-VIWUCJ4V.js.map +0 -1
- package/dist/chunk-VU4FLXV5.js.map +0 -1
- package/dist/chunk-XP2DXWYP.js.map +0 -1
- package/dist/dashboard/public/assets/index-C7X6LP5Z.css +0 -32
- package/dist/dashboard/public/assets/index-ClYqpcAJ.js +0 -645
- package/dist/feedback-writer-AAKF5BTK.js.map +0 -1
- package/dist/review-status-GWQYY77L.js.map +0 -1
- package/dist/traefik-CUJM6K5Z.js +0 -12
- /package/dist/{agents-BDFHF4T3.js.map → agents-E43Y3HNU.js.map} +0 -0
- /package/dist/{config-BOAMSKTF.js.map → config-4CJNUE3O.js.map} +0 -0
- /package/dist/{dns-L3L2BB27.js.map → dns-7BDJSD3E.js.map} +0 -0
- /package/dist/{projects-VXRUCMLM.js.map → hume-WMAUBBV2.js.map} +0 -0
- /package/dist/{remote-agents-Z3R2A5BN.js.map → projects-JEIVIYC6.js.map} +0 -0
- /package/dist/{specialist-logs-GF3YV4KL.js.map → rally-RKFSWC7E.js.map} +0 -0
- /package/dist/{specialists-JBIW6MP4.js.map → remote-agents-TFSMW7GN.js.map} +0 -0
- /package/dist/{remote-workspace-2G6V2KNP.js.map → remote-workspace-AHVHQEES.js.map} +0 -0
- /package/dist/{traefik-CUJM6K5Z.js.map → review-status-EPFG4XM7.js.map} +0 -0
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
import {
|
|
2
2
|
PANOPTICON_HOME,
|
|
3
3
|
init_paths
|
|
4
|
-
} from "./chunk-
|
|
4
|
+
} from "./chunk-ZTFNYOC7.js";
|
|
5
5
|
import {
|
|
6
6
|
__esm,
|
|
7
7
|
__export,
|
|
@@ -257,4 +257,4 @@ export {
|
|
|
257
257
|
projects_exports,
|
|
258
258
|
init_projects
|
|
259
259
|
};
|
|
260
|
-
//# sourceMappingURL=chunk-
|
|
260
|
+
//# sourceMappingURL=chunk-OMNXYPXC.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/lib/projects.ts"],"sourcesContent":["/**\n * Project Registry - Multi-project support for Panopticon\n *\n * Maps Linear team prefixes and labels to project paths for workspace creation.\n */\n\nimport { existsSync, readFileSync, writeFileSync, mkdirSync } from 'fs';\nimport { join } from 'path';\nimport { parse as parseYaml, stringify as stringifyYaml } from 'yaml';\nimport { PANOPTICON_HOME } from './paths.js';\n\nexport const PROJECTS_CONFIG_FILE = join(PANOPTICON_HOME, 'projects.yaml');\n\n/**\n * Issue routing rule - routes issues with certain labels to specific paths\n */\nexport interface IssueRoutingRule {\n labels?: string[];\n default?: boolean;\n path: string;\n}\n\n/**\n * Workspace configuration (imported from workspace-config.ts for full details)\n */\nexport interface WorkspaceConfig {\n type?: 'polyrepo' | 'monorepo';\n workspaces_dir?: string;\n repos?: Array<{ name: string; path: string; branch_prefix?: string }>;\n dns?: { domain: string; entries: string[]; sync_method?: 'wsl2hosts' | 'hosts_file' | 'dnsmasq' };\n ports?: Record<string, { range: [number, number] }>;\n docker?: { traefik?: string; compose_template?: string };\n database?: { seed_file?: string; container_name?: string; [key: string]: any };\n agent?: { template_dir: string; templates?: Array<{ source: string; target: string }>; copy_dirs?: string[]; symlinks?: string[] };\n env?: { template?: string; secrets_file?: string };\n services?: Array<{ name: string; path: string; start_command: string; docker_command?: string; health_url?: string; port?: number }>;\n}\n\n/**\n * Test configuration\n */\nexport interface TestConfig {\n type: string;\n path: string;\n command: string;\n container?: boolean;\n container_name?: string;\n env?: Record<string, string>;\n}\n\n/**\n * Specialist configuration for per-project specialists\n */\nexport interface SpecialistConfig {\n /** Number of recent runs to include in context digest (default: 5) */\n 
context_runs?: number;\n /** Model to use for generating context digests (null = same as specialist) */\n digest_model?: string | null;\n /** Log retention policy */\n retention?: {\n /** Maximum days to keep logs */\n max_days: number;\n /** Maximum number of runs to keep (whichever is more permissive) */\n max_runs: number;\n };\n /** Per-specialist prompt overrides */\n prompts?: {\n 'review-agent'?: string;\n 'test-agent'?: string;\n 'merge-agent'?: string;\n };\n}\n\n/**\n * Project configuration\n */\nexport interface ProjectConfig {\n name: string;\n path: string;\n linear_team?: string;\n issue_routing?: IssueRoutingRule[];\n /** Workspace configuration */\n workspace?: WorkspaceConfig;\n /** Test configuration by name */\n tests?: Record<string, TestConfig>;\n /** Custom command to create workspaces (e.g., infra/new-feature for MYN) */\n workspace_command?: string;\n /** Custom command to remove workspaces */\n workspace_remove_command?: string;\n /** Rally project OID (e.g., \"/project/822404704163\") for per-project Rally scoping */\n rally_project?: string;\n /** Specialist agent configuration */\n specialists?: SpecialistConfig;\n}\n\n/**\n * Full projects configuration file\n */\nexport interface ProjectsConfig {\n projects: Record<string, ProjectConfig>;\n}\n\n/**\n * Resolved project info for workspace creation\n */\nexport interface ResolvedProject {\n projectKey: string;\n projectName: string;\n projectPath: string;\n linearTeam?: string;\n}\n\n/**\n * Load projects configuration from ~/.panopticon/projects.yaml\n */\nexport function loadProjectsConfig(): ProjectsConfig {\n if (!existsSync(PROJECTS_CONFIG_FILE)) {\n return { projects: {} };\n }\n\n try {\n const content = readFileSync(PROJECTS_CONFIG_FILE, 'utf-8');\n const config = parseYaml(content) as ProjectsConfig;\n return config || { projects: {} };\n } catch (error: any) {\n console.error(`Failed to parse projects.yaml: ${error.message}`);\n return { projects: {} };\n }\n}\n\n/**\n * Save 
projects configuration\n */\nexport function saveProjectsConfig(config: ProjectsConfig): void {\n const dir = PANOPTICON_HOME;\n if (!existsSync(dir)) {\n mkdirSync(dir, { recursive: true });\n }\n\n const yaml = stringifyYaml(config, { indent: 2 });\n writeFileSync(PROJECTS_CONFIG_FILE, yaml, 'utf-8');\n}\n\n/**\n * Get a list of all registered projects\n */\nexport function listProjects(): Array<{ key: string; config: ProjectConfig }> {\n const config = loadProjectsConfig();\n return Object.entries(config.projects).map(([key, projectConfig]) => ({\n key,\n config: projectConfig,\n }));\n}\n\n/**\n * Add or update a project in the registry\n */\nexport function registerProject(key: string, projectConfig: ProjectConfig): void {\n const config = loadProjectsConfig();\n config.projects[key] = projectConfig;\n saveProjectsConfig(config);\n}\n\n/**\n * Remove a project from the registry\n */\nexport function unregisterProject(key: string): boolean {\n const config = loadProjectsConfig();\n if (config.projects[key]) {\n delete config.projects[key];\n saveProjectsConfig(config);\n return true;\n }\n return false;\n}\n\n/**\n * Extract Linear team prefix from an issue ID\n * E.g., \"MIN-123\" -> \"MIN\", \"PAN-456\" -> \"PAN\"\n */\nexport function extractTeamPrefix(issueId: string): string | null {\n const match = issueId.match(/^([A-Z]+)-\\d+$/i);\n return match ? 
match[1].toUpperCase() : null;\n}\n\n/**\n * Find project by Linear team prefix\n */\nexport function findProjectByTeam(teamPrefix: string): ProjectConfig | null {\n const config = loadProjectsConfig();\n\n for (const [, projectConfig] of Object.entries(config.projects)) {\n if (projectConfig.linear_team?.toUpperCase() === teamPrefix.toUpperCase()) {\n return projectConfig;\n }\n }\n\n return null;\n}\n\n/**\n * Resolve the correct project path for an issue based on labels\n *\n * @param project - The project config\n * @param labels - Array of label names from the Linear issue\n * @returns The resolved path (may differ from project.path based on routing rules)\n */\nexport function resolveProjectPath(project: ProjectConfig, labels: string[] = []): string {\n if (!project.issue_routing || project.issue_routing.length === 0) {\n return project.path;\n }\n\n // Normalize labels to lowercase for comparison\n const normalizedLabels = labels.map(l => l.toLowerCase());\n\n // First, check label-based routing rules\n for (const rule of project.issue_routing) {\n if (rule.labels && rule.labels.length > 0) {\n const ruleLabels = rule.labels.map(l => l.toLowerCase());\n const hasMatch = ruleLabels.some(label => normalizedLabels.includes(label));\n if (hasMatch) {\n return rule.path;\n }\n }\n }\n\n // Then, find default rule\n for (const rule of project.issue_routing) {\n if (rule.default) {\n return rule.path;\n }\n }\n\n // Fall back to project path\n return project.path;\n}\n\n/**\n * Resolve project from an issue ID (and optional labels)\n *\n * @param issueId - Linear issue ID (e.g., \"MIN-123\")\n * @param labels - Optional array of label names\n * @returns Resolved project info or null if not found\n */\nexport function resolveProjectFromIssue(\n issueId: string,\n labels: string[] = []\n): ResolvedProject | null {\n const teamPrefix = extractTeamPrefix(issueId);\n if (!teamPrefix) {\n return null;\n }\n\n const config = loadProjectsConfig();\n\n // Find project by 
team prefix\n for (const [key, projectConfig] of Object.entries(config.projects)) {\n if (projectConfig.linear_team?.toUpperCase() === teamPrefix) {\n const resolvedPath = resolveProjectPath(projectConfig, labels);\n return {\n projectKey: key,\n projectName: projectConfig.name,\n projectPath: resolvedPath,\n linearTeam: projectConfig.linear_team,\n };\n }\n }\n\n return null;\n}\n\n/**\n * Get a project by key\n */\nexport function getProject(key: string): ProjectConfig | null {\n const config = loadProjectsConfig();\n return config.projects[key] || null;\n}\n\n/**\n * Check if projects.yaml exists and has any projects\n */\nexport function hasProjects(): boolean {\n const config = loadProjectsConfig();\n return Object.keys(config.projects).length > 0;\n}\n\n/**\n * Create a default projects.yaml with example structure\n */\nexport function createDefaultProjectsConfig(): ProjectsConfig {\n const defaultConfig: ProjectsConfig = {\n projects: {\n // Example project - commented out in actual file\n },\n };\n\n return defaultConfig;\n}\n\n/**\n * Initialize projects.yaml with example configuration\n */\nexport function initializeProjectsConfig(): void {\n if (existsSync(PROJECTS_CONFIG_FILE)) {\n console.log(`Projects config already exists at ${PROJECTS_CONFIG_FILE}`);\n return;\n }\n\n const exampleYaml = `# Panopticon Project Registry\n# Maps Linear teams to project paths for workspace creation\n\nprojects:\n # Example: Mind Your Now project\n # myn:\n # name: \"Mind Your Now\"\n # path: /home/user/projects/myn\n # linear_team: MIN\n # issue_routing:\n # # Route docs/marketing issues to docs repo\n # - labels: [docs, marketing, seo, landing-pages]\n # path: /home/user/projects/myn/docs\n # # Default: main repo\n # - default: true\n # path: /home/user/projects/myn\n # specialists:\n # context_runs: 5\n # digest_model: null # Use same model as specialist\n # retention:\n # max_days: 30\n # max_runs: 50\n # prompts:\n # review-agent: |\n # Pay special attention to:\n # 
- Database migration safety\n # - API backward compatibility\n\n # Example: Panopticon itself\n # panopticon:\n # name: \"Panopticon\"\n # path: /home/user/projects/panopticon\n # linear_team: PAN\n`;\n\n const dir = PANOPTICON_HOME;\n if (!existsSync(dir)) {\n mkdirSync(dir, { recursive: true });\n }\n\n writeFileSync(PROJECTS_CONFIG_FILE, exampleYaml, 'utf-8');\n console.log(`Created example projects config at ${PROJECTS_CONFIG_FILE}`);\n}\n\n/**\n * Default specialist configuration values\n */\nconst DEFAULT_SPECIALIST_CONFIG: Required<SpecialistConfig> = {\n context_runs: 5,\n digest_model: null,\n retention: {\n max_days: 30,\n max_runs: 50,\n },\n prompts: {},\n};\n\n/**\n * Get specialist configuration for a project with defaults\n *\n * @param projectKey - Project key\n * @returns Specialist config with defaults applied\n */\nexport function getSpecialistConfig(projectKey: string): Required<SpecialistConfig> {\n const project = getProject(projectKey);\n\n if (!project || !project.specialists) {\n return DEFAULT_SPECIALIST_CONFIG;\n }\n\n return {\n context_runs: project.specialists.context_runs ?? DEFAULT_SPECIALIST_CONFIG.context_runs,\n digest_model: project.specialists.digest_model ?? DEFAULT_SPECIALIST_CONFIG.digest_model,\n retention: {\n max_days: project.specialists.retention?.max_days ?? DEFAULT_SPECIALIST_CONFIG.retention.max_days,\n max_runs: project.specialists.retention?.max_runs ?? DEFAULT_SPECIALIST_CONFIG.retention.max_runs,\n },\n prompts: project.specialists.prompts ?? 
DEFAULT_SPECIALIST_CONFIG.prompts,\n };\n}\n\n/**\n * Get retention policy for a project's specialists\n *\n * @param projectKey - Project key\n * @returns Retention policy\n */\nexport function getSpecialistRetention(projectKey: string): { max_days: number; max_runs: number } {\n const config = getSpecialistConfig(projectKey);\n return config.retention;\n}\n\n/**\n * Find all projects that have a rally_project configured.\n * Returns array of { key, config } for projects with Rally project OIDs.\n */\nexport function findProjectsByRallyProject(): Array<{ key: string; config: ProjectConfig }> {\n const config = loadProjectsConfig();\n return Object.entries(config.projects)\n .filter(([, projectConfig]) => !!projectConfig.rally_project)\n .map(([key, projectConfig]) => ({ key, config: projectConfig }));\n}\n\n/**\n * Get custom prompt override for a specialist (if configured)\n *\n * @param projectKey - Project key\n * @param specialistType - Specialist type\n * @returns Custom prompt or null if not configured\n */\nexport function getSpecialistPromptOverride(\n projectKey: string,\n specialistType: 'review-agent' | 'test-agent' | 'merge-agent'\n): string | null {\n const config = getSpecialistConfig(projectKey);\n return config.prompts[specialistType] || 
null;\n}\n"],"mappings":";;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAMA,SAAS,YAAY,cAAc,eAAe,iBAAiB;AACnE,SAAS,YAAY;AACrB,SAAS,SAAS,WAAW,aAAa,qBAAqB;AA2GxD,SAAS,qBAAqC;AACnD,MAAI,CAAC,WAAW,oBAAoB,GAAG;AACrC,WAAO,EAAE,UAAU,CAAC,EAAE;AAAA,EACxB;AAEA,MAAI;AACF,UAAM,UAAU,aAAa,sBAAsB,OAAO;AAC1D,UAAM,SAAS,UAAU,OAAO;AAChC,WAAO,UAAU,EAAE,UAAU,CAAC,EAAE;AAAA,EAClC,SAAS,OAAY;AACnB,YAAQ,MAAM,kCAAkC,MAAM,OAAO,EAAE;AAC/D,WAAO,EAAE,UAAU,CAAC,EAAE;AAAA,EACxB;AACF;AAKO,SAAS,mBAAmB,QAA8B;AAC/D,QAAM,MAAM;AACZ,MAAI,CAAC,WAAW,GAAG,GAAG;AACpB,cAAU,KAAK,EAAE,WAAW,KAAK,CAAC;AAAA,EACpC;AAEA,QAAM,OAAO,cAAc,QAAQ,EAAE,QAAQ,EAAE,CAAC;AAChD,gBAAc,sBAAsB,MAAM,OAAO;AACnD;AAKO,SAAS,eAA8D;AAC5E,QAAM,SAAS,mBAAmB;AAClC,SAAO,OAAO,QAAQ,OAAO,QAAQ,EAAE,IAAI,CAAC,CAAC,KAAK,aAAa,OAAO;AAAA,IACpE;AAAA,IACA,QAAQ;AAAA,EACV,EAAE;AACJ;AAKO,SAAS,gBAAgB,KAAa,eAAoC;AAC/E,QAAM,SAAS,mBAAmB;AAClC,SAAO,SAAS,GAAG,IAAI;AACvB,qBAAmB,MAAM;AAC3B;AAKO,SAAS,kBAAkB,KAAsB;AACtD,QAAM,SAAS,mBAAmB;AAClC,MAAI,OAAO,SAAS,GAAG,GAAG;AACxB,WAAO,OAAO,SAAS,GAAG;AAC1B,uBAAmB,MAAM;AACzB,WAAO;AAAA,EACT;AACA,SAAO;AACT;AAMO,SAAS,kBAAkB,SAAgC;AAChE,QAAM,QAAQ,QAAQ,MAAM,iBAAiB;AAC7C,SAAO,QAAQ,MAAM,CAAC,EAAE,YAAY,IAAI;AAC1C;AAKO,SAAS,kBAAkB,YAA0C;AAC1E,QAAM,SAAS,mBAAmB;AAElC,aAAW,CAAC,EAAE,aAAa,KAAK,OAAO,QAAQ,OAAO,QAAQ,GAAG;AAC/D,QAAI,cAAc,aAAa,YAAY,MAAM,WAAW,YAAY,GAAG;AACzE,aAAO;AAAA,IACT;AAAA,EACF;AAEA,SAAO;AACT;AASO,SAAS,mBAAmB,SAAwB,SAAmB,CAAC,GAAW;AACxF,MAAI,CAAC,QAAQ,iBAAiB,QAAQ,cAAc,WAAW,GAAG;AAChE,WAAO,QAAQ;AAAA,EACjB;AAGA,QAAM,mBAAmB,OAAO,IAAI,OAAK,EAAE,YAAY,CAAC;AAGxD,aAAW,QAAQ,QAAQ,eAAe;AACxC,QAAI,KAAK,UAAU,KAAK,OAAO,SAAS,GAAG;AACzC,YAAM,aAAa,KAAK,OAAO,IAAI,OAAK,EAAE,YAAY,CAAC;AACvD,YAAM,WAAW,WAAW,KAAK,WAAS,iBAAiB,SAAS,KAAK,CAAC;AAC1E,UAAI,UAAU;AACZ,eAAO,KAAK;AAAA,MACd;AAAA,IACF;AAAA,EACF;AAGA,aAAW,QAAQ,QAAQ,eAAe;AACxC,QAAI,KAAK,SAAS;AAChB,aAAO,KAAK;AAAA,IACd;AAAA,EACF;AAGA,SAAO,QAAQ;AACjB;AASO,SAAS,wBACd,SACA,SAAmB,CAAC,GACI;AACxB,QAAM,aAAa,kBAAkB,OAAO;AAC5C,MAAI,
CAAC,YAAY;AACf,WAAO;AAAA,EACT;AAEA,QAAM,SAAS,mBAAmB;AAGlC,aAAW,CAAC,KAAK,aAAa,KAAK,OAAO,QAAQ,OAAO,QAAQ,GAAG;AAClE,QAAI,cAAc,aAAa,YAAY,MAAM,YAAY;AAC3D,YAAM,eAAe,mBAAmB,eAAe,MAAM;AAC7D,aAAO;AAAA,QACL,YAAY;AAAA,QACZ,aAAa,cAAc;AAAA,QAC3B,aAAa;AAAA,QACb,YAAY,cAAc;AAAA,MAC5B;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AAKO,SAAS,WAAW,KAAmC;AAC5D,QAAM,SAAS,mBAAmB;AAClC,SAAO,OAAO,SAAS,GAAG,KAAK;AACjC;AAKO,SAAS,cAAuB;AACrC,QAAM,SAAS,mBAAmB;AAClC,SAAO,OAAO,KAAK,OAAO,QAAQ,EAAE,SAAS;AAC/C;AAKO,SAAS,8BAA8C;AAC5D,QAAM,gBAAgC;AAAA,IACpC,UAAU;AAAA;AAAA,IAEV;AAAA,EACF;AAEA,SAAO;AACT;AAKO,SAAS,2BAAiC;AAC/C,MAAI,WAAW,oBAAoB,GAAG;AACpC,YAAQ,IAAI,qCAAqC,oBAAoB,EAAE;AACvE;AAAA,EACF;AAEA,QAAM,cAAc;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAmCpB,QAAM,MAAM;AACZ,MAAI,CAAC,WAAW,GAAG,GAAG;AACpB,cAAU,KAAK,EAAE,WAAW,KAAK,CAAC;AAAA,EACpC;AAEA,gBAAc,sBAAsB,aAAa,OAAO;AACxD,UAAQ,IAAI,sCAAsC,oBAAoB,EAAE;AAC1E;AAqBO,SAAS,oBAAoB,YAAgD;AAClF,QAAM,UAAU,WAAW,UAAU;AAErC,MAAI,CAAC,WAAW,CAAC,QAAQ,aAAa;AACpC,WAAO;AAAA,EACT;AAEA,SAAO;AAAA,IACL,cAAc,QAAQ,YAAY,gBAAgB,0BAA0B;AAAA,IAC5E,cAAc,QAAQ,YAAY,gBAAgB,0BAA0B;AAAA,IAC5E,WAAW;AAAA,MACT,UAAU,QAAQ,YAAY,WAAW,YAAY,0BAA0B,UAAU;AAAA,MACzF,UAAU,QAAQ,YAAY,WAAW,YAAY,0BAA0B,UAAU;AAAA,IAC3F;AAAA,IACA,SAAS,QAAQ,YAAY,WAAW,0BAA0B;AAAA,EACpE;AACF;AAQO,SAAS,uBAAuB,YAA4D;AACjG,QAAM,SAAS,oBAAoB,UAAU;AAC7C,SAAO,OAAO;AAChB;AAMO,SAAS,6BAA4E;AAC1F,QAAM,SAAS,mBAAmB;AAClC,SAAO,OAAO,QAAQ,OAAO,QAAQ,EAClC,OAAO,CAAC,CAAC,EAAE,aAAa,MAAM,CAAC,CAAC,cAAc,aAAa,EAC3D,IAAI,CAAC,CAAC,KAAK,aAAa,OAAO,EAAE,KAAK,QAAQ,cAAc,EAAE;AACnE;AASO,SAAS,4BACd,YACA,gBACe;AACf,QAAM,SAAS,oBAAoB,UAAU;AAC7C,SAAO,OAAO,QAAQ,cAAc,KAAK;AAC3C;AAzaA,IAWa,sBAyVP;AApWN;AAAA;AAAA;AASA;AAEO,IAAM,uBAAuB,KAAK,iBAAiB,eAAe;AAyVzE,IAAM,4BAAwD;AAAA,MAC5D,cAAc;AAAA,MACd,cAAc;AAAA,MACd,WAAW;AAAA,QACT,UAAU;AAAA,QACV,UAAU;AAAA,MACZ;AAAA,MACA,SAAS,CAAC;AAAA,IACZ;AAAA;AAAA;","names":[]}
|
|
@@ -0,0 +1,215 @@
|
|
|
1
|
+
import {
|
|
2
|
+
init_workspace_config,
|
|
3
|
+
replacePlaceholders
|
|
4
|
+
} from "./chunk-CWELWPWQ.js";
|
|
5
|
+
import {
|
|
6
|
+
__esm,
|
|
7
|
+
init_esm_shims
|
|
8
|
+
} from "./chunk-ZHC57RCV.js";
|
|
9
|
+
|
|
10
|
+
// src/lib/tunnel.ts
|
|
11
|
+
import { readFileSync } from "fs";
|
|
12
|
+
import { resolve } from "path";
|
|
13
|
+
import { homedir } from "os";
|
|
14
|
+
function readCloudflareCredentials(certPath) {
|
|
15
|
+
try {
|
|
16
|
+
const resolvedPath = certPath.replace(/^~/, homedir());
|
|
17
|
+
const pem = readFileSync(resolve(resolvedPath), "utf-8");
|
|
18
|
+
const b64 = pem.split("\n").filter((line) => !line.startsWith("-----")).join("");
|
|
19
|
+
const json = JSON.parse(Buffer.from(b64, "base64").toString("utf-8"));
|
|
20
|
+
return {
|
|
21
|
+
apiToken: json.apiToken,
|
|
22
|
+
accountId: json.accountID,
|
|
23
|
+
zoneId: json.zoneID
|
|
24
|
+
};
|
|
25
|
+
} catch (err) {
|
|
26
|
+
return null;
|
|
27
|
+
}
|
|
28
|
+
}
|
|
29
|
+
async function cfFetch(path, apiToken, method = "GET", body) {
|
|
30
|
+
const controller = new AbortController();
|
|
31
|
+
const timeout = setTimeout(() => controller.abort(), FETCH_TIMEOUT);
|
|
32
|
+
try {
|
|
33
|
+
const resp = await fetch(`${CF_API}${path}`, {
|
|
34
|
+
method,
|
|
35
|
+
headers: {
|
|
36
|
+
Authorization: `Bearer ${apiToken}`,
|
|
37
|
+
"Content-Type": "application/json"
|
|
38
|
+
},
|
|
39
|
+
body: body ? JSON.stringify(body) : void 0,
|
|
40
|
+
signal: controller.signal
|
|
41
|
+
});
|
|
42
|
+
const json = await resp.json();
|
|
43
|
+
return { ok: json.success !== false, data: json.result, errors: json.errors };
|
|
44
|
+
} catch (err) {
|
|
45
|
+
return { ok: false, data: null, errors: [{ message: err.message }] };
|
|
46
|
+
} finally {
|
|
47
|
+
clearTimeout(timeout);
|
|
48
|
+
}
|
|
49
|
+
}
|
|
50
|
+
function resolveHostnames(hostnames, placeholders) {
|
|
51
|
+
return hostnames.map((h) => ({
|
|
52
|
+
hostname: replacePlaceholders(h.pattern, placeholders),
|
|
53
|
+
httpHostHeader: h.http_host_header ? replacePlaceholders(h.http_host_header, placeholders) : void 0,
|
|
54
|
+
noTlsVerify: h.no_tls_verify !== false
|
|
55
|
+
// default true
|
|
56
|
+
}));
|
|
57
|
+
}
|
|
58
|
+
async function addTunnelIngress(config, placeholders) {
|
|
59
|
+
const steps = [];
|
|
60
|
+
let allOk = true;
|
|
61
|
+
const creds = readCloudflareCredentials(config.credentials_file);
|
|
62
|
+
if (!creds) {
|
|
63
|
+
return { success: false, steps: ["[tunnel] Failed to read Cloudflare credentials from " + config.credentials_file] };
|
|
64
|
+
}
|
|
65
|
+
steps.push("[tunnel] Read Cloudflare credentials");
|
|
66
|
+
const resolved = resolveHostnames(config.hostnames, placeholders);
|
|
67
|
+
const tunnelPath = `/accounts/${creds.accountId}/cfd_tunnel/${config.tunnel_id}/configurations`;
|
|
68
|
+
const current = await cfFetch(tunnelPath, creds.apiToken);
|
|
69
|
+
if (!current.ok) {
|
|
70
|
+
return { success: false, steps: [...steps, `[tunnel] Failed to get tunnel config: ${JSON.stringify(current.errors)}`] };
|
|
71
|
+
}
|
|
72
|
+
const tunnelConfig = current.data;
|
|
73
|
+
const ingress = tunnelConfig.config?.ingress || [];
|
|
74
|
+
steps.push(`[tunnel] Current tunnel config has ${ingress.length} ingress rules`);
|
|
75
|
+
let modified = false;
|
|
76
|
+
for (const h of resolved) {
|
|
77
|
+
if (ingress.some((r) => r.hostname === h.hostname)) {
|
|
78
|
+
steps.push(`[tunnel] Ingress rule for ${h.hostname} already exists, skipping`);
|
|
79
|
+
continue;
|
|
80
|
+
}
|
|
81
|
+
const originRequest = {};
|
|
82
|
+
if (h.noTlsVerify) originRequest.noTLSVerify = true;
|
|
83
|
+
if (h.httpHostHeader) originRequest.httpHostHeader = h.httpHostHeader;
|
|
84
|
+
const newRule = {
|
|
85
|
+
service: config.service_target,
|
|
86
|
+
hostname: h.hostname,
|
|
87
|
+
originRequest: Object.keys(originRequest).length > 0 ? originRequest : void 0
|
|
88
|
+
};
|
|
89
|
+
const catchAllIdx = ingress.findIndex((r) => !r.hostname);
|
|
90
|
+
if (catchAllIdx >= 0) {
|
|
91
|
+
ingress.splice(catchAllIdx, 0, newRule);
|
|
92
|
+
} else {
|
|
93
|
+
ingress.push(newRule);
|
|
94
|
+
}
|
|
95
|
+
modified = true;
|
|
96
|
+
steps.push(`[tunnel] Added ingress rule for ${h.hostname}`);
|
|
97
|
+
}
|
|
98
|
+
if (modified) {
|
|
99
|
+
const putResult = await cfFetch(tunnelPath, creds.apiToken, "PUT", {
|
|
100
|
+
config: { ingress }
|
|
101
|
+
});
|
|
102
|
+
if (!putResult.ok) {
|
|
103
|
+
steps.push(`[tunnel] Failed to update tunnel config: ${JSON.stringify(putResult.errors)}`);
|
|
104
|
+
allOk = false;
|
|
105
|
+
} else {
|
|
106
|
+
steps.push("[tunnel] Updated tunnel ingress configuration");
|
|
107
|
+
}
|
|
108
|
+
}
|
|
109
|
+
for (const h of resolved) {
|
|
110
|
+
const dnsResult = await cfFetch(
|
|
111
|
+
`/zones/${creds.zoneId}/dns_records`,
|
|
112
|
+
creds.apiToken,
|
|
113
|
+
"POST",
|
|
114
|
+
{
|
|
115
|
+
type: "CNAME",
|
|
116
|
+
name: h.hostname,
|
|
117
|
+
content: `${config.tunnel_id}.cfargotunnel.com`,
|
|
118
|
+
proxied: true
|
|
119
|
+
}
|
|
120
|
+
);
|
|
121
|
+
if (!dnsResult.ok) {
|
|
122
|
+
const errMsg = dnsResult.errors?.map((e) => e.message).join(", ") || "unknown error";
|
|
123
|
+
if (errMsg.includes("already exists") || errMsg.includes("already been taken")) {
|
|
124
|
+
steps.push(`[tunnel] DNS CNAME for ${h.hostname} already exists`);
|
|
125
|
+
} else {
|
|
126
|
+
steps.push(`[tunnel] Failed to create DNS CNAME for ${h.hostname}: ${errMsg}`);
|
|
127
|
+
allOk = false;
|
|
128
|
+
}
|
|
129
|
+
} else {
|
|
130
|
+
steps.push(`[tunnel] Created DNS CNAME: ${h.hostname} \u2192 ${config.tunnel_id}.cfargotunnel.com`);
|
|
131
|
+
}
|
|
132
|
+
}
|
|
133
|
+
return { success: allOk, steps };
|
|
134
|
+
}
|
|
135
|
+
async function removeTunnelIngress(config, placeholders) {
  // Tears down what addTunnelIngress set up: strips this workspace's ingress
  // rules from the Cloudflare tunnel configuration, then deletes the matching
  // DNS CNAME records. Returns { success, steps } where steps is a
  // human-readable audit trail of every action attempted.
  const log = [];
  let healthy = true;

  const cf = readCloudflareCredentials(config.credentials_file);
  if (!cf) {
    return { success: false, steps: ["[tunnel] Failed to read Cloudflare credentials from " + config.credentials_file] };
  }
  log.push("[tunnel] Read Cloudflare credentials");

  const hosts = resolveHostnames(config.hostnames, placeholders);
  const doomed = new Set(hosts.map((host) => host.hostname));

  // Fetch the remote tunnel configuration and drop every ingress rule whose
  // hostname belongs to this workspace. Catch-all rules (no hostname) survive.
  const cfgPath = `/accounts/${cf.accountId}/cfd_tunnel/${config.tunnel_id}/configurations`;
  const cfgResp = await cfFetch(cfgPath, cf.apiToken);
  if (!cfgResp.ok) {
    log.push(`[tunnel] Failed to get tunnel config: ${JSON.stringify(cfgResp.errors)}`);
    // Keep going: the DNS cleanup below is still worth attempting.
    healthy = false;
  } else {
    const remoteCfg = cfgResp.data;
    const rules = remoteCfg.config?.ingress || [];
    const before = rules.length;
    const kept = rules.filter((rule) => !rule.hostname || !doomed.has(rule.hostname));
    if (kept.length < before) {
      const putResp = await cfFetch(cfgPath, cf.apiToken, "PUT", {
        config: { ingress: kept }
      });
      if (putResp.ok) {
        log.push(`[tunnel] Removed ${before - kept.length} ingress rule(s)`);
      } else {
        log.push(`[tunnel] Failed to update tunnel config: ${JSON.stringify(putResp.errors)}`);
        healthy = false;
      }
    } else {
      log.push("[tunnel] No matching ingress rules found to remove");
    }
  }

  // Delete the CNAME record(s) for each resolved hostname. Lookups and
  // deletions run sequentially so the step log stays deterministic.
  for (const host of hosts) {
    const lookup = await cfFetch(
      `/zones/${cf.zoneId}/dns_records?name=${encodeURIComponent(host.hostname)}&type=CNAME`,
      cf.apiToken
    );
    if (!lookup.ok) {
      log.push(`[tunnel] Failed to look up DNS record for ${host.hostname}: ${JSON.stringify(lookup.errors)}`);
      healthy = false;
      continue;
    }

    const matches = Array.isArray(lookup.data) ? lookup.data : [];
    if (matches.length === 0) {
      log.push(`[tunnel] No DNS CNAME record found for ${host.hostname}`);
      continue;
    }

    for (const rec of matches) {
      const deletion = await cfFetch(
        `/zones/${cf.zoneId}/dns_records/${rec.id}`,
        cf.apiToken,
        "DELETE"
      );
      if (deletion.ok) {
        log.push(`[tunnel] Deleted DNS CNAME for ${host.hostname}`);
      } else {
        log.push(`[tunnel] Failed to delete DNS record ${rec.id} for ${host.hostname}: ${JSON.stringify(deletion.errors)}`);
        healthy = false;
      }
    }
  }

  return { success: healthy, steps: log };
}
|
|
200
|
+
// Module-scoped constants, assigned lazily by init_tunnel (esbuild's __esm
// wrapper preserves the original ESM evaluation order of src/lib/tunnel.ts).
var CF_API, FETCH_TIMEOUT;
var init_tunnel = __esm({
  "src/lib/tunnel.ts"() {
    init_esm_shims();
    init_workspace_config();
    // Cloudflare v4 REST API root used by cfFetch.
    CF_API = "https://api.cloudflare.com/client/v4";
    // Per-request abort timeout in milliseconds (10 s).
    FETCH_TIMEOUT = 1e4;
  }
});
|
|
209
|
+
|
|
210
|
+
export {
|
|
211
|
+
addTunnelIngress,
|
|
212
|
+
removeTunnelIngress,
|
|
213
|
+
init_tunnel
|
|
214
|
+
};
|
|
215
|
+
//# sourceMappingURL=chunk-PELXV435.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/lib/tunnel.ts"],"sourcesContent":["/**\n * Cloudflare Tunnel Management\n *\n * Manages tunnel ingress rules and DNS CNAME records for workspace lifecycle.\n * Called during workspace create (addTunnelIngress) and workspace remove/deep-wipe (removeTunnelIngress).\n */\n\nimport { readFileSync } from 'fs';\nimport { resolve } from 'path';\nimport { homedir } from 'os';\nimport { TunnelConfig, TunnelHostname, TemplatePlaceholders, replacePlaceholders } from './workspace-config.js';\n\nexport interface TunnelResult {\n success: boolean;\n steps: string[];\n}\n\ninterface CloudflareCredentials {\n apiToken: string;\n accountId: string;\n zoneId: string;\n}\n\ninterface CloudflareIngressRule {\n service: string;\n hostname?: string;\n originRequest?: Record<string, unknown>;\n}\n\ninterface CloudflareTunnelConfig {\n config: {\n ingress: CloudflareIngressRule[];\n };\n}\n\nconst CF_API = 'https://api.cloudflare.com/client/v4';\nconst FETCH_TIMEOUT = 10_000;\n\n/**\n * Read API token from Cloudflare cert.pem file.\n * The cert.pem contains a PEM-wrapped base64 JSON blob with { zoneID, accountID, apiToken }.\n */\nfunction readCloudflareCredentials(certPath: string): CloudflareCredentials | null {\n try {\n const resolvedPath = certPath.replace(/^~/, homedir());\n const pem = readFileSync(resolve(resolvedPath), 'utf-8');\n // Strip PEM headers/trailers and decode\n const b64 = pem\n .split('\\n')\n .filter(line => !line.startsWith('-----'))\n .join('');\n const json = JSON.parse(Buffer.from(b64, 'base64').toString('utf-8'));\n return {\n apiToken: json.apiToken,\n accountId: json.accountID,\n zoneId: json.zoneID,\n };\n } catch (err) {\n return null;\n }\n}\n\n/**\n * Make an authenticated Cloudflare API request.\n */\nasync function cfFetch(\n path: string,\n apiToken: string,\n method: 'GET' | 'POST' | 'PUT' | 'DELETE' = 'GET',\n body?: unknown,\n): Promise<{ ok: boolean; data: any; errors?: any[] }> {\n const controller = new 
AbortController();\n const timeout = setTimeout(() => controller.abort(), FETCH_TIMEOUT);\n\n try {\n const resp = await fetch(`${CF_API}${path}`, {\n method,\n headers: {\n Authorization: `Bearer ${apiToken}`,\n 'Content-Type': 'application/json',\n },\n body: body ? JSON.stringify(body) : undefined,\n signal: controller.signal,\n });\n const json = await resp.json();\n return { ok: json.success !== false, data: json.result, errors: json.errors };\n } catch (err: any) {\n return { ok: false, data: null, errors: [{ message: err.message }] };\n } finally {\n clearTimeout(timeout);\n }\n}\n\n/**\n * Resolve hostnames from config, replacing template placeholders.\n */\nfunction resolveHostnames(\n hostnames: TunnelHostname[],\n placeholders: TemplatePlaceholders,\n): Array<{ hostname: string; httpHostHeader?: string; noTlsVerify: boolean }> {\n return hostnames.map(h => ({\n hostname: replacePlaceholders(h.pattern, placeholders),\n httpHostHeader: h.http_host_header ? replacePlaceholders(h.http_host_header, placeholders) : undefined,\n noTlsVerify: h.no_tls_verify !== false, // default true\n }));\n}\n\n/**\n * Add tunnel ingress rules and DNS CNAME records for a workspace.\n * Called during workspace creation.\n */\nexport async function addTunnelIngress(\n config: TunnelConfig,\n placeholders: TemplatePlaceholders,\n): Promise<TunnelResult> {\n const steps: string[] = [];\n let allOk = true;\n\n // Read credentials\n const creds = readCloudflareCredentials(config.credentials_file);\n if (!creds) {\n return { success: false, steps: ['[tunnel] Failed to read Cloudflare credentials from ' + config.credentials_file] };\n }\n steps.push('[tunnel] Read Cloudflare credentials');\n\n const resolved = resolveHostnames(config.hostnames, placeholders);\n\n // Get current tunnel configuration\n const tunnelPath = `/accounts/${creds.accountId}/cfd_tunnel/${config.tunnel_id}/configurations`;\n const current = await cfFetch(tunnelPath, creds.apiToken);\n if (!current.ok) {\n 
return { success: false, steps: [...steps, `[tunnel] Failed to get tunnel config: ${JSON.stringify(current.errors)}`] };\n }\n\n const tunnelConfig: CloudflareTunnelConfig = current.data;\n const ingress: CloudflareIngressRule[] = tunnelConfig.config?.ingress || [];\n steps.push(`[tunnel] Current tunnel config has ${ingress.length} ingress rules`);\n\n // Add new ingress rules (before the catch-all)\n let modified = false;\n for (const h of resolved) {\n // Skip if rule already exists\n if (ingress.some(r => r.hostname === h.hostname)) {\n steps.push(`[tunnel] Ingress rule for ${h.hostname} already exists, skipping`);\n continue;\n }\n\n const originRequest: Record<string, unknown> = {};\n if (h.noTlsVerify) originRequest.noTLSVerify = true;\n if (h.httpHostHeader) originRequest.httpHostHeader = h.httpHostHeader;\n\n const newRule: CloudflareIngressRule = {\n service: config.service_target,\n hostname: h.hostname,\n originRequest: Object.keys(originRequest).length > 0 ? originRequest : undefined,\n };\n\n // Insert before the last rule (catch-all has no hostname)\n const catchAllIdx = ingress.findIndex(r => !r.hostname);\n if (catchAllIdx >= 0) {\n ingress.splice(catchAllIdx, 0, newRule);\n } else {\n ingress.push(newRule);\n }\n modified = true;\n steps.push(`[tunnel] Added ingress rule for ${h.hostname}`);\n }\n\n // Push updated tunnel config\n if (modified) {\n const putResult = await cfFetch(tunnelPath, creds.apiToken, 'PUT', {\n config: { ingress },\n });\n if (!putResult.ok) {\n steps.push(`[tunnel] Failed to update tunnel config: ${JSON.stringify(putResult.errors)}`);\n allOk = false;\n } else {\n steps.push('[tunnel] Updated tunnel ingress configuration');\n }\n }\n\n // Create DNS CNAME records\n for (const h of resolved) {\n const dnsResult = await cfFetch(\n `/zones/${creds.zoneId}/dns_records`,\n creds.apiToken,\n 'POST',\n {\n type: 'CNAME',\n name: h.hostname,\n content: `${config.tunnel_id}.cfargotunnel.com`,\n proxied: true,\n },\n );\n if 
(!dnsResult.ok) {\n const errMsg = dnsResult.errors?.map((e: any) => e.message).join(', ') || 'unknown error';\n // Record already exists is not a failure\n if (errMsg.includes('already exists') || errMsg.includes('already been taken')) {\n steps.push(`[tunnel] DNS CNAME for ${h.hostname} already exists`);\n } else {\n steps.push(`[tunnel] Failed to create DNS CNAME for ${h.hostname}: ${errMsg}`);\n allOk = false;\n }\n } else {\n steps.push(`[tunnel] Created DNS CNAME: ${h.hostname} → ${config.tunnel_id}.cfargotunnel.com`);\n }\n }\n\n return { success: allOk, steps };\n}\n\n/**\n * Remove tunnel ingress rules and DNS CNAME records for a workspace.\n * Called during workspace removal and deep-wipe.\n */\nexport async function removeTunnelIngress(\n config: TunnelConfig,\n placeholders: TemplatePlaceholders,\n): Promise<TunnelResult> {\n const steps: string[] = [];\n let allOk = true;\n\n // Read credentials\n const creds = readCloudflareCredentials(config.credentials_file);\n if (!creds) {\n return { success: false, steps: ['[tunnel] Failed to read Cloudflare credentials from ' + config.credentials_file] };\n }\n steps.push('[tunnel] Read Cloudflare credentials');\n\n const resolved = resolveHostnames(config.hostnames, placeholders);\n const hostnameSet = new Set(resolved.map(h => h.hostname));\n\n // Get current tunnel configuration\n const tunnelPath = `/accounts/${creds.accountId}/cfd_tunnel/${config.tunnel_id}/configurations`;\n const current = await cfFetch(tunnelPath, creds.apiToken);\n if (!current.ok) {\n steps.push(`[tunnel] Failed to get tunnel config: ${JSON.stringify(current.errors)}`);\n // Continue to attempt DNS cleanup even if tunnel config read fails\n allOk = false;\n } else {\n const tunnelConfig: CloudflareTunnelConfig = current.data;\n const ingress: CloudflareIngressRule[] = tunnelConfig.config?.ingress || [];\n const originalCount = ingress.length;\n\n // Filter out matching ingress rules\n const filtered = ingress.filter(r => !r.hostname || 
!hostnameSet.has(r.hostname));\n\n if (filtered.length < originalCount) {\n const putResult = await cfFetch(tunnelPath, creds.apiToken, 'PUT', {\n config: { ingress: filtered },\n });\n if (!putResult.ok) {\n steps.push(`[tunnel] Failed to update tunnel config: ${JSON.stringify(putResult.errors)}`);\n allOk = false;\n } else {\n steps.push(`[tunnel] Removed ${originalCount - filtered.length} ingress rule(s)`);\n }\n } else {\n steps.push('[tunnel] No matching ingress rules found to remove');\n }\n }\n\n // Remove DNS CNAME records\n for (const h of resolved) {\n // Find the DNS record\n const listResult = await cfFetch(\n `/zones/${creds.zoneId}/dns_records?name=${encodeURIComponent(h.hostname)}&type=CNAME`,\n creds.apiToken,\n );\n if (!listResult.ok) {\n steps.push(`[tunnel] Failed to look up DNS record for ${h.hostname}: ${JSON.stringify(listResult.errors)}`);\n allOk = false;\n continue;\n }\n\n const records = Array.isArray(listResult.data) ? listResult.data : [];\n if (records.length === 0) {\n steps.push(`[tunnel] No DNS CNAME record found for ${h.hostname}`);\n continue;\n }\n\n for (const record of records) {\n const delResult = await cfFetch(\n `/zones/${creds.zoneId}/dns_records/${record.id}`,\n creds.apiToken,\n 'DELETE',\n );\n if (!delResult.ok) {\n steps.push(`[tunnel] Failed to delete DNS record ${record.id} for ${h.hostname}: ${JSON.stringify(delResult.errors)}`);\n allOk = false;\n } else {\n steps.push(`[tunnel] Deleted DNS CNAME for ${h.hostname}`);\n }\n }\n }\n\n return { success: allOk, steps 
};\n}\n"],"mappings":";;;;;;;;;;AAOA,SAAS,oBAAoB;AAC7B,SAAS,eAAe;AACxB,SAAS,eAAe;AAiCxB,SAAS,0BAA0B,UAAgD;AACjF,MAAI;AACF,UAAM,eAAe,SAAS,QAAQ,MAAM,QAAQ,CAAC;AACrD,UAAM,MAAM,aAAa,QAAQ,YAAY,GAAG,OAAO;AAEvD,UAAM,MAAM,IACT,MAAM,IAAI,EACV,OAAO,UAAQ,CAAC,KAAK,WAAW,OAAO,CAAC,EACxC,KAAK,EAAE;AACV,UAAM,OAAO,KAAK,MAAM,OAAO,KAAK,KAAK,QAAQ,EAAE,SAAS,OAAO,CAAC;AACpE,WAAO;AAAA,MACL,UAAU,KAAK;AAAA,MACf,WAAW,KAAK;AAAA,MAChB,QAAQ,KAAK;AAAA,IACf;AAAA,EACF,SAAS,KAAK;AACZ,WAAO;AAAA,EACT;AACF;AAKA,eAAe,QACb,MACA,UACA,SAA4C,OAC5C,MACqD;AACrD,QAAM,aAAa,IAAI,gBAAgB;AACvC,QAAM,UAAU,WAAW,MAAM,WAAW,MAAM,GAAG,aAAa;AAElE,MAAI;AACF,UAAM,OAAO,MAAM,MAAM,GAAG,MAAM,GAAG,IAAI,IAAI;AAAA,MAC3C;AAAA,MACA,SAAS;AAAA,QACP,eAAe,UAAU,QAAQ;AAAA,QACjC,gBAAgB;AAAA,MAClB;AAAA,MACA,MAAM,OAAO,KAAK,UAAU,IAAI,IAAI;AAAA,MACpC,QAAQ,WAAW;AAAA,IACrB,CAAC;AACD,UAAM,OAAO,MAAM,KAAK,KAAK;AAC7B,WAAO,EAAE,IAAI,KAAK,YAAY,OAAO,MAAM,KAAK,QAAQ,QAAQ,KAAK,OAAO;AAAA,EAC9E,SAAS,KAAU;AACjB,WAAO,EAAE,IAAI,OAAO,MAAM,MAAM,QAAQ,CAAC,EAAE,SAAS,IAAI,QAAQ,CAAC,EAAE;AAAA,EACrE,UAAE;AACA,iBAAa,OAAO;AAAA,EACtB;AACF;AAKA,SAAS,iBACP,WACA,cAC4E;AAC5E,SAAO,UAAU,IAAI,QAAM;AAAA,IACzB,UAAU,oBAAoB,EAAE,SAAS,YAAY;AAAA,IACrD,gBAAgB,EAAE,mBAAmB,oBAAoB,EAAE,kBAAkB,YAAY,IAAI;AAAA,IAC7F,aAAa,EAAE,kBAAkB;AAAA;AAAA,EACnC,EAAE;AACJ;AAMA,eAAsB,iBACpB,QACA,cACuB;AACvB,QAAM,QAAkB,CAAC;AACzB,MAAI,QAAQ;AAGZ,QAAM,QAAQ,0BAA0B,OAAO,gBAAgB;AAC/D,MAAI,CAAC,OAAO;AACV,WAAO,EAAE,SAAS,OAAO,OAAO,CAAC,yDAAyD,OAAO,gBAAgB,EAAE;AAAA,EACrH;AACA,QAAM,KAAK,sCAAsC;AAEjD,QAAM,WAAW,iBAAiB,OAAO,WAAW,YAAY;AAGhE,QAAM,aAAa,aAAa,MAAM,SAAS,eAAe,OAAO,SAAS;AAC9E,QAAM,UAAU,MAAM,QAAQ,YAAY,MAAM,QAAQ;AACxD,MAAI,CAAC,QAAQ,IAAI;AACf,WAAO,EAAE,SAAS,OAAO,OAAO,CAAC,GAAG,OAAO,yCAAyC,KAAK,UAAU,QAAQ,MAAM,CAAC,EAAE,EAAE;AAAA,EACxH;AAEA,QAAM,eAAuC,QAAQ;AACrD,QAAM,UAAmC,aAAa,QAAQ,WAAW,CAAC;AAC1E,QAAM,KAAK,sCAAsC,QAAQ,MAAM,gBAAgB;AAG/E,MAAI,WAAW;AACf,aAAW,KAAK,UAAU;AAExB,QAAI,QAAQ,KAAK,OAAK,EAAE,aAAa,EAAE,QAAQ,GAAG;AAChD,YAAM,KAAK,6BAA6B,EAAE,QAAQ,2BAA2B;AAC7E;AAAA,IACF;AAEA,UAAM,gBAAyC,CAAC;AAChD,QAAI,EAAE,Y
AAa,eAAc,cAAc;AAC/C,QAAI,EAAE,eAAgB,eAAc,iBAAiB,EAAE;AAEvD,UAAM,UAAiC;AAAA,MACrC,SAAS,OAAO;AAAA,MAChB,UAAU,EAAE;AAAA,MACZ,eAAe,OAAO,KAAK,aAAa,EAAE,SAAS,IAAI,gBAAgB;AAAA,IACzE;AAGA,UAAM,cAAc,QAAQ,UAAU,OAAK,CAAC,EAAE,QAAQ;AACtD,QAAI,eAAe,GAAG;AACpB,cAAQ,OAAO,aAAa,GAAG,OAAO;AAAA,IACxC,OAAO;AACL,cAAQ,KAAK,OAAO;AAAA,IACtB;AACA,eAAW;AACX,UAAM,KAAK,mCAAmC,EAAE,QAAQ,EAAE;AAAA,EAC5D;AAGA,MAAI,UAAU;AACZ,UAAM,YAAY,MAAM,QAAQ,YAAY,MAAM,UAAU,OAAO;AAAA,MACjE,QAAQ,EAAE,QAAQ;AAAA,IACpB,CAAC;AACD,QAAI,CAAC,UAAU,IAAI;AACjB,YAAM,KAAK,4CAA4C,KAAK,UAAU,UAAU,MAAM,CAAC,EAAE;AACzF,cAAQ;AAAA,IACV,OAAO;AACL,YAAM,KAAK,+CAA+C;AAAA,IAC5D;AAAA,EACF;AAGA,aAAW,KAAK,UAAU;AACxB,UAAM,YAAY,MAAM;AAAA,MACtB,UAAU,MAAM,MAAM;AAAA,MACtB,MAAM;AAAA,MACN;AAAA,MACA;AAAA,QACE,MAAM;AAAA,QACN,MAAM,EAAE;AAAA,QACR,SAAS,GAAG,OAAO,SAAS;AAAA,QAC5B,SAAS;AAAA,MACX;AAAA,IACF;AACA,QAAI,CAAC,UAAU,IAAI;AACjB,YAAM,SAAS,UAAU,QAAQ,IAAI,CAAC,MAAW,EAAE,OAAO,EAAE,KAAK,IAAI,KAAK;AAE1E,UAAI,OAAO,SAAS,gBAAgB,KAAK,OAAO,SAAS,oBAAoB,GAAG;AAC9E,cAAM,KAAK,0BAA0B,EAAE,QAAQ,iBAAiB;AAAA,MAClE,OAAO;AACL,cAAM,KAAK,2CAA2C,EAAE,QAAQ,KAAK,MAAM,EAAE;AAC7E,gBAAQ;AAAA,MACV;AAAA,IACF,OAAO;AACL,YAAM,KAAK,+BAA+B,EAAE,QAAQ,WAAM,OAAO,SAAS,mBAAmB;AAAA,IAC/F;AAAA,EACF;AAEA,SAAO,EAAE,SAAS,OAAO,MAAM;AACjC;AAMA,eAAsB,oBACpB,QACA,cACuB;AACvB,QAAM,QAAkB,CAAC;AACzB,MAAI,QAAQ;AAGZ,QAAM,QAAQ,0BAA0B,OAAO,gBAAgB;AAC/D,MAAI,CAAC,OAAO;AACV,WAAO,EAAE,SAAS,OAAO,OAAO,CAAC,yDAAyD,OAAO,gBAAgB,EAAE;AAAA,EACrH;AACA,QAAM,KAAK,sCAAsC;AAEjD,QAAM,WAAW,iBAAiB,OAAO,WAAW,YAAY;AAChE,QAAM,cAAc,IAAI,IAAI,SAAS,IAAI,OAAK,EAAE,QAAQ,CAAC;AAGzD,QAAM,aAAa,aAAa,MAAM,SAAS,eAAe,OAAO,SAAS;AAC9E,QAAM,UAAU,MAAM,QAAQ,YAAY,MAAM,QAAQ;AACxD,MAAI,CAAC,QAAQ,IAAI;AACf,UAAM,KAAK,yCAAyC,KAAK,UAAU,QAAQ,MAAM,CAAC,EAAE;AAEpF,YAAQ;AAAA,EACV,OAAO;AACL,UAAM,eAAuC,QAAQ;AACrD,UAAM,UAAmC,aAAa,QAAQ,WAAW,CAAC;AAC1E,UAAM,gBAAgB,QAAQ;AAG9B,UAAM,WAAW,QAAQ,OAAO,OAAK,CAAC,EAAE,YAAY,CAAC,YAAY,IAAI,EAAE,QAAQ,CAAC;AAEhF,QAAI,SAAS,SAAS,eAAe;AACnC,YAAM,YAAY,MAAM,QAAQ,YAAY,MAAM,UAAU,OAAO;AAAA,QACjE,QAAQ,EAAE,SAAS,SAAS;AAAA,MAC9
B,CAAC;AACD,UAAI,CAAC,UAAU,IAAI;AACjB,cAAM,KAAK,4CAA4C,KAAK,UAAU,UAAU,MAAM,CAAC,EAAE;AACzF,gBAAQ;AAAA,MACV,OAAO;AACL,cAAM,KAAK,oBAAoB,gBAAgB,SAAS,MAAM,kBAAkB;AAAA,MAClF;AAAA,IACF,OAAO;AACL,YAAM,KAAK,oDAAoD;AAAA,IACjE;AAAA,EACF;AAGA,aAAW,KAAK,UAAU;AAExB,UAAM,aAAa,MAAM;AAAA,MACvB,UAAU,MAAM,MAAM,qBAAqB,mBAAmB,EAAE,QAAQ,CAAC;AAAA,MACzE,MAAM;AAAA,IACR;AACA,QAAI,CAAC,WAAW,IAAI;AAClB,YAAM,KAAK,6CAA6C,EAAE,QAAQ,KAAK,KAAK,UAAU,WAAW,MAAM,CAAC,EAAE;AAC1G,cAAQ;AACR;AAAA,IACF;AAEA,UAAM,UAAU,MAAM,QAAQ,WAAW,IAAI,IAAI,WAAW,OAAO,CAAC;AACpE,QAAI,QAAQ,WAAW,GAAG;AACxB,YAAM,KAAK,0CAA0C,EAAE,QAAQ,EAAE;AACjE;AAAA,IACF;AAEA,eAAW,UAAU,SAAS;AAC5B,YAAM,YAAY,MAAM;AAAA,QACtB,UAAU,MAAM,MAAM,gBAAgB,OAAO,EAAE;AAAA,QAC/C,MAAM;AAAA,QACN;AAAA,MACF;AACA,UAAI,CAAC,UAAU,IAAI;AACjB,cAAM,KAAK,wCAAwC,OAAO,EAAE,QAAQ,EAAE,QAAQ,KAAK,KAAK,UAAU,UAAU,MAAM,CAAC,EAAE;AACrH,gBAAQ;AAAA,MACV,OAAO;AACL,cAAM,KAAK,kCAAkC,EAAE,QAAQ,EAAE;AAAA,MAC3D;AAAA,IACF;AAAA,EACF;AAEA,SAAO,EAAE,SAAS,OAAO,MAAM;AACjC;AAzSA,IAmCM,QACA;AApCN;AAAA;AAAA;AAUA;AAyBA,IAAM,SAAS;AACf,IAAM,gBAAgB;AAAA;AAAA;","names":[]}
|
|
@@ -0,0 +1,154 @@
|
|
|
1
|
+
import {
|
|
2
|
+
init_config,
|
|
3
|
+
loadConfig
|
|
4
|
+
} from "./chunk-FQ66DECN.js";
|
|
5
|
+
import {
|
|
6
|
+
init_projects,
|
|
7
|
+
loadProjectsConfig
|
|
8
|
+
} from "./chunk-OMNXYPXC.js";
|
|
9
|
+
import {
|
|
10
|
+
SOURCE_TRAEFIK_TEMPLATES,
|
|
11
|
+
TRAEFIK_CERTS_DIR,
|
|
12
|
+
TRAEFIK_DIR,
|
|
13
|
+
TRAEFIK_DYNAMIC_DIR,
|
|
14
|
+
init_paths
|
|
15
|
+
} from "./chunk-ZTFNYOC7.js";
|
|
16
|
+
import {
|
|
17
|
+
init_esm_shims
|
|
18
|
+
} from "./chunk-ZHC57RCV.js";
|
|
19
|
+
|
|
20
|
+
// src/lib/traefik.ts
// Module initialization: run the lazy chunk initializers this file depends on.
// (ESM imports below are hoisted, so they are resolved before these calls run.)
init_esm_shims();
init_paths();
init_config();
init_projects();
import { existsSync, readFileSync, writeFileSync, mkdirSync, unlinkSync, readdirSync } from "fs";
import { join } from "path";
import { execSync } from "child_process";
|
|
28
|
+
function generatePanopticonTraefikConfig() {
  // Render the dashboard's Traefik routing file from its template, replacing
  // {{PLACEHOLDER}} tokens with values from config.toml. Idempotent: safe to
  // call repeatedly. Returns false when the source template is missing, true
  // once panopticon.yml has been written.
  const templatePath = join(SOURCE_TRAEFIK_TEMPLATES, "dynamic", "panopticon.yml.template");
  if (!existsSync(templatePath)) return false;

  const { config } = loadConfig();
  const substitutions = {
    TRAEFIK_DOMAIN: config.traefik?.domain || "pan.localhost",
    DASHBOARD_PORT: String(config.dashboard.port),
    DASHBOARD_API_PORT: String(config.dashboard.api_port)
  };

  let rendered = readFileSync(templatePath, "utf-8");
  for (const [token, replacement] of Object.entries(substitutions)) {
    // Substitute every occurrence of {{token}} in the template body.
    rendered = rendered.replace(new RegExp(`\\{\\{${token}\\}\\}`, "g"), replacement);
  }

  mkdirSync(TRAEFIK_DYNAMIC_DIR, { recursive: true });
  writeFileSync(join(TRAEFIK_DYNAMIC_DIR, "panopticon.yml"), rendered, "utf-8");
  return true;
}
|
|
48
|
+
function cleanupTemplateFiles() {
  // The install step copies the templates directory wholesale; remove the raw
  // .template file so Traefik never attempts to load it as a dynamic config.
  const strayTemplate = join(TRAEFIK_DYNAMIC_DIR, "panopticon.yml.template");
  if (existsSync(strayTemplate)) {
    unlinkSync(strayTemplate);
  }
}
|
|
54
|
+
function generateTlsConfig() {
  // Discover mkcert-style cert/key pairs in TRAEFIK_CERTS_DIR and emit a
  // Traefik v3 dynamic TLS configuration (tls.yml). Returns false when there
  // is nothing to configure, true once the file has been written.
  if (!existsSync(TRAEFIK_CERTS_DIR)) {
    return false;
  }
  const files = readdirSync(TRAEFIK_CERTS_DIR);
  // Certificates are "<name>.pem"; their private keys are "<name>-key.pem".
  const certFiles = files.filter((f) => f.endsWith(".pem") && !f.endsWith("-key.pem"));
  if (certFiles.length === 0) {
    return false;
  }
  const certPairs = [];
  for (const certFile of certFiles) {
    // FIX: anchor the substitution to the trailing ".pem". The previous
    // String.replace(".pem", "-key.pem") rewrote the FIRST occurrence, which
    // derives a wrong key filename whenever ".pem" appears mid-name.
    const keyFile = certFile.replace(/\.pem$/, "-key.pem");
    // Only pair up certs whose key file actually exists alongside them.
    if (files.includes(keyFile)) {
      certPairs.push({
        certFile: `/etc/traefik/certs/${certFile}`,
        keyFile: `/etc/traefik/certs/${keyFile}`
      });
    }
  }
  if (certPairs.length === 0) {
    return false;
  }
  // Prefer the pan.localhost wildcard as the default certificate; otherwise
  // fall back to the first discovered pair.
  const defaultCert = certPairs.find((p) => p.certFile.includes("pan.localhost")) || certPairs[0];
  let yaml = "# Auto-generated TLS configuration \u2014 do not edit manually\n";
  yaml += "# Generated by: pan up / pan install\n";
  yaml += "# Traefik v3 requires TLS config in a separate dynamic config file\n\n";
  yaml += "tls:\n";
  yaml += "  stores:\n";
  yaml += "    default:\n";
  yaml += "      defaultCertificate:\n";
  yaml += `        certFile: ${defaultCert.certFile}\n`;
  yaml += `        keyFile: ${defaultCert.keyFile}\n`;
  yaml += "  certificates:\n";
  for (const pair of certPairs) {
    yaml += `    - certFile: ${pair.certFile}\n`;
    yaml += `      keyFile: ${pair.keyFile}\n`;
  }
  mkdirSync(TRAEFIK_DYNAMIC_DIR, { recursive: true });
  const outputPath = join(TRAEFIK_DYNAMIC_DIR, "tls.yml");
  writeFileSync(outputPath, yaml, "utf-8");
  return true;
}
|
|
100
|
+
function ensureProjectCerts() {
  // Generate a wildcard mkcert certificate for every project domain that does
  // not already have one on disk. Best-effort: returns the list of domains for
  // which a cert was generated; empty when mkcert is unavailable or nothing
  // needed generating.
  try {
    // Probe for mkcert on PATH; bail out quietly when it is not installed.
    execSync("which mkcert", { stdio: "pipe" });
  } catch {
    return [];
  }

  const created = [];
  for (const [, project] of Object.entries(loadProjectsConfig().projects)) {
    const domain = project.workspace?.dns?.domain;
    if (!domain) continue;

    const certPath = join(TRAEFIK_CERTS_DIR, `_wildcard.${domain}.pem`);
    const keyPath = join(TRAEFIK_CERTS_DIR, `_wildcard.${domain}-key.pem`);
    // Skip domains whose cert/key pair already exists.
    if (existsSync(certPath) && existsSync(keyPath)) continue;

    mkdirSync(TRAEFIK_CERTS_DIR, { recursive: true });
    try {
      // NOTE(review): domain is interpolated into a shell command line —
      // assumed to come from trusted local project config; confirm this before
      // ever accepting untrusted domain values here.
      execSync(
        `mkcert -cert-file "${certPath}" -key-file "${keyPath}" "${domain}" "*.${domain}" 2>/dev/null`,
        { stdio: "pipe" }
      );
      created.push(domain);
    } catch {
      // A failed mkcert run for one domain is non-fatal; continue with the rest.
    }
  }
  return created;
}
|
|
128
|
+
function cleanupStaleTlsSections() {
  // Earlier releases embedded TLS blocks directly in traefik.yml and
  // panopticon.yml; TLS now lives in its own tls.yml, so strip any leftover
  // sections. Each file is rewritten only when its contents actually change.
  const scrub = (filePath, stalePattern) => {
    if (!existsSync(filePath)) return;
    const before = readFileSync(filePath, "utf-8");
    const after = before.replace(stalePattern, "\n");
    if (after !== before) {
      writeFileSync(filePath, after, "utf-8");
    }
  };
  // Static config: remove the "# TLS Configuration" block and its indented body.
  scrub(join(TRAEFIK_DIR, "traefik.yml"), /\n# TLS Configuration\ntls:\n(?: .*\n)*/g);
  // Dynamic config: remove any tls: stores/certificates section and its body.
  scrub(
    join(TRAEFIK_DYNAMIC_DIR, "panopticon.yml"),
    /\ntls:\n (?:stores|certificates):\n(?: .*\n)*/g
  );
}
|
|
146
|
+
|
|
147
|
+
export {
|
|
148
|
+
generatePanopticonTraefikConfig,
|
|
149
|
+
cleanupTemplateFiles,
|
|
150
|
+
generateTlsConfig,
|
|
151
|
+
ensureProjectCerts,
|
|
152
|
+
cleanupStaleTlsSections
|
|
153
|
+
};
|
|
154
|
+
//# sourceMappingURL=chunk-PPRFKTVC.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/lib/traefik.ts"],"sourcesContent":["/**\n * Traefik Configuration Generator\n *\n * Generates the Panopticon dashboard Traefik routing config\n * from a template, substituting values from config.toml.\n * Also generates TLS certificate configuration from discovered certs.\n */\n\nimport { existsSync, readFileSync, writeFileSync, mkdirSync, unlinkSync, readdirSync } from 'fs';\nimport { join, basename } from 'path';\nimport { execSync } from 'child_process';\nimport { TRAEFIK_DYNAMIC_DIR, TRAEFIK_CERTS_DIR, TRAEFIK_DIR, SOURCE_TRAEFIK_TEMPLATES } from './paths.js';\nimport { loadConfig } from './config.js';\nimport { loadProjectsConfig } from './projects.js';\n\n/**\n * Generate panopticon.yml from template using current config values.\n * Safe to call multiple times (idempotent).\n * Returns true if file was written, false if template not found.\n */\nexport function generatePanopticonTraefikConfig(): boolean {\n const templatePath = join(SOURCE_TRAEFIK_TEMPLATES, 'dynamic', 'panopticon.yml.template');\n if (!existsSync(templatePath)) {\n return false;\n }\n\n const { config } = loadConfig();\n const placeholders: Record<string, string> = {\n TRAEFIK_DOMAIN: config.traefik?.domain || 'pan.localhost',\n DASHBOARD_PORT: String(config.dashboard.port),\n DASHBOARD_API_PORT: String(config.dashboard.api_port),\n };\n\n let content = readFileSync(templatePath, 'utf-8');\n for (const [key, value] of Object.entries(placeholders)) {\n content = content.replace(new RegExp(`\\\\{\\\\{${key}\\\\}\\\\}`, 'g'), value);\n }\n\n mkdirSync(TRAEFIK_DYNAMIC_DIR, { recursive: true });\n const outputPath = join(TRAEFIK_DYNAMIC_DIR, 'panopticon.yml');\n writeFileSync(outputPath, content, 'utf-8');\n return true;\n}\n\n/**\n * Remove any accidentally-copied .template files from the runtime Traefik dir.\n * Called after copyDirectoryRecursive in pan install.\n */\nexport function cleanupTemplateFiles(): void {\n const copiedTemplate = 
join(TRAEFIK_DYNAMIC_DIR, 'panopticon.yml.template');\n if (existsSync(copiedTemplate)) {\n unlinkSync(copiedTemplate);\n }\n}\n\n/**\n * Generate tls.yml from all discovered certificate files in the certs directory.\n *\n * Traefik v3 ignores `tls:` sections when they appear in the same dynamic config\n * file as `http:` routers/services. This function creates a dedicated tls.yml file\n * that Traefik's file provider will pick up separately.\n *\n * The first cert found (pan.localhost) is used as the default certificate.\n * All certs are listed in the certificates array for SNI matching.\n *\n * Safe to call multiple times (idempotent).\n * Returns true if file was written, false if no certs found.\n */\nexport function generateTlsConfig(): boolean {\n if (!existsSync(TRAEFIK_CERTS_DIR)) {\n return false;\n }\n\n // Scan for cert files (exclude -key.pem files)\n const files = readdirSync(TRAEFIK_CERTS_DIR);\n const certFiles = files.filter(f => f.endsWith('.pem') && !f.endsWith('-key.pem'));\n\n if (certFiles.length === 0) {\n return false;\n }\n\n // Pair each cert with its key file\n const certPairs: Array<{ certFile: string; keyFile: string }> = [];\n for (const certFile of certFiles) {\n const keyFile = certFile.replace('.pem', '-key.pem');\n if (files.includes(keyFile)) {\n certPairs.push({\n certFile: `/etc/traefik/certs/${certFile}`,\n keyFile: `/etc/traefik/certs/${keyFile}`,\n });\n }\n }\n\n if (certPairs.length === 0) {\n return false;\n }\n\n // Use the pan.localhost cert as default, fall back to first cert\n const defaultCert = certPairs.find(p => p.certFile.includes('pan.localhost')) || certPairs[0];\n\n // Build YAML content\n let yaml = '# Auto-generated TLS configuration — do not edit manually\\n';\n yaml += '# Generated by: pan up / pan install\\n';\n yaml += '# Traefik v3 requires TLS config in a separate dynamic config file\\n\\n';\n yaml += 'tls:\\n';\n yaml += ' stores:\\n';\n yaml += ' default:\\n';\n yaml += ' defaultCertificate:\\n';\n 
yaml += ` certFile: ${defaultCert.certFile}\\n`;\n yaml += ` keyFile: ${defaultCert.keyFile}\\n`;\n yaml += ' certificates:\\n';\n for (const pair of certPairs) {\n yaml += ` - certFile: ${pair.certFile}\\n`;\n yaml += ` keyFile: ${pair.keyFile}\\n`;\n }\n\n mkdirSync(TRAEFIK_DYNAMIC_DIR, { recursive: true });\n const outputPath = join(TRAEFIK_DYNAMIC_DIR, 'tls.yml');\n writeFileSync(outputPath, yaml, 'utf-8');\n return true;\n}\n\n/**\n * Ensure wildcard certificates exist for all registered projects that have DNS domains.\n *\n * Scans projects.yaml for projects with workspace.dns.domain, and generates\n * mkcert wildcard certs for any that don't already have certs in the Traefik\n * certs directory.\n *\n * Returns array of domains that had certs generated.\n */\nexport function ensureProjectCerts(): string[] {\n // Check mkcert is available\n try {\n execSync('which mkcert', { stdio: 'pipe' });\n } catch {\n return [];\n }\n\n const projectsConfig = loadProjectsConfig();\n const generated: string[] = [];\n\n for (const [, project] of Object.entries(projectsConfig.projects)) {\n const domain = project.workspace?.dns?.domain;\n if (!domain) continue;\n\n const certFile = join(TRAEFIK_CERTS_DIR, `_wildcard.${domain}.pem`);\n const keyFile = join(TRAEFIK_CERTS_DIR, `_wildcard.${domain}-key.pem`);\n\n if (existsSync(certFile) && existsSync(keyFile)) {\n continue;\n }\n\n // Generate cert for this project's domain\n mkdirSync(TRAEFIK_CERTS_DIR, { recursive: true });\n try {\n execSync(\n `mkcert -cert-file \"${certFile}\" -key-file \"${keyFile}\" \"${domain}\" \"*.${domain}\" 2>/dev/null`,\n { stdio: 'pipe' }\n );\n generated.push(domain);\n } catch {\n // mkcert failed — skip this domain\n }\n }\n\n return generated;\n}\n\n/**\n * Remove stale `tls:` sections from runtime config files.\n *\n * Traefik v3 ignores tls: in static config (traefik.yml) and in dynamic\n * config files that also contain http: routers. 
This function strips those\n * dead sections to avoid confusion.\n *\n * Called during `pan up` to clean up configs from older Panopticon versions.\n */\nexport function cleanupStaleTlsSections(): void {\n // Clean static config (traefik.yml)\n const staticConfig = join(TRAEFIK_DIR, 'traefik.yml');\n if (existsSync(staticConfig)) {\n const content = readFileSync(staticConfig, 'utf-8');\n // Remove tls: section at the end of the file\n const cleaned = content.replace(/\\n# TLS Configuration\\ntls:\\n(?: .*\\n)*/g, '\\n');\n if (cleaned !== content) {\n writeFileSync(staticConfig, cleaned, 'utf-8');\n }\n }\n\n // Clean dynamic panopticon.yml (regenerated from template, but also clean runtime copy)\n const dynamicConfig = join(TRAEFIK_DYNAMIC_DIR, 'panopticon.yml');\n if (existsSync(dynamicConfig)) {\n const content = readFileSync(dynamicConfig, 'utf-8');\n // Remove standalone tls: section (not nested under http: routers)\n const cleaned = content.replace(/\\ntls:\\n (?:stores|certificates):\\n(?: .*\\n)*/g, '\\n');\n if (cleaned !== content) {\n writeFileSync(dynamicConfig, cleaned, 'utf-8');\n }\n 
}\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAWA;AACA;AACA;AALA,SAAS,YAAY,cAAc,eAAe,WAAW,YAAY,mBAAmB;AAC5F,SAAS,YAAsB;AAC/B,SAAS,gBAAgB;AAUlB,SAAS,kCAA2C;AACzD,QAAM,eAAe,KAAK,0BAA0B,WAAW,yBAAyB;AACxF,MAAI,CAAC,WAAW,YAAY,GAAG;AAC7B,WAAO;AAAA,EACT;AAEA,QAAM,EAAE,OAAO,IAAI,WAAW;AAC9B,QAAM,eAAuC;AAAA,IAC3C,gBAAgB,OAAO,SAAS,UAAU;AAAA,IAC1C,gBAAgB,OAAO,OAAO,UAAU,IAAI;AAAA,IAC5C,oBAAoB,OAAO,OAAO,UAAU,QAAQ;AAAA,EACtD;AAEA,MAAI,UAAU,aAAa,cAAc,OAAO;AAChD,aAAW,CAAC,KAAK,KAAK,KAAK,OAAO,QAAQ,YAAY,GAAG;AACvD,cAAU,QAAQ,QAAQ,IAAI,OAAO,SAAS,GAAG,UAAU,GAAG,GAAG,KAAK;AAAA,EACxE;AAEA,YAAU,qBAAqB,EAAE,WAAW,KAAK,CAAC;AAClD,QAAM,aAAa,KAAK,qBAAqB,gBAAgB;AAC7D,gBAAc,YAAY,SAAS,OAAO;AAC1C,SAAO;AACT;AAMO,SAAS,uBAA6B;AAC3C,QAAM,iBAAiB,KAAK,qBAAqB,yBAAyB;AAC1E,MAAI,WAAW,cAAc,GAAG;AAC9B,eAAW,cAAc;AAAA,EAC3B;AACF;AAeO,SAAS,oBAA6B;AAC3C,MAAI,CAAC,WAAW,iBAAiB,GAAG;AAClC,WAAO;AAAA,EACT;AAGA,QAAM,QAAQ,YAAY,iBAAiB;AAC3C,QAAM,YAAY,MAAM,OAAO,OAAK,EAAE,SAAS,MAAM,KAAK,CAAC,EAAE,SAAS,UAAU,CAAC;AAEjF,MAAI,UAAU,WAAW,GAAG;AAC1B,WAAO;AAAA,EACT;AAGA,QAAM,YAA0D,CAAC;AACjE,aAAW,YAAY,WAAW;AAChC,UAAM,UAAU,SAAS,QAAQ,QAAQ,UAAU;AACnD,QAAI,MAAM,SAAS,OAAO,GAAG;AAC3B,gBAAU,KAAK;AAAA,QACb,UAAU,sBAAsB,QAAQ;AAAA,QACxC,SAAS,sBAAsB,OAAO;AAAA,MACxC,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAI,UAAU,WAAW,GAAG;AAC1B,WAAO;AAAA,EACT;AAGA,QAAM,cAAc,UAAU,KAAK,OAAK,EAAE,SAAS,SAAS,eAAe,CAAC,KAAK,UAAU,CAAC;AAG5F,MAAI,OAAO;AACX,UAAQ;AACR,UAAQ;AACR,UAAQ;AACR,UAAQ;AACR,UAAQ;AACR,UAAQ;AACR,UAAQ,qBAAqB,YAAY,QAAQ;AAAA;AACjD,UAAQ,oBAAoB,YAAY,OAAO;AAAA;AAC/C,UAAQ;AACR,aAAW,QAAQ,WAAW;AAC5B,YAAQ,mBAAmB,KAAK,QAAQ;AAAA;AACxC,YAAQ,kBAAkB,KAAK,OAAO;AAAA;AAAA,EACxC;AAEA,YAAU,qBAAqB,EAAE,WAAW,KAAK,CAAC;AAClD,QAAM,aAAa,KAAK,qBAAqB,SAAS;AACtD,gBAAc,YAAY,MAAM,OAAO;AACvC,SAAO;AACT;AAWO,SAAS,qBAA+B;AAE7C,MAAI;AACF,aAAS,gBAAgB,EAAE,OAAO,OAAO,CAAC;AAAA,EAC5C,QAAQ;AACN,WAAO,CAAC;AAAA,EACV;AAEA,QAAM,iBAAiB,mBAAmB;AAC1C,QAAM,YAAsB,CAAC;AAE7B,aAAW,CAAC,EAAE,OAAO,KAAK,OAAO,QAAQ,eAAe,QAAQ,GAAG;AACjE,UAAM,SAAS,QAAQ,WAAW,KAAK;AACvC,QAAI,CAAC,OAAQ;AAEb,UAAM,WAAW,KAAK,mBAAmB,aA
Aa,MAAM,MAAM;AAClE,UAAM,UAAU,KAAK,mBAAmB,aAAa,MAAM,UAAU;AAErE,QAAI,WAAW,QAAQ,KAAK,WAAW,OAAO,GAAG;AAC/C;AAAA,IACF;AAGA,cAAU,mBAAmB,EAAE,WAAW,KAAK,CAAC;AAChD,QAAI;AACF;AAAA,QACE,sBAAsB,QAAQ,gBAAgB,OAAO,MAAM,MAAM,QAAQ,MAAM;AAAA,QAC/E,EAAE,OAAO,OAAO;AAAA,MAClB;AACA,gBAAU,KAAK,MAAM;AAAA,IACvB,QAAQ;AAAA,IAER;AAAA,EACF;AAEA,SAAO;AACT;AAWO,SAAS,0BAAgC;AAE9C,QAAM,eAAe,KAAK,aAAa,aAAa;AACpD,MAAI,WAAW,YAAY,GAAG;AAC5B,UAAM,UAAU,aAAa,cAAc,OAAO;AAElD,UAAM,UAAU,QAAQ,QAAQ,6CAA6C,IAAI;AACjF,QAAI,YAAY,SAAS;AACvB,oBAAc,cAAc,SAAS,OAAO;AAAA,IAC9C;AAAA,EACF;AAGA,QAAM,gBAAgB,KAAK,qBAAqB,gBAAgB;AAChE,MAAI,WAAW,aAAa,GAAG;AAC7B,UAAM,UAAU,aAAa,eAAe,OAAO;AAEnD,UAAM,UAAU,QAAQ,QAAQ,sDAAsD,IAAI;AAC1F,QAAI,YAAY,SAAS;AACvB,oBAAc,eAAe,SAAS,OAAO;AAAA,IAC/C;AAAA,EACF;AACF;","names":[]}
|