@valkyrianlabs/payload-markdown-docs 0.1.0-canary.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (204)
  1. package/README.md +195 -0
  2. package/dist/admin/DocsSetManager.d.ts +2 -0
  3. package/dist/admin/DocsSetManager.js +298 -0
  4. package/dist/admin/DocsSetManager.js.map +1 -0
  5. package/dist/admin/docsSetManagerData.d.ts +25 -0
  6. package/dist/admin/docsSetManagerData.js +266 -0
  7. package/dist/admin/docsSetManagerData.js.map +1 -0
  8. package/dist/admin/docsSetManagerTypes.d.ts +103 -0
  9. package/dist/admin/docsSetManagerTypes.js +3 -0
  10. package/dist/admin/docsSetManagerTypes.js.map +1 -0
  11. package/dist/admin/index.d.ts +3 -0
  12. package/dist/admin/index.js +4 -0
  13. package/dist/admin/index.js.map +1 -0
  14. package/dist/cli/commands/install.d.ts +2 -0
  15. package/dist/cli/commands/install.js +211 -0
  16. package/dist/cli/commands/install.js.map +1 -0
  17. package/dist/cli/commands/keygen.d.ts +2 -0
  18. package/dist/cli/commands/keygen.js +89 -0
  19. package/dist/cli/commands/keygen.js.map +1 -0
  20. package/dist/cli/commands/manifest.d.ts +2 -0
  21. package/dist/cli/commands/manifest.js +50 -0
  22. package/dist/cli/commands/manifest.js.map +1 -0
  23. package/dist/cli/commands/plan.d.ts +2 -0
  24. package/dist/cli/commands/plan.js +110 -0
  25. package/dist/cli/commands/plan.js.map +1 -0
  26. package/dist/cli/commands/push.d.ts +3 -0
  27. package/dist/cli/commands/push.js +308 -0
  28. package/dist/cli/commands/push.js.map +1 -0
  29. package/dist/cli/commands/validate.d.ts +3 -0
  30. package/dist/cli/commands/validate.js +109 -0
  31. package/dist/cli/commands/validate.js.map +1 -0
  32. package/dist/cli/filesystem.d.ts +20 -0
  33. package/dist/cli/filesystem.js +96 -0
  34. package/dist/cli/filesystem.js.map +1 -0
  35. package/dist/cli/format.d.ts +35 -0
  36. package/dist/cli/format.js +76 -0
  37. package/dist/cli/format.js.map +1 -0
  38. package/dist/cli/http.d.ts +19 -0
  39. package/dist/cli/http.js +39 -0
  40. package/dist/cli/http.js.map +1 -0
  41. package/dist/cli/index.d.ts +3 -0
  42. package/dist/cli/index.js +214 -0
  43. package/dist/cli/index.js.map +1 -0
  44. package/dist/cli/parseArgs.d.ts +5 -0
  45. package/dist/cli/parseArgs.js +219 -0
  46. package/dist/cli/parseArgs.js.map +1 -0
  47. package/dist/cli/types.d.ts +51 -0
  48. package/dist/cli/types.js +3 -0
  49. package/dist/cli/types.js.map +1 -0
  50. package/dist/collections/docs.d.ts +9 -0
  51. package/dist/collections/docs.js +168 -0
  52. package/dist/collections/docs.js.map +1 -0
  53. package/dist/collections/docsGroups.d.ts +5 -0
  54. package/dist/collections/docsGroups.js +57 -0
  55. package/dist/collections/docsGroups.js.map +1 -0
  56. package/dist/collections/docsSets.d.ts +8 -0
  57. package/dist/collections/docsSets.js +158 -0
  58. package/dist/collections/docsSets.js.map +1 -0
  59. package/dist/collections/index.d.ts +10 -0
  60. package/dist/collections/index.js +7 -0
  61. package/dist/collections/index.js.map +1 -0
  62. package/dist/collections/nonces.d.ts +6 -0
  63. package/dist/collections/nonces.js +57 -0
  64. package/dist/collections/nonces.js.map +1 -0
  65. package/dist/collections/syncRuns.d.ts +5 -0
  66. package/dist/collections/syncRuns.js +139 -0
  67. package/dist/collections/syncRuns.js.map +1 -0
  68. package/dist/constants.d.ts +21 -0
  69. package/dist/constants.js +23 -0
  70. package/dist/constants.js.map +1 -0
  71. package/dist/endpoints/index.d.ts +2 -0
  72. package/dist/endpoints/index.js +3 -0
  73. package/dist/endpoints/index.js.map +1 -0
  74. package/dist/endpoints/sync.d.ts +47 -0
  75. package/dist/endpoints/sync.js +616 -0
  76. package/dist/endpoints/sync.js.map +1 -0
  77. package/dist/index.d.ts +9 -0
  78. package/dist/index.js +7 -0
  79. package/dist/index.js.map +1 -0
  80. package/dist/next/PayloadMarkdownDocsPage.d.ts +7 -0
  81. package/dist/next/PayloadMarkdownDocsPage.js +142 -0
  82. package/dist/next/PayloadMarkdownDocsPage.js.map +1 -0
  83. package/dist/next/index.d.ts +9 -0
  84. package/dist/next/index.js +7 -0
  85. package/dist/next/index.js.map +1 -0
  86. package/dist/next/markdown.d.ts +14 -0
  87. package/dist/next/markdown.js +232 -0
  88. package/dist/next/markdown.js.map +1 -0
  89. package/dist/next/metadata.d.ts +3 -0
  90. package/dist/next/metadata.js +33 -0
  91. package/dist/next/metadata.js.map +1 -0
  92. package/dist/next/records.d.ts +14 -0
  93. package/dist/next/records.js +146 -0
  94. package/dist/next/records.js.map +1 -0
  95. package/dist/next/route.d.ts +6 -0
  96. package/dist/next/route.js +271 -0
  97. package/dist/next/route.js.map +1 -0
  98. package/dist/next/sidebar.d.ts +15 -0
  99. package/dist/next/sidebar.js +137 -0
  100. package/dist/next/sidebar.js.map +1 -0
  101. package/dist/next/types.d.ts +117 -0
  102. package/dist/next/types.js +3 -0
  103. package/dist/next/types.js.map +1 -0
  104. package/dist/payload/applyDocsSync.d.ts +54 -0
  105. package/dist/payload/applyDocsSync.js +176 -0
  106. package/dist/payload/applyDocsSync.js.map +1 -0
  107. package/dist/payload/docsConflicts.d.ts +12 -0
  108. package/dist/payload/docsConflicts.js +34 -0
  109. package/dist/payload/docsConflicts.js.map +1 -0
  110. package/dist/payload/docsData.d.ts +23 -0
  111. package/dist/payload/docsData.js +59 -0
  112. package/dist/payload/docsData.js.map +1 -0
  113. package/dist/payload/docsSets.d.ts +38 -0
  114. package/dist/payload/docsSets.js +57 -0
  115. package/dist/payload/docsSets.js.map +1 -0
  116. package/dist/payload/existingDocs.d.ts +43 -0
  117. package/dist/payload/existingDocs.js +97 -0
  118. package/dist/payload/existingDocs.js.map +1 -0
  119. package/dist/payload/index.d.ts +15 -0
  120. package/dist/payload/index.js +10 -0
  121. package/dist/payload/index.js.map +1 -0
  122. package/dist/payload/routeCollisions.d.ts +31 -0
  123. package/dist/payload/routeCollisions.js +104 -0
  124. package/dist/payload/routeCollisions.js.map +1 -0
  125. package/dist/payload/syncRuns.d.ts +60 -0
  126. package/dist/payload/syncRuns.js +53 -0
  127. package/dist/payload/syncRuns.js.map +1 -0
  128. package/dist/plugin.d.ts +3 -0
  129. package/dist/plugin.js +165 -0
  130. package/dist/plugin.js.map +1 -0
  131. package/dist/routing/index.d.ts +3 -0
  132. package/dist/routing/index.js +4 -0
  133. package/dist/routing/index.js.map +1 -0
  134. package/dist/routing/paths.d.ts +7 -0
  135. package/dist/routing/paths.js +23 -0
  136. package/dist/routing/paths.js.map +1 -0
  137. package/dist/routing/reservations.d.ts +37 -0
  138. package/dist/routing/reservations.js +79 -0
  139. package/dist/routing/reservations.js.map +1 -0
  140. package/dist/security/canonical.d.ts +12 -0
  141. package/dist/security/canonical.js +24 -0
  142. package/dist/security/canonical.js.map +1 -0
  143. package/dist/security/githubOidc.d.ts +45 -0
  144. package/dist/security/githubOidc.js +177 -0
  145. package/dist/security/githubOidc.js.map +1 -0
  146. package/dist/security/headers.d.ts +22 -0
  147. package/dist/security/headers.js +44 -0
  148. package/dist/security/headers.js.map +1 -0
  149. package/dist/security/index.d.ts +15 -0
  150. package/dist/security/index.js +9 -0
  151. package/dist/security/index.js.map +1 -0
  152. package/dist/security/jwks.d.ts +20 -0
  153. package/dist/security/jwks.js +40 -0
  154. package/dist/security/jwks.js.map +1 -0
  155. package/dist/security/jwt.d.ts +10 -0
  156. package/dist/security/jwt.js +42 -0
  157. package/dist/security/jwt.js.map +1 -0
  158. package/dist/security/nonce.d.ts +34 -0
  159. package/dist/security/nonce.js +43 -0
  160. package/dist/security/nonce.js.map +1 -0
  161. package/dist/security/sign.d.ts +13 -0
  162. package/dist/security/sign.js +39 -0
  163. package/dist/security/sign.js.map +1 -0
  164. package/dist/security/verify.d.ts +28 -0
  165. package/dist/security/verify.js +54 -0
  166. package/dist/security/verify.js.map +1 -0
  167. package/dist/skills/codex/SKILL.md +173 -0
  168. package/dist/skills/codex/examples/docs-page.md +42 -0
  169. package/dist/skills/codex/examples/github-actions.md +64 -0
  170. package/dist/skills/codex/reference/admin.md +28 -0
  171. package/dist/skills/codex/reference/frontmatter.md +39 -0
  172. package/dist/skills/codex/reference/payload-markdown-directives.md +77 -0
  173. package/dist/skills/codex/reference/routing.md +35 -0
  174. package/dist/skills/codex/reference/sync.md +35 -0
  175. package/dist/skills/codex/reference/troubleshooting.md +53 -0
  176. package/dist/skills/codex/reference/workflow.md +39 -0
  177. package/dist/sync/aiExportManifest.d.ts +58 -0
  178. package/dist/sync/aiExportManifest.js +430 -0
  179. package/dist/sync/aiExportManifest.js.map +1 -0
  180. package/dist/sync/frontmatter.d.ts +28 -0
  181. package/dist/sync/frontmatter.js +210 -0
  182. package/dist/sync/frontmatter.js.map +1 -0
  183. package/dist/sync/hash.d.ts +1 -0
  184. package/dist/sync/hash.js +8 -0
  185. package/dist/sync/hash.js.map +1 -0
  186. package/dist/sync/index.d.ts +12 -0
  187. package/dist/sync/index.js +9 -0
  188. package/dist/sync/index.js.map +1 -0
  189. package/dist/sync/manifest.d.ts +58 -0
  190. package/dist/sync/manifest.js +21 -0
  191. package/dist/sync/manifest.js.map +1 -0
  192. package/dist/sync/paths.d.ts +16 -0
  193. package/dist/sync/paths.js +116 -0
  194. package/dist/sync/paths.js.map +1 -0
  195. package/dist/sync/plan.d.ts +29 -0
  196. package/dist/sync/plan.js +72 -0
  197. package/dist/sync/plan.js.map +1 -0
  198. package/dist/sync/validate.d.ts +26 -0
  199. package/dist/sync/validate.js +308 -0
  200. package/dist/sync/validate.js.map +1 -0
  201. package/dist/types.d.ts +84 -0
  202. package/dist/types.js +3 -0
  203. package/dist/types.js.map +1 -0
  204. package/package.json +143 -0
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../../../src/cli/commands/install.ts"],"sourcesContent":["import { access, mkdir, readdir, readFile, writeFile } from 'node:fs/promises'\nimport path from 'node:path'\n\nimport type { CliResult, ParsedCliArgs } from '../types.js'\n\nimport { getFlagBoolean, getFlagString } from '../parseArgs.js'\n\ntype AgentTarget = 'codex'\n\ntype PackageManager = 'bun' | 'npm' | 'pnpm' | 'yarn'\n\ntype SkillTemplateFile = {\n content: string\n relativePath: string\n}\n\ntype PlannedSkillFile = {\n content: string\n path: string\n relativePath: string\n}\n\ntype InstallSkillOptions = {\n agent: AgentTarget\n docsRoot: string\n dryRun: boolean\n force: boolean\n outDir: string\n packageManager: PackageManager\n}\n\nconst packageManagers = new Set<PackageManager>(['bun', 'npm', 'pnpm', 'yarn'])\nconst supportedInstallTargets = new Set(['ai-skill', 'skill'])\nconst defaultSkillOutputPath = '.agents/skills/payload-markdown-docs'\n\nconst skillTemplateRoot = new URL('../../skills/codex/', import.meta.url)\n\nconst fileExists = async (filePath: string): Promise<boolean> => {\n try {\n await access(filePath)\n\n return true\n } catch {\n return false\n }\n}\n\nconst detectPackageManager = async (cwd = process.cwd()): Promise<PackageManager> => {\n const lockfiles: [file: string, packageManager: PackageManager][] = [\n ['pnpm-lock.yaml', 'pnpm'],\n ['package-lock.json', 'npm'],\n ['yarn.lock', 'yarn'],\n ['bun.lockb', 'bun'],\n ]\n\n for (const [file, packageManager] of lockfiles) {\n if (await fileExists(path.join(cwd, file))) {\n return packageManager\n }\n }\n\n return 'pnpm'\n}\n\nconst readTemplateFiles = async (\n directoryUrl = skillTemplateRoot,\n basePath = '',\n): Promise<SkillTemplateFile[]> => {\n const entries = await readdir(directoryUrl, {\n withFileTypes: true,\n })\n const files: SkillTemplateFile[] = []\n\n for (const entry of entries) {\n if (entry.isSymbolicLink()) {\n continue\n }\n\n const relativePath = path.posix.join(basePath, 
entry.name)\n const entryUrl = new URL(`${entry.name}${entry.isDirectory() ? '/' : ''}`, directoryUrl)\n\n if (entry.isDirectory()) {\n files.push(...(await readTemplateFiles(entryUrl, relativePath)))\n continue\n }\n\n if (!entry.isFile()) {\n continue\n }\n\n files.push({\n content: await readFile(entryUrl, 'utf8'),\n relativePath,\n })\n }\n\n return files.sort((left, right) => left.relativePath.localeCompare(right.relativePath))\n}\n\nconst applyTemplateValues = ({\n content,\n docsRoot,\n packageManager,\n}: {\n content: string\n docsRoot: string\n packageManager: PackageManager\n}): string =>\n content\n .replaceAll('{{docsRoot}}', docsRoot)\n .replaceAll('{{packageManager}}', packageManager)\n\nconst assertSafeRelativePath = (relativePath: string): CliResult | undefined => {\n const normalized = path.posix.normalize(relativePath)\n\n if (\n normalized.startsWith('../') ||\n normalized === '..' ||\n path.isAbsolute(relativePath) ||\n relativePath.includes('\\\\')\n ) {\n return {\n exitCode: 1,\n stderr: `Unsafe bundled skill path \"${relativePath}\".\\n`,\n }\n }\n\n return undefined\n}\n\nconst createPlannedFiles = async ({\n docsRoot,\n outDir,\n packageManager,\n}: Pick<InstallSkillOptions, 'docsRoot' | 'outDir' | 'packageManager'>): Promise<\n CliResult | PlannedSkillFile[]\n> => {\n const absoluteOutDir = path.resolve(outDir)\n const templates = await readTemplateFiles()\n const plannedFiles: PlannedSkillFile[] = []\n\n for (const template of templates) {\n const unsafe = assertSafeRelativePath(template.relativePath)\n\n if (unsafe) {\n return unsafe\n }\n\n const outputPath = path.resolve(absoluteOutDir, template.relativePath)\n\n if (\n outputPath !== absoluteOutDir &&\n !outputPath.startsWith(`${absoluteOutDir}${path.sep}`)\n ) {\n return {\n exitCode: 1,\n stderr: `Refusing to write outside target directory: ${template.relativePath}\\n`,\n }\n }\n\n plannedFiles.push({\n content: applyTemplateValues({\n content: template.content,\n docsRoot,\n 
packageManager,\n }),\n path: outputPath,\n relativePath: template.relativePath,\n })\n }\n\n return plannedFiles\n}\n\nconst getInstallSkillOptions = async (\n args: ParsedCliArgs,\n): Promise<CliResult | InstallSkillOptions> => {\n const [target] = args.positionals\n\n if (!target || !supportedInstallTargets.has(target)) {\n return {\n exitCode: 1,\n stderr: 'Install requires target \"skill\" or \"ai-skill\".\\n',\n }\n }\n\n const agentFlag = getFlagString(args, 'agent')\n const codex = getFlagBoolean(args, 'codex')\n\n if (agentFlag && agentFlag !== 'codex') {\n return {\n exitCode: 1,\n stderr: '--agent currently supports only \"codex\".\\n',\n }\n }\n\n if (!codex && agentFlag !== 'codex') {\n return {\n exitCode: 1,\n stderr: 'Install skill requires --codex or --agent codex.\\n',\n }\n }\n\n const packageManagerFlag = getFlagString(args, 'package-manager')\n\n if (\n packageManagerFlag !== undefined &&\n !packageManagers.has(packageManagerFlag as PackageManager)\n ) {\n return {\n exitCode: 1,\n stderr: '--package-manager must be pnpm, npm, yarn, or bun.\\n',\n }\n }\n\n return {\n agent: 'codex',\n docsRoot: getFlagString(args, 'docs-root') ?? './docs',\n dryRun: getFlagBoolean(args, 'dry-run'),\n force: getFlagBoolean(args, 'force'),\n outDir: getFlagString(args, 'out') ?? defaultSkillOutputPath,\n packageManager:\n (packageManagerFlag as PackageManager | undefined) ?? (await detectPackageManager()),\n }\n}\n\nconst formatPlannedFiles = ({\n dryRun,\n files,\n outDir,\n}: {\n dryRun: boolean\n files: PlannedSkillFile[]\n outDir: string\n}): string => {\n const lines = [\n dryRun\n ? 
'payload-markdown-docs install skill dry-run'\n : 'payload-markdown-docs install skill',\n '',\n `Target: ${path.resolve(outDir)}`,\n 'Files:',\n ...files.map((file) => `- ${file.relativePath}`),\n ]\n\n return `${lines.join('\\n')}\\n`\n}\n\nexport const runInstallCommand = async (\n args: ParsedCliArgs,\n): Promise<CliResult> => {\n const options = await getInstallSkillOptions(args)\n\n if ('exitCode' in options) {\n return options\n }\n\n const plannedFiles = await createPlannedFiles(options)\n\n if ('exitCode' in plannedFiles) {\n return plannedFiles\n }\n\n if (!options.force) {\n const existingFiles: string[] = []\n\n for (const file of plannedFiles) {\n if (await fileExists(file.path)) {\n existingFiles.push(file.relativePath)\n }\n }\n\n if (existingFiles.length > 0) {\n return {\n exitCode: 1,\n stderr: `Skill files already exist. Use --force to overwrite:\\n${existingFiles\n .map((file) => `- ${file}`)\n .join('\\n')}\\n`,\n }\n }\n }\n\n if (options.dryRun) {\n return {\n exitCode: 0,\n stdout: formatPlannedFiles({\n dryRun: true,\n files: plannedFiles,\n outDir: options.outDir,\n }),\n }\n }\n\n for (const file of plannedFiles) {\n await mkdir(path.dirname(file.path), {\n recursive: true,\n })\n await writeFile(file.path, file.content, 'utf8')\n }\n\n return {\n exitCode: 0,\n stdout: formatPlannedFiles({\n dryRun: false,\n files: plannedFiles,\n outDir: options.outDir,\n }),\n 
}\n}\n"],"names":["access","mkdir","readdir","readFile","writeFile","path","getFlagBoolean","getFlagString","packageManagers","Set","supportedInstallTargets","defaultSkillOutputPath","skillTemplateRoot","URL","url","fileExists","filePath","detectPackageManager","cwd","process","lockfiles","file","packageManager","join","readTemplateFiles","directoryUrl","basePath","entries","withFileTypes","files","entry","isSymbolicLink","relativePath","posix","name","entryUrl","isDirectory","push","isFile","content","sort","left","right","localeCompare","applyTemplateValues","docsRoot","replaceAll","assertSafeRelativePath","normalized","normalize","startsWith","isAbsolute","includes","exitCode","stderr","undefined","createPlannedFiles","outDir","absoluteOutDir","resolve","templates","plannedFiles","template","unsafe","outputPath","sep","getInstallSkillOptions","args","target","positionals","has","agentFlag","codex","packageManagerFlag","agent","dryRun","force","formatPlannedFiles","lines","map","runInstallCommand","options","existingFiles","length","stdout","dirname","recursive"],"mappings":"AAAA,SAASA,MAAM,EAAEC,KAAK,EAAEC,OAAO,EAAEC,QAAQ,EAAEC,SAAS,QAAQ,mBAAkB;AAC9E,OAAOC,UAAU,YAAW;AAI5B,SAASC,cAAc,EAAEC,aAAa,QAAQ,kBAAiB;AA0B/D,MAAMC,kBAAkB,IAAIC,IAAoB;IAAC;IAAO;IAAO;IAAQ;CAAO;AAC9E,MAAMC,0BAA0B,IAAID,IAAI;IAAC;IAAY;CAAQ;AAC7D,MAAME,yBAAyB;AAE/B,MAAMC,oBAAoB,IAAIC,IAAI,uBAAuB,YAAYC,GAAG;AAExE,MAAMC,aAAa,OAAOC;IACxB,IAAI;QACF,MAAMhB,OAAOgB;QAEb,OAAO;IACT,EAAE,OAAM;QACN,OAAO;IACT;AACF;AAEA,MAAMC,uBAAuB,OAAOC,MAAMC,QAAQD,GAAG,EAAE;IACrD,MAAME,YAA8D;QAClE;YAAC;YAAkB;SAAO;QAC1B;YAAC;YAAqB;SAAM;QAC5B;YAAC;YAAa;SAAO;QACrB;YAAC;YAAa;SAAM;KACrB;IAED,KAAK,MAAM,CAACC,MAAMC,eAAe,IAAIF,UAAW;QAC9C,IAAI,MAAML,WAAWV,KAAKkB,IAAI,CAACL,KAAKG,QAAQ;YAC1C,OAAOC;QACT;IACF;IAEA,OAAO;AACT;AAEA,MAAME,oBAAoB,OACxBC,eAAeb,iBAAiB,EAChCc,WAAW,EAAE;IAEb,MAAMC,UAAU,MAAMzB,QAAQuB,cAAc;QAC1CG,eAAe;IACjB;IACA,MAAMC,QAA6B,EAAE;IAErC,KAAK,MAAMC,SAASH,QAAS;QAC3B,IAAIG,MAAMC,cAAc,IAAI;YAC1B;QACF;QAEA,MAAMC,eAAe3B,KA
AK4B,KAAK,CAACV,IAAI,CAACG,UAAUI,MAAMI,IAAI;QACzD,MAAMC,WAAW,IAAItB,IAAI,GAAGiB,MAAMI,IAAI,GAAGJ,MAAMM,WAAW,KAAK,MAAM,IAAI,EAAEX;QAE3E,IAAIK,MAAMM,WAAW,IAAI;YACvBP,MAAMQ,IAAI,IAAK,MAAMb,kBAAkBW,UAAUH;YACjD;QACF;QAEA,IAAI,CAACF,MAAMQ,MAAM,IAAI;YACnB;QACF;QAEAT,MAAMQ,IAAI,CAAC;YACTE,SAAS,MAAMpC,SAASgC,UAAU;YAClCH;QACF;IACF;IAEA,OAAOH,MAAMW,IAAI,CAAC,CAACC,MAAMC,QAAUD,KAAKT,YAAY,CAACW,aAAa,CAACD,MAAMV,YAAY;AACvF;AAEA,MAAMY,sBAAsB,CAAC,EAC3BL,OAAO,EACPM,QAAQ,EACRvB,cAAc,EAKf,GACCiB,QACGO,UAAU,CAAC,gBAAgBD,UAC3BC,UAAU,CAAC,sBAAsBxB;AAEtC,MAAMyB,yBAAyB,CAACf;IAC9B,MAAMgB,aAAa3C,KAAK4B,KAAK,CAACgB,SAAS,CAACjB;IAExC,IACEgB,WAAWE,UAAU,CAAC,UACtBF,eAAe,QACf3C,KAAK8C,UAAU,CAACnB,iBAChBA,aAAaoB,QAAQ,CAAC,OACtB;QACA,OAAO;YACLC,UAAU;YACVC,QAAQ,CAAC,2BAA2B,EAAEtB,aAAa,IAAI,CAAC;QAC1D;IACF;IAEA,OAAOuB;AACT;AAEA,MAAMC,qBAAqB,OAAO,EAChCX,QAAQ,EACRY,MAAM,EACNnC,cAAc,EACsD;IAGpE,MAAMoC,iBAAiBrD,KAAKsD,OAAO,CAACF;IACpC,MAAMG,YAAY,MAAMpC;IACxB,MAAMqC,eAAmC,EAAE;IAE3C,KAAK,MAAMC,YAAYF,UAAW;QAChC,MAAMG,SAAShB,uBAAuBe,SAAS9B,YAAY;QAE3D,IAAI+B,QAAQ;YACV,OAAOA;QACT;QAEA,MAAMC,aAAa3D,KAAKsD,OAAO,CAACD,gBAAgBI,SAAS9B,YAAY;QAErE,IACEgC,eAAeN,kBACf,CAACM,WAAWd,UAAU,CAAC,GAAGQ,iBAAiBrD,KAAK4D,GAAG,EAAE,GACrD;YACA,OAAO;gBACLZ,UAAU;gBACVC,QAAQ,CAAC,4CAA4C,EAAEQ,SAAS9B,YAAY,CAAC,EAAE,CAAC;YAClF;QACF;QAEA6B,aAAaxB,IAAI,CAAC;YAChBE,SAASK,oBAAoB;gBAC3BL,SAASuB,SAASvB,OAAO;gBACzBM;gBACAvB;YACF;YACAjB,MAAM2D;YACNhC,cAAc8B,SAAS9B,YAAY;QACrC;IACF;IAEA,OAAO6B;AACT;AAEA,MAAMK,yBAAyB,OAC7BC;IAEA,MAAM,CAACC,OAAO,GAAGD,KAAKE,WAAW;IAEjC,IAAI,CAACD,UAAU,CAAC1D,wBAAwB4D,GAAG,CAACF,SAAS;QACnD,OAAO;YACLf,UAAU;YACVC,QAAQ;QACV;IACF;IAEA,MAAMiB,YAAYhE,cAAc4D,MAAM;IACtC,MAAMK,QAAQlE,eAAe6D,MAAM;IAEnC,IAAII,aAAaA,cAAc,SAAS;QACtC,OAAO;YACLlB,UAAU;YACVC,QAAQ;QACV;IACF;IAEA,IAAI,CAACkB,SAASD,cAAc,SAAS;QACnC,OAAO;YACLlB,UAAU;YACVC,QAAQ;QACV;IACF;IAEA,MAAMmB,qBAAqBlE,cAAc4D,MAAM;IAE/C,IACEM,uBAAuBlB,aACvB,CAAC/C,gBAAgB8D,GAAG,CAACG,qBACrB;QACA,OAAO;YACLpB,UAAU;YACVC,QAAQ;QACV;IACF;IAEA,OAAO;QACLoB,OAAO;QACP7B,UAAUtC,cAAc4D,MAA
M,gBAAgB;QAC9CQ,QAAQrE,eAAe6D,MAAM;QAC7BS,OAAOtE,eAAe6D,MAAM;QAC5BV,QAAQlD,cAAc4D,MAAM,UAAUxD;QACtCW,gBACE,AAACmD,sBAAsD,MAAMxD;IACjE;AACF;AAEA,MAAM4D,qBAAqB,CAAC,EAC1BF,MAAM,EACN9C,KAAK,EACL4B,MAAM,EAKP;IACC,MAAMqB,QAAQ;QACZH,SACI,gDACA;QACJ;QACA,CAAC,QAAQ,EAAEtE,KAAKsD,OAAO,CAACF,SAAS;QACjC;WACG5B,MAAMkD,GAAG,CAAC,CAAC1D,OAAS,CAAC,EAAE,EAAEA,KAAKW,YAAY,EAAE;KAChD;IAED,OAAO,GAAG8C,MAAMvD,IAAI,CAAC,MAAM,EAAE,CAAC;AAChC;AAEA,OAAO,MAAMyD,oBAAoB,OAC/Bb;IAEA,MAAMc,UAAU,MAAMf,uBAAuBC;IAE7C,IAAI,cAAcc,SAAS;QACzB,OAAOA;IACT;IAEA,MAAMpB,eAAe,MAAML,mBAAmByB;IAE9C,IAAI,cAAcpB,cAAc;QAC9B,OAAOA;IACT;IAEA,IAAI,CAACoB,QAAQL,KAAK,EAAE;QAClB,MAAMM,gBAA0B,EAAE;QAElC,KAAK,MAAM7D,QAAQwC,aAAc;YAC/B,IAAI,MAAM9C,WAAWM,KAAKhB,IAAI,GAAG;gBAC/B6E,cAAc7C,IAAI,CAAChB,KAAKW,YAAY;YACtC;QACF;QAEA,IAAIkD,cAAcC,MAAM,GAAG,GAAG;YAC5B,OAAO;gBACL9B,UAAU;gBACVC,QAAQ,CAAC,sDAAsD,EAAE4B,cAC9DH,GAAG,CAAC,CAAC1D,OAAS,CAAC,EAAE,EAAEA,MAAM,EACzBE,IAAI,CAAC,MAAM,EAAE,CAAC;YACnB;QACF;IACF;IAEA,IAAI0D,QAAQN,MAAM,EAAE;QAClB,OAAO;YACLtB,UAAU;YACV+B,QAAQP,mBAAmB;gBACzBF,QAAQ;gBACR9C,OAAOgC;gBACPJ,QAAQwB,QAAQxB,MAAM;YACxB;QACF;IACF;IAEA,KAAK,MAAMpC,QAAQwC,aAAc;QAC/B,MAAM5D,MAAMI,KAAKgF,OAAO,CAAChE,KAAKhB,IAAI,GAAG;YACnCiF,WAAW;QACb;QACA,MAAMlF,UAAUiB,KAAKhB,IAAI,EAAEgB,KAAKkB,OAAO,EAAE;IAC3C;IAEA,OAAO;QACLc,UAAU;QACV+B,QAAQP,mBAAmB;YACzBF,QAAQ;YACR9C,OAAOgC;YACPJ,QAAQwB,QAAQxB,MAAM;QACxB;IACF;AACF,EAAC"}
@@ -0,0 +1,2 @@
1
+ import type { CliResult, ParsedCliArgs } from '../types.js';
2
+ export declare const runKeygenCommand: (args: ParsedCliArgs) => Promise<CliResult>;
@@ -0,0 +1,89 @@
1
+ import { generateKeyPairSync } from 'node:crypto';
2
+ import { access, mkdir, writeFile } from 'node:fs/promises';
3
+ import path from 'node:path';
4
+ import { getFlagBoolean, getFlagString } from '../parseArgs.js';
5
// Output encodings accepted by the --format flag.
const keyFormats = new Set(['pem', 'base64']);
9
// Resolves to true when `filePath` is accessible on disk, false otherwise.
// `access` rejects for missing/unreadable paths, so absence maps to the
// rejection handler rather than an exception escaping to the caller.
const fileExists = (filePath) =>
    access(filePath).then(
        () => true,
        () => false,
    );
17
// Generates a fresh Ed25519 key pair encoded as PEM text:
// PKCS#8 for the private key, SPKI for the public key.
const generatePemKeys = () => {
    const pair = generateKeyPairSync('ed25519', {
        privateKeyEncoding: { format: 'pem', type: 'pkcs8' },
        publicKeyEncoding: { format: 'pem', type: 'spki' },
    });
    return {
        privateKey: pair.privateKey.toString(),
        publicKey: pair.publicKey.toString(),
    };
};
33
// Generates a fresh Ed25519 key pair as base64 strings over the DER bytes:
// PKCS#8 for the private key, SPKI for the public key.
const generateBase64Keys = () => {
    const toBase64 = (der) => Buffer.from(der).toString('base64');
    const { privateKey, publicKey } = generateKeyPairSync('ed25519', {
        privateKeyEncoding: { format: 'der', type: 'pkcs8' },
        publicKeyEncoding: { format: 'der', type: 'spki' },
    });
    return {
        privateKey: toBase64(privateKey),
        publicKey: toBase64(publicKey),
    };
};
49
// Renders both keys for terminal output: each key trimmed, separated by
// blank lines, with a single trailing newline.
const formatKeysForStdout = ({ privateKey, publicKey }) =>
    ['Public key:', '', publicKey.trim(), '', 'Private key:', '', privateKey.trim(), ''].join('\n');
50
/**
 * Implements the `keygen` CLI command.
 *
 * Generates an Ed25519 key pair in the requested --format ('pem', the
 * default, or 'base64'). Without --out the keys are printed to stdout;
 * with --out they are written to docs-sync-public.pem and
 * docs-sync-private.pem inside the target directory. Note the .pem file
 * names are used even for --format base64 (existing behavior, preserved).
 *
 * @param {ParsedCliArgs} args - parsed CLI flags and positionals
 * @returns {Promise<CliResult>} exitCode plus stdout or stderr text
 */
export const runKeygenCommand = async (args) => {
    const format = getFlagString(args, 'format') ?? 'pem';
    if (!keyFormats.has(format)) {
        return {
            exitCode: 1,
            stderr: '--format must be pem or base64.\n'
        };
    }
    const keys = format === 'pem' ? generatePemKeys() : generateBase64Keys();
    const outDir = getFlagString(args, 'out');
    // No --out: emit the keys on stdout and touch nothing on disk.
    if (!outDir) {
        return {
            exitCode: 0,
            stdout: formatKeysForStdout(keys)
        };
    }
    const absoluteOutDir = path.resolve(outDir);
    const publicKeyPath = path.join(absoluteOutDir, 'docs-sync-public.pem');
    const privateKeyPath = path.join(absoluteOutDir, 'docs-sync-private.pem');
    const force = getFlagBoolean(args, 'force');
    // The two existence probes are independent reads — run them in
    // parallel instead of awaiting them one after the other.
    const [publicExists, privateExists] = await Promise.all([
        fileExists(publicKeyPath),
        fileExists(privateKeyPath)
    ]);
    // Refuse to clobber existing key material unless --force is given.
    if (!force && (publicExists || privateExists)) {
        return {
            exitCode: 1,
            stderr: 'Key files already exist. Use --force to overwrite docs-sync-public.pem and docs-sync-private.pem.\n'
        };
    }
    await mkdir(absoluteOutDir, {
        recursive: true
    });
    // Normalize each key to exactly one trailing newline before writing.
    await writeFile(publicKeyPath, `${keys.publicKey.trim()}\n`, 'utf8');
    await writeFile(privateKeyPath, `${keys.privateKey.trim()}\n`, 'utf8');
    return {
        exitCode: 0,
        stdout: `Wrote public key: ${publicKeyPath}\nWrote private key: ${privateKeyPath}\n`
    };
};
88
+
89
+ //# sourceMappingURL=keygen.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../../../src/cli/commands/keygen.ts"],"sourcesContent":["import { generateKeyPairSync } from 'node:crypto'\nimport { access, mkdir, writeFile } from 'node:fs/promises'\nimport path from 'node:path'\n\nimport type { CliResult, ParsedCliArgs } from '../types.js'\n\nimport { getFlagBoolean, getFlagString } from '../parseArgs.js'\n\ntype KeyFormat = 'base64' | 'pem'\n\ntype GeneratedKeys = {\n privateKey: string\n publicKey: string\n}\n\nconst keyFormats = new Set<KeyFormat>(['base64', 'pem'])\n\nconst fileExists = async (filePath: string): Promise<boolean> => {\n try {\n await access(filePath)\n\n return true\n } catch {\n return false\n }\n}\n\nconst generatePemKeys = (): GeneratedKeys => {\n const { privateKey, publicKey } = generateKeyPairSync('ed25519', {\n privateKeyEncoding: {\n type: 'pkcs8',\n format: 'pem',\n },\n publicKeyEncoding: {\n type: 'spki',\n format: 'pem',\n },\n })\n\n return {\n privateKey: privateKey.toString(),\n publicKey: publicKey.toString(),\n }\n}\n\nconst generateBase64Keys = (): GeneratedKeys => {\n const { privateKey, publicKey } = generateKeyPairSync('ed25519', {\n privateKeyEncoding: {\n type: 'pkcs8',\n format: 'der',\n },\n publicKeyEncoding: {\n type: 'spki',\n format: 'der',\n },\n })\n\n return {\n privateKey: Buffer.from(privateKey).toString('base64'),\n publicKey: Buffer.from(publicKey).toString('base64'),\n }\n}\n\nconst formatKeysForStdout = ({ privateKey, publicKey }: GeneratedKeys): string =>\n `Public key:\\n\\n${publicKey.trim()}\\n\\nPrivate key:\\n\\n${privateKey.trim()}\\n`\n\nexport const runKeygenCommand = async (\n args: ParsedCliArgs,\n): Promise<CliResult> => {\n const format = (getFlagString(args, 'format') ?? 'pem') as KeyFormat\n\n if (!keyFormats.has(format)) {\n return {\n exitCode: 1,\n stderr: '--format must be pem or base64.\\n',\n }\n }\n\n const keys = format === 'pem' ? 
generatePemKeys() : generateBase64Keys()\n const outDir = getFlagString(args, 'out')\n\n if (!outDir) {\n return {\n exitCode: 0,\n stdout: formatKeysForStdout(keys),\n }\n }\n\n const absoluteOutDir = path.resolve(outDir)\n const publicKeyPath = path.join(absoluteOutDir, 'docs-sync-public.pem')\n const privateKeyPath = path.join(absoluteOutDir, 'docs-sync-private.pem')\n const force = getFlagBoolean(args, 'force')\n const publicExists = await fileExists(publicKeyPath)\n const privateExists = await fileExists(privateKeyPath)\n\n if (!force && (publicExists || privateExists)) {\n return {\n exitCode: 1,\n stderr:\n 'Key files already exist. Use --force to overwrite docs-sync-public.pem and docs-sync-private.pem.\\n',\n }\n }\n\n await mkdir(absoluteOutDir, {\n recursive: true,\n })\n await writeFile(publicKeyPath, `${keys.publicKey.trim()}\\n`, 'utf8')\n await writeFile(privateKeyPath, `${keys.privateKey.trim()}\\n`, 'utf8')\n\n return {\n exitCode: 0,\n stdout: `Wrote public key: ${publicKeyPath}\\nWrote private key: ${privateKeyPath}\\n`,\n 
}\n}\n\n"],"names":["generateKeyPairSync","access","mkdir","writeFile","path","getFlagBoolean","getFlagString","keyFormats","Set","fileExists","filePath","generatePemKeys","privateKey","publicKey","privateKeyEncoding","type","format","publicKeyEncoding","toString","generateBase64Keys","Buffer","from","formatKeysForStdout","trim","runKeygenCommand","args","has","exitCode","stderr","keys","outDir","stdout","absoluteOutDir","resolve","publicKeyPath","join","privateKeyPath","force","publicExists","privateExists","recursive"],"mappings":"AAAA,SAASA,mBAAmB,QAAQ,cAAa;AACjD,SAASC,MAAM,EAAEC,KAAK,EAAEC,SAAS,QAAQ,mBAAkB;AAC3D,OAAOC,UAAU,YAAW;AAI5B,SAASC,cAAc,EAAEC,aAAa,QAAQ,kBAAiB;AAS/D,MAAMC,aAAa,IAAIC,IAAe;IAAC;IAAU;CAAM;AAEvD,MAAMC,aAAa,OAAOC;IACxB,IAAI;QACF,MAAMT,OAAOS;QAEb,OAAO;IACT,EAAE,OAAM;QACN,OAAO;IACT;AACF;AAEA,MAAMC,kBAAkB;IACtB,MAAM,EAAEC,UAAU,EAAEC,SAAS,EAAE,GAAGb,oBAAoB,WAAW;QAC/Dc,oBAAoB;YAClBC,MAAM;YACNC,QAAQ;QACV;QACAC,mBAAmB;YACjBF,MAAM;YACNC,QAAQ;QACV;IACF;IAEA,OAAO;QACLJ,YAAYA,WAAWM,QAAQ;QAC/BL,WAAWA,UAAUK,QAAQ;IAC/B;AACF;AAEA,MAAMC,qBAAqB;IACzB,MAAM,EAAEP,UAAU,EAAEC,SAAS,EAAE,GAAGb,oBAAoB,WAAW;QAC/Dc,oBAAoB;YAClBC,MAAM;YACNC,QAAQ;QACV;QACAC,mBAAmB;YACjBF,MAAM;YACNC,QAAQ;QACV;IACF;IAEA,OAAO;QACLJ,YAAYQ,OAAOC,IAAI,CAACT,YAAYM,QAAQ,CAAC;QAC7CL,WAAWO,OAAOC,IAAI,CAACR,WAAWK,QAAQ,CAAC;IAC7C;AACF;AAEA,MAAMI,sBAAsB,CAAC,EAAEV,UAAU,EAAEC,SAAS,EAAiB,GACnE,CAAC,eAAe,EAAEA,UAAUU,IAAI,GAAG,oBAAoB,EAAEX,WAAWW,IAAI,GAAG,EAAE,CAAC;AAEhF,OAAO,MAAMC,mBAAmB,OAC9BC;IAEA,MAAMT,SAAUV,cAAcmB,MAAM,aAAa;IAEjD,IAAI,CAAClB,WAAWmB,GAAG,CAACV,SAAS;QAC3B,OAAO;YACLW,UAAU;YACVC,QAAQ;QACV;IACF;IAEA,MAAMC,OAAOb,WAAW,QAAQL,oBAAoBQ;IACpD,MAAMW,SAASxB,cAAcmB,MAAM;IAEnC,IAAI,CAACK,QAAQ;QACX,OAAO;YACLH,UAAU;YACVI,QAAQT,oBAAoBO;QAC9B;IACF;IAEA,MAAMG,iBAAiB5B,KAAK6B,OAAO,CAACH;IACpC,MAAMI,gBAAgB9B,KAAK+B,IAAI,CAACH,gBAAgB;IAChD,MAAMI,iBAAiBhC,KAAK+B,IAAI,CAACH,gBAAgB;IACjD,MAAMK,QAAQhC,eAAeoB,MAAM;IACnC,MAAMa,eAAe,MAAM7B,WAAWyB;IACtC,MAAMK,gBAAgB,MAAM9B,WAAW2B;IAEvC,IAAI,CAACC,SAAUC,CAAAA,gBAAg
BC,aAAY,GAAI;QAC7C,OAAO;YACLZ,UAAU;YACVC,QACE;QACJ;IACF;IAEA,MAAM1B,MAAM8B,gBAAgB;QAC1BQ,WAAW;IACb;IACA,MAAMrC,UAAU+B,eAAe,GAAGL,KAAKhB,SAAS,CAACU,IAAI,GAAG,EAAE,CAAC,EAAE;IAC7D,MAAMpB,UAAUiC,gBAAgB,GAAGP,KAAKjB,UAAU,CAACW,IAAI,GAAG,EAAE,CAAC,EAAE;IAE/D,OAAO;QACLI,UAAU;QACVI,QAAQ,CAAC,kBAAkB,EAAEG,cAAc,qBAAqB,EAAEE,eAAe,EAAE,CAAC;IACtF;AACF,EAAC"}
@@ -0,0 +1,2 @@
1
+ import type { CliResult, ParsedCliArgs } from '../types.js';
2
+ export declare const runManifestCommand: (args: ParsedCliArgs) => Promise<CliResult>;
@@ -0,0 +1,50 @@
1
+ import { buildDocsManifest, validateDocsManifest } from '../../sync/index.js';
2
+ import { readDocsAiExportManifest, walkDocsFiles } from '../filesystem.js';
3
+ import { formatIssues, printJson } from '../format.js';
4
+ import { getFlagBoolean } from '../parseArgs.js';
5
+ import { getDocsCommandOptions } from './validate.js';
6
/**
 * Implements the `manifest` CLI command.
 *
 * Walks the docs root, reads the AI-export manifest, builds the docs sync
 * manifest from both, validates it, and prints it as JSON on success.
 * Any failure is reported as a CliResult with exitCode 1 and a stderr
 * message listing the validation issues.
 *
 * @param {ParsedCliArgs} args - parsed CLI flags and positionals
 * @returns {Promise<CliResult>} exitCode plus stdout (JSON) or stderr text
 */
export const runManifestCommand = async (args) => {
    const options = getDocsCommandOptions(args);
    // Option parsing already produced a CliResult error — pass it through.
    if ('exitCode' in options) {
        return options;
    }
    // The file walk and the AI-export manifest read are independent reads
    // of the docs root — perform them concurrently rather than serially.
    const [files, aiExport] = await Promise.all([
        walkDocsFiles({
            root: options.docsRoot
        }),
        readDocsAiExportManifest({
            root: options.docsRoot
        })
    ]);
    if (!aiExport.ok) {
        return {
            exitCode: 1,
            stderr: `AI export manifest is invalid.\n\nErrors:\n${formatIssues(aiExport.issues)}\n`
        };
    }
    const manifest = buildDocsManifest({
        aiExport: aiExport.manifest,
        branch: options.branch,
        commit: options.commit,
        files,
        repository: options.repository,
        root: options.sourceRoot,
        sourceId: options.sourceId
    });
    const validation = validateDocsManifest(manifest, {
        maxFileBytes: options.maxFileBytes,
        maxFiles: options.maxFiles,
        maxTotalBytes: options.maxTotalBytes,
        routeBase: options.routeBase
    });
    if (!validation.ok) {
        return {
            exitCode: 1,
            stderr: `Manifest is invalid.\n\nErrors:\n${formatIssues(validation.issues)}\n`
        };
    }
    return {
        exitCode: 0,
        stdout: printJson(manifest, getFlagBoolean(args, 'pretty'))
    };
};
49
+
50
+ //# sourceMappingURL=manifest.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../../../src/cli/commands/manifest.ts"],"sourcesContent":["import type { CliResult, ParsedCliArgs } from '../types.js'\n\nimport {\n buildDocsManifest,\n validateDocsManifest,\n} from '../../sync/index.js'\nimport {\n readDocsAiExportManifest,\n walkDocsFiles,\n} from '../filesystem.js'\nimport { formatIssues, printJson } from '../format.js'\nimport { getFlagBoolean } from '../parseArgs.js'\nimport { getDocsCommandOptions } from './validate.js'\n\nexport const runManifestCommand = async (\n args: ParsedCliArgs,\n): Promise<CliResult> => {\n const options = getDocsCommandOptions(args)\n\n if ('exitCode' in options) {\n return options\n }\n\n const files = await walkDocsFiles({\n root: options.docsRoot,\n })\n const aiExport = await readDocsAiExportManifest({\n root: options.docsRoot,\n })\n\n if (!aiExport.ok) {\n return {\n exitCode: 1,\n stderr: `AI export manifest is invalid.\\n\\nErrors:\\n${formatIssues(aiExport.issues)}\\n`,\n }\n }\n\n const manifest = buildDocsManifest({\n aiExport: aiExport.manifest,\n branch: options.branch,\n commit: options.commit,\n files,\n repository: options.repository,\n root: options.sourceRoot,\n sourceId: options.sourceId,\n })\n const validation = validateDocsManifest(manifest, {\n maxFileBytes: options.maxFileBytes,\n maxFiles: options.maxFiles,\n maxTotalBytes: options.maxTotalBytes,\n routeBase: options.routeBase,\n })\n\n if (!validation.ok) {\n return {\n exitCode: 1,\n stderr: `Manifest is invalid.\\n\\nErrors:\\n${formatIssues(validation.issues)}\\n`,\n }\n }\n\n return {\n exitCode: 0,\n stdout: printJson(manifest, getFlagBoolean(args, 'pretty')),\n 
}\n}\n"],"names":["buildDocsManifest","validateDocsManifest","readDocsAiExportManifest","walkDocsFiles","formatIssues","printJson","getFlagBoolean","getDocsCommandOptions","runManifestCommand","args","options","files","root","docsRoot","aiExport","ok","exitCode","stderr","issues","manifest","branch","commit","repository","sourceRoot","sourceId","validation","maxFileBytes","maxFiles","maxTotalBytes","routeBase","stdout"],"mappings":"AAEA,SACEA,iBAAiB,EACjBC,oBAAoB,QACf,sBAAqB;AAC5B,SACEC,wBAAwB,EACxBC,aAAa,QACR,mBAAkB;AACzB,SAASC,YAAY,EAAEC,SAAS,QAAQ,eAAc;AACtD,SAASC,cAAc,QAAQ,kBAAiB;AAChD,SAASC,qBAAqB,QAAQ,gBAAe;AAErD,OAAO,MAAMC,qBAAqB,OAChCC;IAEA,MAAMC,UAAUH,sBAAsBE;IAEtC,IAAI,cAAcC,SAAS;QACzB,OAAOA;IACT;IAEA,MAAMC,QAAQ,MAAMR,cAAc;QAChCS,MAAMF,QAAQG,QAAQ;IACxB;IACA,MAAMC,WAAW,MAAMZ,yBAAyB;QAC9CU,MAAMF,QAAQG,QAAQ;IACxB;IAEA,IAAI,CAACC,SAASC,EAAE,EAAE;QAChB,OAAO;YACLC,UAAU;YACVC,QAAQ,CAAC,2CAA2C,EAAEb,aAAaU,SAASI,MAAM,EAAE,EAAE,CAAC;QACzF;IACF;IAEA,MAAMC,WAAWnB,kBAAkB;QACjCc,UAAUA,SAASK,QAAQ;QAC3BC,QAAQV,QAAQU,MAAM;QACtBC,QAAQX,QAAQW,MAAM;QACtBV;QACAW,YAAYZ,QAAQY,UAAU;QAC9BV,MAAMF,QAAQa,UAAU;QACxBC,UAAUd,QAAQc,QAAQ;IAC5B;IACA,MAAMC,aAAaxB,qBAAqBkB,UAAU;QAChDO,cAAchB,QAAQgB,YAAY;QAClCC,UAAUjB,QAAQiB,QAAQ;QAC1BC,eAAelB,QAAQkB,aAAa;QACpCC,WAAWnB,QAAQmB,SAAS;IAC9B;IAEA,IAAI,CAACJ,WAAWV,EAAE,EAAE;QAClB,OAAO;YACLC,UAAU;YACVC,QAAQ,CAAC,iCAAiC,EAAEb,aAAaqB,WAAWP,MAAM,EAAE,EAAE,CAAC;QACjF;IACF;IAEA,OAAO;QACLF,UAAU;QACVc,QAAQzB,UAAUc,UAAUb,eAAeG,MAAM;IACnD;AACF,EAAC"}
@@ -0,0 +1,2 @@
1
+ import type { CliResult, ParsedCliArgs } from '../types.js';
2
+ export declare const runPlanCommand: (args: ParsedCliArgs) => Promise<CliResult>;
@@ -0,0 +1,110 @@
1
+ import { readFile } from 'node:fs/promises';
2
+ import { buildDocsManifest, planDocsSync, validateDocsManifest } from '../../sync/index.js';
3
+ import { readDocsAiExportManifest, walkDocsFiles } from '../filesystem.js';
4
+ import { formatIssues, formatPlanSummary, printJson } from '../format.js';
5
+ import { getFlagBoolean, getFlagString } from '../parseArgs.js';
6
+ import { getDocsCommandOptions } from './validate.js';
7
// Values accepted by the --delete-behavior CLI flag for the plan command.
const deleteBehaviors = new Set(['archive', 'delete', 'draft', 'ignore']);
13
/**
 * Type guard for a single existing-docs record loaded from --existing.
 * Requires string `route` and `sourcePath`; `sourceHash`/`title` may be
 * absent or strings; `archived` may be absent or a boolean.
 *
 * @param {unknown} value - Candidate record.
 * @returns {boolean} Whether value has the expected shape.
 */
const isExistingDocsRecord = (value)=>{
    // Only plain objects qualify (null and arrays are typeof 'object' too).
    if (typeof value !== 'object' || value === null || Array.isArray(value)) {
        return false;
    }
    const candidate = value;
    const optionalString = (field)=>field === undefined || typeof field === 'string';
    if (typeof candidate.route !== 'string') {
        return false;
    }
    if (typeof candidate.sourcePath !== 'string') {
        return false;
    }
    if (!optionalString(candidate.sourceHash) || !optionalString(candidate.title)) {
        return false;
    }
    return candidate.archived === undefined || typeof candidate.archived === 'boolean';
};
20
/**
 * Loads and validates the JSON file referenced by --existing.
 *
 * @param {string | undefined} existingPath - Path from the --existing flag.
 * @returns {Promise<CliResult | ExistingDocsRecord[]>} The parsed records,
 *   an empty array when no path was given, or a CliResult error.
 */
const loadExistingDocs = async (existingPath)=>{
    // No --existing flag: nothing to diff against.
    if (!existingPath) {
        return [];
    }
    let decoded;
    try {
        decoded = JSON.parse(await readFile(existingPath, 'utf8'));
    } catch (error) {
        // Covers both unreadable files and malformed JSON.
        const detail = error instanceof Error ? `Could not read --existing file: ${error.message}\n` : 'Could not read --existing file.\n';
        return {
            exitCode: 1,
            stderr: detail
        };
    }
    const isValidList = Array.isArray(decoded) && decoded.every(isExistingDocsRecord);
    if (!isValidList) {
        return {
            exitCode: 1,
            stderr: '--existing must point to a JSON array of existing docs records.\n'
        };
    }
    return decoded;
};
42
/**
 * `plan` command: builds and validates the docs manifest, then computes the
 * sync plan against an optional set of existing records (--existing) and
 * prints either a summary or JSON (--json).
 *
 * @param {ParsedCliArgs} args - Parsed CLI arguments.
 * @returns {Promise<CliResult>} exitCode 0 with the plan on success.
 */
export const runPlanCommand = async (args)=>{
    const docsOptions = getDocsCommandOptions(args);
    // Shared option parsing failed; propagate the CliResult untouched.
    if ('exitCode' in docsOptions) {
        return docsOptions;
    }
    const behaviorFlag = getFlagString(args, 'delete-behavior');
    if (behaviorFlag !== undefined && !deleteBehaviors.has(behaviorFlag)) {
        return {
            exitCode: 1,
            stderr: '--delete-behavior must be archive, delete, draft, or ignore.\n'
        };
    }
    const existingDocs = await loadExistingDocs(getFlagString(args, 'existing'));
    // loadExistingDocs returns either a records array or a CliResult error.
    if ('exitCode' in existingDocs) {
        return existingDocs;
    }
    const docFiles = await walkDocsFiles({
        root: docsOptions.docsRoot
    });
    const aiExportResult = await readDocsAiExportManifest({
        root: docsOptions.docsRoot
    });
    if (!aiExportResult.ok) {
        return {
            exitCode: 1,
            stderr: `AI export manifest is invalid.\n\nErrors:\n${formatIssues(aiExportResult.issues)}\n`
        };
    }
    const manifest = buildDocsManifest({
        aiExport: aiExportResult.manifest,
        branch: docsOptions.branch,
        commit: docsOptions.commit,
        deleteBehavior: behaviorFlag,
        files: docFiles,
        repository: docsOptions.repository,
        root: docsOptions.sourceRoot,
        sourceId: docsOptions.sourceId
    });
    const manifestCheck = validateDocsManifest(manifest, {
        maxFileBytes: docsOptions.maxFileBytes,
        maxFiles: docsOptions.maxFiles,
        maxTotalBytes: docsOptions.maxTotalBytes,
        routeBase: docsOptions.routeBase
    });
    if (!manifestCheck.ok) {
        return {
            exitCode: 1,
            stderr: `Manifest is invalid.\n\nErrors:\n${formatIssues(manifestCheck.issues)}\n`
        };
    }
    const plan = planDocsSync({
        deleteBehavior: behaviorFlag,
        desired: manifestCheck.data,
        existing: existingDocs
    });
    // --json emits the raw plan; otherwise a human-readable summary.
    if (getFlagBoolean(args, 'json')) {
        return {
            exitCode: 0,
            stdout: printJson(plan, getFlagBoolean(args, 'pretty'))
        };
    }
    return {
        exitCode: 0,
        stdout: formatPlanSummary(plan)
    };
};
109
+
110
+ //# sourceMappingURL=plan.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../../../src/cli/commands/plan.ts"],"sourcesContent":["import { readFile } from 'node:fs/promises'\n\nimport type {\n DocsDeleteBehavior,\n ExistingDocsRecord,\n} from '../../sync/index.js'\nimport type { CliResult, ParsedCliArgs } from '../types.js'\n\nimport {\n buildDocsManifest,\n planDocsSync,\n validateDocsManifest,\n} from '../../sync/index.js'\nimport {\n readDocsAiExportManifest,\n walkDocsFiles,\n} from '../filesystem.js'\nimport { formatIssues, formatPlanSummary, printJson } from '../format.js'\nimport { getFlagBoolean, getFlagString } from '../parseArgs.js'\nimport { getDocsCommandOptions } from './validate.js'\n\nconst deleteBehaviors = new Set<DocsDeleteBehavior>([\n 'archive',\n 'delete',\n 'draft',\n 'ignore',\n])\n\nconst isExistingDocsRecord = (value: unknown): value is ExistingDocsRecord => {\n if (typeof value !== 'object' || value === null || Array.isArray(value)) {\n return false\n }\n\n const record = value as Record<string, unknown>\n\n return (\n typeof record.route === 'string' &&\n typeof record.sourcePath === 'string' &&\n (record.sourceHash === undefined || typeof record.sourceHash === 'string') &&\n (record.title === undefined || typeof record.title === 'string') &&\n (record.archived === undefined || typeof record.archived === 'boolean')\n )\n}\n\nconst loadExistingDocs = async (\n existingPath: string | undefined,\n): Promise<CliResult | ExistingDocsRecord[]> => {\n if (!existingPath) {\n return []\n }\n\n let parsed: unknown\n\n try {\n const raw = await readFile(existingPath, 'utf8')\n parsed = JSON.parse(raw) as unknown\n } catch (error) {\n return {\n exitCode: 1,\n stderr:\n error instanceof Error\n ? 
`Could not read --existing file: ${error.message}\\n`\n : 'Could not read --existing file.\\n',\n }\n }\n\n if (!Array.isArray(parsed) || !parsed.every(isExistingDocsRecord)) {\n return {\n exitCode: 1,\n stderr: '--existing must point to a JSON array of existing docs records.\\n',\n }\n }\n\n return parsed\n}\n\nexport const runPlanCommand = async (args: ParsedCliArgs): Promise<CliResult> => {\n const options = getDocsCommandOptions(args)\n\n if ('exitCode' in options) {\n return options\n }\n\n const deleteBehaviorFlag = getFlagString(args, 'delete-behavior')\n\n if (\n deleteBehaviorFlag !== undefined &&\n !deleteBehaviors.has(deleteBehaviorFlag as DocsDeleteBehavior)\n ) {\n return {\n exitCode: 1,\n stderr: '--delete-behavior must be archive, delete, draft, or ignore.\\n',\n }\n }\n\n const existing = await loadExistingDocs(getFlagString(args, 'existing'))\n\n if ('exitCode' in existing) {\n return existing\n }\n\n const files = await walkDocsFiles({\n root: options.docsRoot,\n })\n const aiExport = await readDocsAiExportManifest({\n root: options.docsRoot,\n })\n\n if (!aiExport.ok) {\n return {\n exitCode: 1,\n stderr: `AI export manifest is invalid.\\n\\nErrors:\\n${formatIssues(aiExport.issues)}\\n`,\n }\n }\n\n const deleteBehavior = deleteBehaviorFlag as DocsDeleteBehavior | undefined\n const manifest = buildDocsManifest({\n aiExport: aiExport.manifest,\n branch: options.branch,\n commit: options.commit,\n deleteBehavior,\n files,\n repository: options.repository,\n root: options.sourceRoot,\n sourceId: options.sourceId,\n })\n const validation = validateDocsManifest(manifest, {\n maxFileBytes: options.maxFileBytes,\n maxFiles: options.maxFiles,\n maxTotalBytes: options.maxTotalBytes,\n routeBase: options.routeBase,\n })\n\n if (!validation.ok) {\n return {\n exitCode: 1,\n stderr: `Manifest is invalid.\\n\\nErrors:\\n${formatIssues(validation.issues)}\\n`,\n }\n }\n\n const plan = planDocsSync({\n deleteBehavior,\n desired: validation.data,\n 
existing,\n })\n\n if (getFlagBoolean(args, 'json')) {\n return {\n exitCode: 0,\n stdout: printJson(plan, getFlagBoolean(args, 'pretty')),\n }\n }\n\n return {\n exitCode: 0,\n stdout: formatPlanSummary(plan),\n }\n}\n"],"names":["readFile","buildDocsManifest","planDocsSync","validateDocsManifest","readDocsAiExportManifest","walkDocsFiles","formatIssues","formatPlanSummary","printJson","getFlagBoolean","getFlagString","getDocsCommandOptions","deleteBehaviors","Set","isExistingDocsRecord","value","Array","isArray","record","route","sourcePath","sourceHash","undefined","title","archived","loadExistingDocs","existingPath","parsed","raw","JSON","parse","error","exitCode","stderr","Error","message","every","runPlanCommand","args","options","deleteBehaviorFlag","has","existing","files","root","docsRoot","aiExport","ok","issues","deleteBehavior","manifest","branch","commit","repository","sourceRoot","sourceId","validation","maxFileBytes","maxFiles","maxTotalBytes","routeBase","plan","desired","data","stdout"],"mappings":"AAAA,SAASA,QAAQ,QAAQ,mBAAkB;AAQ3C,SACEC,iBAAiB,EACjBC,YAAY,EACZC,oBAAoB,QACf,sBAAqB;AAC5B,SACEC,wBAAwB,EACxBC,aAAa,QACR,mBAAkB;AACzB,SAASC,YAAY,EAAEC,iBAAiB,EAAEC,SAAS,QAAQ,eAAc;AACzE,SAASC,cAAc,EAAEC,aAAa,QAAQ,kBAAiB;AAC/D,SAASC,qBAAqB,QAAQ,gBAAe;AAErD,MAAMC,kBAAkB,IAAIC,IAAwB;IAClD;IACA;IACA;IACA;CACD;AAED,MAAMC,uBAAuB,CAACC;IAC5B,IAAI,OAAOA,UAAU,YAAYA,UAAU,QAAQC,MAAMC,OAAO,CAACF,QAAQ;QACvE,OAAO;IACT;IAEA,MAAMG,SAASH;IAEf,OACE,OAAOG,OAAOC,KAAK,KAAK,YACxB,OAAOD,OAAOE,UAAU,KAAK,YAC5BF,CAAAA,OAAOG,UAAU,KAAKC,aAAa,OAAOJ,OAAOG,UAAU,KAAK,QAAO,KACvEH,CAAAA,OAAOK,KAAK,KAAKD,aAAa,OAAOJ,OAAOK,KAAK,KAAK,QAAO,KAC7DL,CAAAA,OAAOM,QAAQ,KAAKF,aAAa,OAAOJ,OAAOM,QAAQ,KAAK,SAAQ;AAEzE;AAEA,MAAMC,mBAAmB,OACvBC;IAEA,IAAI,CAACA,cAAc;QACjB,OAAO,EAAE;IACX;IAEA,IAAIC;IAEJ,IAAI;QACF,MAAMC,MAAM,MAAM5B,SAAS0B,cAAc;QACzCC,SAASE,KAAKC,KAAK,CAACF;IACtB,EAAE,OAAOG,OAAO;QACd,OAAO;YACLC,UAAU;YACVC,QACEF,iBAAiBG,QACb,CAAC,gCAAgC,EAAEH,MAAMI,OAAO,CAAC,EAAE,CAAC,GACpD;QACR;IACF;IAEA,IAAI,CAA
CnB,MAAMC,OAAO,CAACU,WAAW,CAACA,OAAOS,KAAK,CAACtB,uBAAuB;QACjE,OAAO;YACLkB,UAAU;YACVC,QAAQ;QACV;IACF;IAEA,OAAON;AACT;AAEA,OAAO,MAAMU,iBAAiB,OAAOC;IACnC,MAAMC,UAAU5B,sBAAsB2B;IAEtC,IAAI,cAAcC,SAAS;QACzB,OAAOA;IACT;IAEA,MAAMC,qBAAqB9B,cAAc4B,MAAM;IAE/C,IACEE,uBAAuBlB,aACvB,CAACV,gBAAgB6B,GAAG,CAACD,qBACrB;QACA,OAAO;YACLR,UAAU;YACVC,QAAQ;QACV;IACF;IAEA,MAAMS,WAAW,MAAMjB,iBAAiBf,cAAc4B,MAAM;IAE5D,IAAI,cAAcI,UAAU;QAC1B,OAAOA;IACT;IAEA,MAAMC,QAAQ,MAAMtC,cAAc;QAChCuC,MAAML,QAAQM,QAAQ;IACxB;IACA,MAAMC,WAAW,MAAM1C,yBAAyB;QAC9CwC,MAAML,QAAQM,QAAQ;IACxB;IAEA,IAAI,CAACC,SAASC,EAAE,EAAE;QAChB,OAAO;YACLf,UAAU;YACVC,QAAQ,CAAC,2CAA2C,EAAE3B,aAAawC,SAASE,MAAM,EAAE,EAAE,CAAC;QACzF;IACF;IAEA,MAAMC,iBAAiBT;IACvB,MAAMU,WAAWjD,kBAAkB;QACjC6C,UAAUA,SAASI,QAAQ;QAC3BC,QAAQZ,QAAQY,MAAM;QACtBC,QAAQb,QAAQa,MAAM;QACtBH;QACAN;QACAU,YAAYd,QAAQc,UAAU;QAC9BT,MAAML,QAAQe,UAAU;QACxBC,UAAUhB,QAAQgB,QAAQ;IAC5B;IACA,MAAMC,aAAarD,qBAAqB+C,UAAU;QAChDO,cAAclB,QAAQkB,YAAY;QAClCC,UAAUnB,QAAQmB,QAAQ;QAC1BC,eAAepB,QAAQoB,aAAa;QACpCC,WAAWrB,QAAQqB,SAAS;IAC9B;IAEA,IAAI,CAACJ,WAAWT,EAAE,EAAE;QAClB,OAAO;YACLf,UAAU;YACVC,QAAQ,CAAC,iCAAiC,EAAE3B,aAAakD,WAAWR,MAAM,EAAE,EAAE,CAAC;QACjF;IACF;IAEA,MAAMa,OAAO3D,aAAa;QACxB+C;QACAa,SAASN,WAAWO,IAAI;QACxBrB;IACF;IAEA,IAAIjC,eAAe6B,MAAM,SAAS;QAChC,OAAO;YACLN,UAAU;YACVgC,QAAQxD,UAAUqD,MAAMpD,eAAe6B,MAAM;QAC/C;IACF;IAEA,OAAO;QACLN,UAAU;QACVgC,QAAQzD,kBAAkBsD;IAC5B;AACF,EAAC"}
@@ -0,0 +1,3 @@
1
+ import type { HttpGetJson, HttpPostJson } from '../http.js';
2
+ import type { CliResult, ParsedCliArgs } from '../types.js';
3
+ export declare const runPushCommand: (args: ParsedCliArgs, httpPost?: HttpPostJson, httpGet?: HttpGetJson) => Promise<CliResult>;
@@ -0,0 +1,308 @@
1
+ import { readFile } from 'node:fs/promises';
2
+ import { DEFAULT_GITHUB_OIDC_AUDIENCE } from '../../constants.js';
3
+ import { signDocsSyncRequest } from '../../security/index.js';
4
+ import { buildDocsManifest, sha256Hex, validateDocsManifest } from '../../sync/index.js';
5
+ import { readDocsAiExportManifest, walkDocsFiles } from '../filesystem.js';
6
+ import { formatIssues, formatPushSummary, printJson } from '../format.js';
7
+ import { getJson, postJson } from '../http.js';
8
+ import { getFlagBoolean, getFlagString } from '../parseArgs.js';
9
+ import { getDocsCommandOptions } from './validate.js';
10
// Values accepted by --delete-behavior for the push command.
const supportedPushDeleteBehaviors = new Set(['archive', 'delete', 'draft', 'ignore']);
16
// True for plain (non-null, non-array) objects.
const isRecord = (value)=>value !== null && typeof value === 'object' && !Array.isArray(value);
// The server push response is only checked to be a plain object here;
// individual fields (ok, error) are inspected at the call sites.
const isServerPushResponse = (value)=>isRecord(value);
18
/**
 * Validates the --endpoint flag value as a full http(s) URL.
 *
 * @param {string} endpoint - Raw flag value.
 * @returns {CliResult | string} The normalized URL string, or a CliResult error.
 */
const validateEndpointUrl = (endpoint)=>{
    let parsedUrl;
    try {
        parsedUrl = new URL(endpoint);
    } catch {
        return {
            exitCode: 1,
            stderr: '--endpoint must be a valid full http:// or https:// URL.\n'
        };
    }
    const isHttpScheme = parsedUrl.protocol === 'http:' || parsedUrl.protocol === 'https:';
    if (!isHttpScheme) {
        return {
            exitCode: 1,
            stderr: '--endpoint must be a full http:// or https:// URL.\n'
        };
    }
    return parsedUrl.toString();
};
35
/**
 * Resolves the Ed25519 private key from --private-key-file or
 * --private-key-env (mutually exclusive; exactly one is required).
 *
 * @param {ParsedCliArgs} args - Parsed CLI arguments.
 * @returns {Promise<CliResult | string>} The key material, or a CliResult error.
 */
const readPrivateKey = async (args)=>{
    const keyFilePath = getFlagString(args, 'private-key-file');
    const keyEnvName = getFlagString(args, 'private-key-env');
    if (keyFilePath && keyEnvName) {
        return {
            exitCode: 1,
            stderr: 'Use either --private-key-file or --private-key-env, not both.\n'
        };
    }
    if (!keyFilePath && !keyEnvName) {
        return {
            exitCode: 1,
            stderr: 'Push requires --private-key-file or --private-key-env.\n'
        };
    }
    // Environment-variable source takes the branch when set.
    if (keyEnvName) {
        const keyFromEnv = process.env[keyEnvName];
        if (!keyFromEnv) {
            return {
                exitCode: 1,
                stderr: `Environment variable "${keyEnvName}" is not set.\n`
            };
        }
        return keyFromEnv;
    }
    try {
        // keyFilePath is defined here; `?? ''` only satisfies the type checker.
        return await readFile(keyFilePath ?? '', 'utf8');
    } catch (error) {
        const detail = error instanceof Error ? `Could not read private key file: ${error.message}\n` : 'Could not read private key file.\n';
        return {
            exitCode: 1,
            stderr: detail
        };
    }
};
69
/**
 * Builds the GitHub Actions OIDC token request URL with the desired audience.
 *
 * @param {{ audience: string, requestUrl: string }} params - Audience and the
 *   ACTIONS_ID_TOKEN_REQUEST_URL value.
 * @returns {CliResult | string} The request URL, or a CliResult error when
 *   requestUrl is not parseable.
 */
const getGithubOidcTokenRequestUrl = ({ audience, requestUrl })=>{
    let tokenUrl;
    try {
        tokenUrl = new URL(requestUrl);
    } catch {
        return {
            exitCode: 1,
            stderr: 'ACTIONS_ID_TOKEN_REQUEST_URL is not a valid URL.\n'
        };
    }
    // The issued JWT is scoped by the audience query parameter.
    tokenUrl.searchParams.set('audience', audience);
    return tokenUrl.toString();
};
81
/**
 * Obtains a GitHub OIDC token: from the env var named by --oidc-token-env if
 * given, otherwise by calling the Actions runtime token endpoint.
 *
 * @param {{ args: ParsedCliArgs, audience: string, httpGet: HttpGetJson }} params
 * @returns {Promise<CliResult | string>} The token, or a CliResult error.
 */
const readGithubOidcToken = async ({ args, audience, httpGet })=>{
    // An explicit env override short-circuits the Actions runtime endpoint.
    const tokenEnvName = getFlagString(args, 'oidc-token-env');
    if (tokenEnvName) {
        const tokenFromEnv = process.env[tokenEnvName];
        if (!tokenFromEnv) {
            return {
                exitCode: 1,
                stderr: `Environment variable "${tokenEnvName}" is not set.\n`
            };
        }
        return tokenFromEnv;
    }
    const requestUrl = process.env.ACTIONS_ID_TOKEN_REQUEST_URL;
    const requestToken = process.env.ACTIONS_ID_TOKEN_REQUEST_TOKEN;
    if (!requestUrl || !requestToken) {
        return {
            exitCode: 1,
            stderr: 'GitHub OIDC push requires ACTIONS_ID_TOKEN_REQUEST_URL and ACTIONS_ID_TOKEN_REQUEST_TOKEN, or --oidc-token-env.\n'
        };
    }
    const tokenRequestUrl = getGithubOidcTokenRequestUrl({
        audience,
        requestUrl
    });
    // Non-string means a CliResult error from URL validation.
    if (typeof tokenRequestUrl !== 'string') {
        return tokenRequestUrl;
    }
    const response = await httpGet({
        headers: {
            Authorization: `bearer ${requestToken}`
        },
        url: tokenRequestUrl
    });
    const tokenValue = response.ok && isRecord(response.body) ? response.body.value : undefined;
    if (typeof tokenValue !== 'string') {
        return {
            exitCode: 1,
            stderr: `Could not retrieve GitHub OIDC token. HTTP status ${response.status}.\n`
        };
    }
    return tokenValue;
};
122
/**
 * Parses and validates all push-specific CLI flags on top of the shared docs
 * options. Returns either a CliResult error (distinguished by an 'exitCode'
 * property) or a fully resolved options object whose `authMode` is
 * 'github-oidc' or 'ed25519'.
 *
 * Validation order matters: option-parse errors win over endpoint errors,
 * which win over mode/behavior errors, which win over auth-flag errors.
 */
const getPushCommandOptions = async (args)=>{
    const docsOptions = getDocsCommandOptions(args);
    // Shared option parsing failed; propagate the CliResult untouched.
    if ('exitCode' in docsOptions) {
        return docsOptions;
    }
    const endpointFlag = getFlagString(args, 'endpoint');
    if (!endpointFlag) {
        return {
            exitCode: 1,
            stderr: 'Push requires --endpoint <url>.\n'
        };
    }
    const endpoint = validateEndpointUrl(endpointFlag);
    // Non-string means validateEndpointUrl returned a CliResult error.
    if (typeof endpoint !== 'string') {
        return endpoint;
    }
    if (getFlagBoolean(args, 'dry-run') && getFlagBoolean(args, 'sync')) {
        return {
            exitCode: 1,
            stderr: 'Use either --dry-run or --sync, not both.\n'
        };
    }
    const deleteBehaviorFlag = getFlagString(args, 'delete-behavior');
    if (deleteBehaviorFlag !== undefined && !supportedPushDeleteBehaviors.has(deleteBehaviorFlag)) {
        return {
            exitCode: 1,
            stderr: '--delete-behavior for push must be archive, delete, draft, or ignore.\n'
        };
    }
    // Default mode is dry-run; --sync opts in to actually applying changes.
    const mode = getFlagBoolean(args, 'sync') ? 'sync' : 'dry-run';
    const baseOptions = {
        ...docsOptions,
        deleteBehavior: deleteBehaviorFlag,
        endpoint,
        mode,
        publish: getFlagBoolean(args, 'publish')
    };
    if (getFlagBoolean(args, 'github-oidc')) {
        // OIDC auth is mutually exclusive with the Ed25519 key flags.
        if (getFlagString(args, 'key-id')) {
            return {
                exitCode: 1,
                stderr: 'Do not use --key-id with --github-oidc.\n'
            };
        }
        if (getFlagString(args, 'private-key-file') || getFlagString(args, 'private-key-env')) {
            return {
                exitCode: 1,
                stderr: 'Do not use Ed25519 private key flags with --github-oidc.\n'
            };
        }
        return {
            ...baseOptions,
            authMode: 'github-oidc',
            oidcAudience: getFlagString(args, 'oidc-audience') ?? DEFAULT_GITHUB_OIDC_AUDIENCE,
            oidcTokenEnv: getFlagString(args, 'oidc-token-env')
        };
    }
    // Ed25519 path: a key id plus key material are both mandatory.
    const keyId = getFlagString(args, 'key-id');
    if (!keyId) {
        return {
            exitCode: 1,
            stderr: 'Push requires --key-id <id>.\n'
        };
    }
    const privateKey = await readPrivateKey(args);
    // Non-string means readPrivateKey returned a CliResult error.
    if (typeof privateKey !== 'string') {
        return privateKey;
    }
    return {
        ...baseOptions,
        authMode: 'ed25519',
        keyId,
        privateKey
    };
};
197
/**
 * Formats a failed sync response for stderr, preferring the server-provided
 * error message over a generic HTTP-status line.
 *
 * @param {{ body: unknown, status: number }} params - Response body and status.
 * @returns {string} A newline-terminated error message.
 */
const formatServerFailure = ({ body, status })=>{
    const serverMessage = isServerPushResponse(body) ? body.error?.message : undefined;
    if (serverMessage) {
        return `${serverMessage}\n`;
    }
    return `Sync request failed with HTTP status ${status}.\n`;
};
203
/**
 * `push` command: builds and validates the docs manifest, signs or
 * authenticates the request (Ed25519 or GitHub OIDC), POSTs it to the
 * configured endpoint, and reports the server's response.
 *
 * @param args - Parsed CLI arguments.
 * @param httpPost - Injectable JSON POST client (defaults to postJson).
 * @param httpGet - Injectable JSON GET client, used to fetch the OIDC token
 *   (defaults to getJson).
 * @returns A CliResult; exitCode 0 only when the server replies ok: true.
 */
export const runPushCommand = async (args, httpPost = postJson, httpGet = getJson)=>{
    const options = await getPushCommandOptions(args);
    // A CliResult here means flag validation already failed; propagate it.
    if ('exitCode' in options) {
        return options;
    }
    const files = await walkDocsFiles({
        root: options.docsRoot
    });
    const aiExport = await readDocsAiExportManifest({
        root: options.docsRoot
    });
    if (!aiExport.ok) {
        return {
            exitCode: 1,
            stderr: `AI export manifest is invalid.\n\nErrors:\n${formatIssues(aiExport.issues)}\n`
        };
    }
    const manifest = buildDocsManifest({
        aiExport: aiExport.manifest,
        branch: options.branch,
        commit: options.commit,
        // Push defaults to 'archive' when --delete-behavior is not given.
        deleteBehavior: options.deleteBehavior ?? 'archive',
        files,
        mode: options.mode,
        publish: options.publish,
        repository: options.repository,
        root: options.sourceRoot,
        sourceId: options.sourceId
    });
    const validation = validateDocsManifest(manifest, {
        maxFileBytes: options.maxFileBytes,
        maxFiles: options.maxFiles,
        maxTotalBytes: options.maxTotalBytes,
        routeBase: options.routeBase
    });
    if (!validation.ok) {
        return {
            exitCode: 1,
            stderr: `Manifest is invalid.\n\nErrors:\n${formatIssues(validation.issues)}\n`
        };
    }
    // The serialized manifest is the request body for both auth modes.
    const body = JSON.stringify(manifest);
    let signedRequest;
    if (options.authMode === 'github-oidc') {
        const oidcToken = await readGithubOidcToken({
            args,
            audience: options.oidcAudience,
            httpGet
        });
        // Non-string means readGithubOidcToken returned a CliResult error.
        if (typeof oidcToken !== 'string') {
            return oidcToken;
        }
        // OIDC mode: bearer token plus a body hash header for integrity.
        signedRequest = {
            body,
            headers: {
                Authorization: `Bearer ${oidcToken}`,
                'Content-Type': 'application/json',
                'X-VL-MD-DOCS-Body-SHA256': sha256Hex(body)
            }
        };
    } else {
        // Ed25519 mode: the signing helper produces body + signature headers.
        signedRequest = signDocsSyncRequest({
            body,
            endpoint: options.endpoint,
            keyId: options.keyId,
            privateKey: options.privateKey
        });
    }
    const response = await httpPost({
        body: signedRequest.body,
        headers: signedRequest.headers,
        url: options.endpoint
    });
    // --json: always print the raw exchange; exit code still reflects success.
    if (getFlagBoolean(args, 'json')) {
        return {
            exitCode: response.ok && isServerPushResponse(response.body) && response.body.ok === true ? 0 : 1,
            stdout: printJson({
                endpoint: options.endpoint,
                mode: options.mode,
                response: response.body,
                sourceId: options.sourceId,
                status: response.status
            }, getFlagBoolean(args, 'pretty'))
        };
    }
    // Success requires an HTTP 2xx AND an explicit ok: true in the body.
    if (!response.ok || !isServerPushResponse(response.body) || response.body.ok !== true) {
        return {
            exitCode: 1,
            stderr: formatServerFailure({
                body: response.body,
                status: response.status
            })
        };
    }
    return {
        exitCode: 0,
        stdout: formatPushSummary({
            endpoint: options.endpoint,
            mode: options.mode,
            response: response.body,
            sourceId: options.sourceId
        })
    };
};
307
+
308
+ //# sourceMappingURL=push.js.map