@valkyrianlabs/payload-markdown-docs 0.1.0-canary.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (204)
  1. package/README.md +195 -0
  2. package/dist/admin/DocsSetManager.d.ts +2 -0
  3. package/dist/admin/DocsSetManager.js +298 -0
  4. package/dist/admin/DocsSetManager.js.map +1 -0
  5. package/dist/admin/docsSetManagerData.d.ts +25 -0
  6. package/dist/admin/docsSetManagerData.js +266 -0
  7. package/dist/admin/docsSetManagerData.js.map +1 -0
  8. package/dist/admin/docsSetManagerTypes.d.ts +103 -0
  9. package/dist/admin/docsSetManagerTypes.js +3 -0
  10. package/dist/admin/docsSetManagerTypes.js.map +1 -0
  11. package/dist/admin/index.d.ts +3 -0
  12. package/dist/admin/index.js +4 -0
  13. package/dist/admin/index.js.map +1 -0
  14. package/dist/cli/commands/install.d.ts +2 -0
  15. package/dist/cli/commands/install.js +211 -0
  16. package/dist/cli/commands/install.js.map +1 -0
  17. package/dist/cli/commands/keygen.d.ts +2 -0
  18. package/dist/cli/commands/keygen.js +89 -0
  19. package/dist/cli/commands/keygen.js.map +1 -0
  20. package/dist/cli/commands/manifest.d.ts +2 -0
  21. package/dist/cli/commands/manifest.js +50 -0
  22. package/dist/cli/commands/manifest.js.map +1 -0
  23. package/dist/cli/commands/plan.d.ts +2 -0
  24. package/dist/cli/commands/plan.js +110 -0
  25. package/dist/cli/commands/plan.js.map +1 -0
  26. package/dist/cli/commands/push.d.ts +3 -0
  27. package/dist/cli/commands/push.js +308 -0
  28. package/dist/cli/commands/push.js.map +1 -0
  29. package/dist/cli/commands/validate.d.ts +3 -0
  30. package/dist/cli/commands/validate.js +109 -0
  31. package/dist/cli/commands/validate.js.map +1 -0
  32. package/dist/cli/filesystem.d.ts +20 -0
  33. package/dist/cli/filesystem.js +96 -0
  34. package/dist/cli/filesystem.js.map +1 -0
  35. package/dist/cli/format.d.ts +35 -0
  36. package/dist/cli/format.js +76 -0
  37. package/dist/cli/format.js.map +1 -0
  38. package/dist/cli/http.d.ts +19 -0
  39. package/dist/cli/http.js +39 -0
  40. package/dist/cli/http.js.map +1 -0
  41. package/dist/cli/index.d.ts +3 -0
  42. package/dist/cli/index.js +214 -0
  43. package/dist/cli/index.js.map +1 -0
  44. package/dist/cli/parseArgs.d.ts +5 -0
  45. package/dist/cli/parseArgs.js +219 -0
  46. package/dist/cli/parseArgs.js.map +1 -0
  47. package/dist/cli/types.d.ts +51 -0
  48. package/dist/cli/types.js +3 -0
  49. package/dist/cli/types.js.map +1 -0
  50. package/dist/collections/docs.d.ts +9 -0
  51. package/dist/collections/docs.js +168 -0
  52. package/dist/collections/docs.js.map +1 -0
  53. package/dist/collections/docsGroups.d.ts +5 -0
  54. package/dist/collections/docsGroups.js +57 -0
  55. package/dist/collections/docsGroups.js.map +1 -0
  56. package/dist/collections/docsSets.d.ts +8 -0
  57. package/dist/collections/docsSets.js +158 -0
  58. package/dist/collections/docsSets.js.map +1 -0
  59. package/dist/collections/index.d.ts +10 -0
  60. package/dist/collections/index.js +7 -0
  61. package/dist/collections/index.js.map +1 -0
  62. package/dist/collections/nonces.d.ts +6 -0
  63. package/dist/collections/nonces.js +57 -0
  64. package/dist/collections/nonces.js.map +1 -0
  65. package/dist/collections/syncRuns.d.ts +5 -0
  66. package/dist/collections/syncRuns.js +139 -0
  67. package/dist/collections/syncRuns.js.map +1 -0
  68. package/dist/constants.d.ts +21 -0
  69. package/dist/constants.js +23 -0
  70. package/dist/constants.js.map +1 -0
  71. package/dist/endpoints/index.d.ts +2 -0
  72. package/dist/endpoints/index.js +3 -0
  73. package/dist/endpoints/index.js.map +1 -0
  74. package/dist/endpoints/sync.d.ts +47 -0
  75. package/dist/endpoints/sync.js +616 -0
  76. package/dist/endpoints/sync.js.map +1 -0
  77. package/dist/index.d.ts +9 -0
  78. package/dist/index.js +7 -0
  79. package/dist/index.js.map +1 -0
  80. package/dist/next/PayloadMarkdownDocsPage.d.ts +7 -0
  81. package/dist/next/PayloadMarkdownDocsPage.js +142 -0
  82. package/dist/next/PayloadMarkdownDocsPage.js.map +1 -0
  83. package/dist/next/index.d.ts +9 -0
  84. package/dist/next/index.js +7 -0
  85. package/dist/next/index.js.map +1 -0
  86. package/dist/next/markdown.d.ts +14 -0
  87. package/dist/next/markdown.js +232 -0
  88. package/dist/next/markdown.js.map +1 -0
  89. package/dist/next/metadata.d.ts +3 -0
  90. package/dist/next/metadata.js +33 -0
  91. package/dist/next/metadata.js.map +1 -0
  92. package/dist/next/records.d.ts +14 -0
  93. package/dist/next/records.js +146 -0
  94. package/dist/next/records.js.map +1 -0
  95. package/dist/next/route.d.ts +6 -0
  96. package/dist/next/route.js +271 -0
  97. package/dist/next/route.js.map +1 -0
  98. package/dist/next/sidebar.d.ts +15 -0
  99. package/dist/next/sidebar.js +137 -0
  100. package/dist/next/sidebar.js.map +1 -0
  101. package/dist/next/types.d.ts +117 -0
  102. package/dist/next/types.js +3 -0
  103. package/dist/next/types.js.map +1 -0
  104. package/dist/payload/applyDocsSync.d.ts +54 -0
  105. package/dist/payload/applyDocsSync.js +176 -0
  106. package/dist/payload/applyDocsSync.js.map +1 -0
  107. package/dist/payload/docsConflicts.d.ts +12 -0
  108. package/dist/payload/docsConflicts.js +34 -0
  109. package/dist/payload/docsConflicts.js.map +1 -0
  110. package/dist/payload/docsData.d.ts +23 -0
  111. package/dist/payload/docsData.js +59 -0
  112. package/dist/payload/docsData.js.map +1 -0
  113. package/dist/payload/docsSets.d.ts +38 -0
  114. package/dist/payload/docsSets.js +57 -0
  115. package/dist/payload/docsSets.js.map +1 -0
  116. package/dist/payload/existingDocs.d.ts +43 -0
  117. package/dist/payload/existingDocs.js +97 -0
  118. package/dist/payload/existingDocs.js.map +1 -0
  119. package/dist/payload/index.d.ts +15 -0
  120. package/dist/payload/index.js +10 -0
  121. package/dist/payload/index.js.map +1 -0
  122. package/dist/payload/routeCollisions.d.ts +31 -0
  123. package/dist/payload/routeCollisions.js +104 -0
  124. package/dist/payload/routeCollisions.js.map +1 -0
  125. package/dist/payload/syncRuns.d.ts +60 -0
  126. package/dist/payload/syncRuns.js +53 -0
  127. package/dist/payload/syncRuns.js.map +1 -0
  128. package/dist/plugin.d.ts +3 -0
  129. package/dist/plugin.js +165 -0
  130. package/dist/plugin.js.map +1 -0
  131. package/dist/routing/index.d.ts +3 -0
  132. package/dist/routing/index.js +4 -0
  133. package/dist/routing/index.js.map +1 -0
  134. package/dist/routing/paths.d.ts +7 -0
  135. package/dist/routing/paths.js +23 -0
  136. package/dist/routing/paths.js.map +1 -0
  137. package/dist/routing/reservations.d.ts +37 -0
  138. package/dist/routing/reservations.js +79 -0
  139. package/dist/routing/reservations.js.map +1 -0
  140. package/dist/security/canonical.d.ts +12 -0
  141. package/dist/security/canonical.js +24 -0
  142. package/dist/security/canonical.js.map +1 -0
  143. package/dist/security/githubOidc.d.ts +45 -0
  144. package/dist/security/githubOidc.js +177 -0
  145. package/dist/security/githubOidc.js.map +1 -0
  146. package/dist/security/headers.d.ts +22 -0
  147. package/dist/security/headers.js +44 -0
  148. package/dist/security/headers.js.map +1 -0
  149. package/dist/security/index.d.ts +15 -0
  150. package/dist/security/index.js +9 -0
  151. package/dist/security/index.js.map +1 -0
  152. package/dist/security/jwks.d.ts +20 -0
  153. package/dist/security/jwks.js +40 -0
  154. package/dist/security/jwks.js.map +1 -0
  155. package/dist/security/jwt.d.ts +10 -0
  156. package/dist/security/jwt.js +42 -0
  157. package/dist/security/jwt.js.map +1 -0
  158. package/dist/security/nonce.d.ts +34 -0
  159. package/dist/security/nonce.js +43 -0
  160. package/dist/security/nonce.js.map +1 -0
  161. package/dist/security/sign.d.ts +13 -0
  162. package/dist/security/sign.js +39 -0
  163. package/dist/security/sign.js.map +1 -0
  164. package/dist/security/verify.d.ts +28 -0
  165. package/dist/security/verify.js +54 -0
  166. package/dist/security/verify.js.map +1 -0
  167. package/dist/skills/codex/SKILL.md +173 -0
  168. package/dist/skills/codex/examples/docs-page.md +42 -0
  169. package/dist/skills/codex/examples/github-actions.md +64 -0
  170. package/dist/skills/codex/reference/admin.md +28 -0
  171. package/dist/skills/codex/reference/frontmatter.md +39 -0
  172. package/dist/skills/codex/reference/payload-markdown-directives.md +77 -0
  173. package/dist/skills/codex/reference/routing.md +35 -0
  174. package/dist/skills/codex/reference/sync.md +35 -0
  175. package/dist/skills/codex/reference/troubleshooting.md +53 -0
  176. package/dist/skills/codex/reference/workflow.md +39 -0
  177. package/dist/sync/aiExportManifest.d.ts +58 -0
  178. package/dist/sync/aiExportManifest.js +430 -0
  179. package/dist/sync/aiExportManifest.js.map +1 -0
  180. package/dist/sync/frontmatter.d.ts +28 -0
  181. package/dist/sync/frontmatter.js +210 -0
  182. package/dist/sync/frontmatter.js.map +1 -0
  183. package/dist/sync/hash.d.ts +1 -0
  184. package/dist/sync/hash.js +8 -0
  185. package/dist/sync/hash.js.map +1 -0
  186. package/dist/sync/index.d.ts +12 -0
  187. package/dist/sync/index.js +9 -0
  188. package/dist/sync/index.js.map +1 -0
  189. package/dist/sync/manifest.d.ts +58 -0
  190. package/dist/sync/manifest.js +21 -0
  191. package/dist/sync/manifest.js.map +1 -0
  192. package/dist/sync/paths.d.ts +16 -0
  193. package/dist/sync/paths.js +116 -0
  194. package/dist/sync/paths.js.map +1 -0
  195. package/dist/sync/plan.d.ts +29 -0
  196. package/dist/sync/plan.js +72 -0
  197. package/dist/sync/plan.js.map +1 -0
  198. package/dist/sync/validate.d.ts +26 -0
  199. package/dist/sync/validate.js +308 -0
  200. package/dist/sync/validate.js.map +1 -0
  201. package/dist/types.d.ts +84 -0
  202. package/dist/types.js +3 -0
  203. package/dist/types.js.map +1 -0
  204. package/package.json +143 -0
@@ -0,0 +1,210 @@
1
+ import { normalizeDocsPath } from './paths.js';
2
// Frontmatter keys the parser recognizes; well-formed keys outside this set
// are ignored with a warning rather than treated as errors.
const knownFrontmatterFields = new Set([
    'description',
    'draft',
    'navTitle',
    'order',
    'redirectFrom',
    'slug',
    'status',
    'tags',
    'title'
]);
// Subset of known fields whose values must be supplied as "- item" list
// lines under the key, never as an inline scalar.
const arrayFrontmatterFields = new Set([
    'redirectFrom',
    'tags'
]);
17
// Strip one matching pair of surrounding single or double quotes from a
// scalar value; unquoted values are simply trimmed.
const stripQuotes = (value)=>{
    const trimmed = value.trim();
    const isDoubleQuoted = trimmed.startsWith('"') && trimmed.endsWith('"');
    const isSingleQuoted = trimmed.startsWith("'") && trimmed.endsWith("'");
    return isDoubleQuoted || isSingleQuoted ? trimmed.slice(1, -1) : trimmed;
};
// Build a validation issue tagged with the shared 'invalid_frontmatter' code.
const createFrontmatterIssue = ({ message, path })=>({
        code: 'invalid_frontmatter',
        message,
        path
    });
// A valid frontmatter key starts with an ASCII letter and contains only
// ASCII letters and digits (equivalent to the former charCode-range checks;
// the empty string fails the match just as charCodeAt(0) === NaN did).
const isFrontmatterKey = (value)=>/^[A-Za-z][A-Za-z0-9]*$/.test(value);
// Coerce and assign one scalar field onto `frontmatter`.
// Returns undefined on success, or a DocsValidationIssue when the value
// fails the field's type check. Keys not handled here fall through to the
// default branch and are left untouched (the caller warns about them).
const assignFrontmatterValue = ({ frontmatter, key, path, rawValue })=>{
    const value = stripQuotes(rawValue);
    switch(key){
        case 'description':
        case 'navTitle':
        case 'slug':
        case 'title':
            // Free-form string fields are stored as-is (quotes stripped).
            frontmatter[key] = value;
            return undefined;
        case 'draft':
            if (value === 'true' || value === 'false') {
                frontmatter.draft = value === 'true';
                return undefined;
            }
            return createFrontmatterIssue({
                message: 'Frontmatter field "draft" must be a boolean.',
                path
            });
        case 'order':
            {
                // Bug fix: Number('') coerces to 0, so a blank `order:` line
                // used to be accepted silently as order 0. Treat the empty
                // string as invalid so it reports an issue instead.
                const order = value === '' ? Number.NaN : Number(value);
                if (Number.isFinite(order)) {
                    frontmatter.order = order;
                    return undefined;
                }
                return createFrontmatterIssue({
                    message: 'Frontmatter field "order" must be a number.',
                    path
                });
            }
        case 'status':
            if (value === 'draft' || value === 'published') {
                frontmatter.status = value;
                return undefined;
            }
            return createFrontmatterIssue({
                message: 'Frontmatter field "status" must be "draft" or "published".',
                path
            });
        default:
            return undefined;
    }
};
// Cross-field checks run after all lines are parsed. Currently validates the
// slug character set: letters, digits, and hyphens, not hyphen-leading
// (case-insensitive per the /i flag).
const validateParsedFrontmatter = (frontmatter, path)=>{
    const issues = [];
    if (frontmatter.slug && !/^[a-z0-9][a-z0-9-]*$/i.test(frontmatter.slug)) {
        issues.push(createFrontmatterIssue({
            message: 'Frontmatter field "slug" must contain only letters, numbers, and hyphens.',
            path
        }));
    }
    return issues;
};
95
/**
 * Parse a leading YAML-like frontmatter block from `markdown`.
 *
 * Supports scalar `key: value` lines and "- item" list lines under the
 * array fields; it is not a full YAML parser (no nesting, no inline arrays).
 *
 * @param {string} markdown - Full document text.
 * @param {{ path?: string }} [options] - Optional source path attached to issues.
 * @returns {{ content: string, frontmatter: object, issues: Array, warnings: Array }}
 *   Body content (frontmatter removed), parsed fields, hard errors, and warnings.
 */
export const parseDocsFrontmatter = (markdown, options = {})=>{
    const issues = [];
    const warnings = [];
    // No frontmatter: the document must begin with a `---` delimiter line.
    if (!markdown.startsWith('---\n') && !markdown.startsWith('---\r\n')) {
        return {
            content: markdown,
            frontmatter: {},
            issues,
            warnings
        };
    }
    const lines = markdown.split(/\r?\n/);
    // The first `---` line after line 0 closes the frontmatter block.
    const closingIndex = lines.findIndex((line, index)=>index > 0 && line.trim() === '---');
    if (closingIndex === -1) {
        // Opened but never closed: report an issue and return the text untouched.
        return {
            content: markdown,
            frontmatter: {},
            issues: [
                createFrontmatterIssue({
                    message: 'Frontmatter block is missing a closing delimiter.',
                    path: options.path
                })
            ],
            warnings
        };
    }
    const frontmatter = {};
    const frontmatterLines = lines.slice(1, closingIndex);
    // Tracks which array field ('redirectFrom' | 'tags') subsequent "- item"
    // lines belong to; cleared whenever a non-list, non-blank line is seen.
    let currentArrayKey;
    for (const line of frontmatterLines){
        // Blank lines are skipped without resetting currentArrayKey.
        if (line.trim() === '') {
            continue;
        }
        const trimmedStart = line.trimStart();
        if (trimmedStart.startsWith('- ')) {
            // List item: only valid directly under a supported array field.
            if (!currentArrayKey) {
                issues.push(createFrontmatterIssue({
                    message: 'Frontmatter array item does not belong to a supported array field.',
                    path: options.path
                }));
                continue;
            }
            // Append immutably so each item extends the accumulated array.
            frontmatter[currentArrayKey] = [
                ...frontmatter[currentArrayKey] ?? [],
                stripQuotes(trimmedStart.slice(2))
            ];
            continue;
        }
        // `key: value` line. separatorIndex > 0 guards against a missing or
        // leading colon, either of which yields an empty (invalid) key.
        const separatorIndex = line.indexOf(':');
        const key = separatorIndex > 0 ? line.slice(0, separatorIndex).trim() : '';
        const rawValue = separatorIndex > 0 ? line.slice(separatorIndex + 1).trim() : '';
        if (!isFrontmatterKey(key)) {
            issues.push(createFrontmatterIssue({
                message: `Unsupported frontmatter line: ${line}`,
                path: options.path
            }));
            currentArrayKey = undefined;
            continue;
        }
        currentArrayKey = undefined;
        if (!knownFrontmatterFields.has(key)) {
            // Unknown-but-well-formed keys produce a warning, not an error.
            warnings.push({
                code: 'invalid_frontmatter',
                message: `Unknown frontmatter field "${key}" was ignored.`,
                path: options.path
            });
            continue;
        }
        if (arrayFrontmatterFields.has(key)) {
            // Array fields take their values from following "- item" lines, so
            // an inline value (e.g. `tags: a, b`) is rejected.
            if (rawValue.trim() !== '') {
                issues.push(createFrontmatterIssue({
                    message: `Frontmatter field "${key}" must use list item syntax.`,
                    path: options.path
                }));
                continue;
            }
            currentArrayKey = key;
            frontmatter[currentArrayKey] = [];
            continue;
        }
        const issue = assignFrontmatterValue({
            frontmatter,
            key,
            path: options.path,
            rawValue
        });
        if (issue) {
            issues.push(issue);
        }
    }
    issues.push(...validateParsedFrontmatter(frontmatter, options.path));
    return {
        // Body after the closing delimiter; one leading blank line is dropped.
        // Note the join normalizes all line endings to '\n'.
        content: lines.slice(closingIndex + 1).join('\n').replace(/^\n/, ''),
        frontmatter,
        issues,
        warnings
    };
};
193
/**
 * Infer a document title from its first ATX level-1 heading.
 *
 * @param {string} content - Markdown body to scan.
 * @returns {string | undefined} Heading text with the leading `# ` and any
 *   trailing closing hashes removed, or undefined when no H1 is found.
 */
export const inferTitleFromMarkdown = (content)=>{
    // Bug fix: trim each line before matching. The original matched against
    // line.trim() but stripped the marker from the untrimmed line, so an
    // indented heading like "  # Title" produced "# Title" instead of "Title".
    const h1Line = content.split(/\r?\n/).map((line)=>line.trim()).find((line)=>/^#\s+[^#]/.test(line));
    return h1Line?.replace(/^#\s+/, '').replace(/\s+#*$/, '').trim() || undefined;
};
197
/**
 * Derive a human-readable title from a docs source path, e.g.
 * "guides/getting-started.md" -> "Getting Started". An `index.md` leaf
 * takes its title from the parent directory name instead.
 *
 * @param {string} sourcePath - Raw source path from the manifest.
 * @returns {string} Title-cased words, or 'Untitled' when the path is invalid.
 */
export const titleFromSourcePath = (sourcePath)=>{
    const normalized = normalizeDocsPath(sourcePath);
    if (!normalized.ok) {
        return 'Untitled';
    }
    const segments = normalized.path.split('/');
    let base = segments.at(-1) ?? 'index.md';
    if (base === 'index.md') {
        // index files are titled after their containing folder.
        base = segments.at(-2) ?? 'index';
    }
    const stem = base.replace(/\.md$/, '');
    const words = stem.split(/[-_\s]+/).filter(Boolean);
    return words.map((word)=>word.charAt(0).toUpperCase() + word.slice(1)).join(' ');
};
208
// Resolve the display title with the following precedence: explicit
// frontmatter title, then the first H1 in the content, then a title
// derived from the source path.
export const resolveDocsTitle = ({ content, frontmatter, sourcePath })=>{
    if (frontmatter.title != null) {
        return frontmatter.title;
    }
    return inferTitleFromMarkdown(content) ?? titleFromSourcePath(sourcePath);
};
209
+
210
+ //# sourceMappingURL=frontmatter.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../../src/sync/frontmatter.ts"],"sourcesContent":["import type { DocsValidationIssue } from './validate.js'\n\nimport { normalizeDocsPath } from './paths.js'\n\nexport type DocsFrontmatter = {\n description?: string\n draft?: boolean\n navTitle?: string\n order?: number\n redirectFrom?: string[]\n slug?: string\n status?: 'draft' | 'published'\n tags?: string[]\n title?: string\n}\n\nexport type ParseDocsFrontmatterResult = {\n content: string\n frontmatter: DocsFrontmatter\n issues: DocsValidationIssue[]\n warnings: DocsValidationIssue[]\n}\n\nconst knownFrontmatterFields = new Set([\n 'description',\n 'draft',\n 'navTitle',\n 'order',\n 'redirectFrom',\n 'slug',\n 'status',\n 'tags',\n 'title',\n])\n\nconst arrayFrontmatterFields = new Set(['redirectFrom', 'tags'])\n\nconst stripQuotes = (value: string): string => {\n const trimmed = value.trim()\n\n if (\n (trimmed.startsWith('\"') && trimmed.endsWith('\"')) ||\n (trimmed.startsWith(\"'\") && trimmed.endsWith(\"'\"))\n ) {\n return trimmed.slice(1, -1)\n }\n\n return trimmed\n}\n\nconst createFrontmatterIssue = ({\n message,\n path,\n}: {\n message: string\n path?: string\n}): DocsValidationIssue => ({\n code: 'invalid_frontmatter',\n message,\n path,\n})\n\nconst isFrontmatterKey = (value: string): boolean => {\n const firstCharacter = value.charCodeAt(0)\n const startsWithLetter =\n (firstCharacter >= 65 && firstCharacter <= 90) ||\n (firstCharacter >= 97 && firstCharacter <= 122)\n\n if (!startsWithLetter) {\n return false\n }\n\n return [...value].every((character) => {\n const code = character.charCodeAt(0)\n\n return (\n (code >= 48 && code <= 57) ||\n (code >= 65 && code <= 90) ||\n (code >= 97 && code <= 122)\n )\n })\n}\n\nconst assignFrontmatterValue = ({\n frontmatter,\n key,\n path,\n rawValue,\n}: {\n frontmatter: DocsFrontmatter\n key: string\n path?: string\n rawValue: string\n}): DocsValidationIssue | undefined => {\n const value = stripQuotes(rawValue)\n\n switch (key) 
{\n case 'description':\n case 'navTitle':\n case 'slug':\n case 'title':\n frontmatter[key] = value\n return undefined\n\n case 'draft':\n if (value === 'true' || value === 'false') {\n frontmatter.draft = value === 'true'\n return undefined\n }\n\n return createFrontmatterIssue({\n message: 'Frontmatter field \"draft\" must be a boolean.',\n path,\n })\n\n case 'order': {\n const order = Number(value)\n\n if (Number.isFinite(order)) {\n frontmatter.order = order\n return undefined\n }\n\n return createFrontmatterIssue({\n message: 'Frontmatter field \"order\" must be a number.',\n path,\n })\n }\n\n case 'status':\n if (value === 'draft' || value === 'published') {\n frontmatter.status = value\n return undefined\n }\n\n return createFrontmatterIssue({\n message: 'Frontmatter field \"status\" must be \"draft\" or \"published\".',\n path,\n })\n\n default:\n return undefined\n }\n}\n\nconst validateParsedFrontmatter = (\n frontmatter: DocsFrontmatter,\n path?: string,\n): DocsValidationIssue[] => {\n const issues: DocsValidationIssue[] = []\n\n if (frontmatter.slug && !/^[a-z0-9][a-z0-9-]*$/i.test(frontmatter.slug)) {\n issues.push(\n createFrontmatterIssue({\n message:\n 'Frontmatter field \"slug\" must contain only letters, numbers, and hyphens.',\n path,\n }),\n )\n }\n\n return issues\n}\n\nexport const parseDocsFrontmatter = (\n markdown: string,\n options: {\n path?: string\n } = {},\n): ParseDocsFrontmatterResult => {\n const issues: DocsValidationIssue[] = []\n const warnings: DocsValidationIssue[] = []\n\n if (!markdown.startsWith('---\\n') && !markdown.startsWith('---\\r\\n')) {\n return {\n content: markdown,\n frontmatter: {},\n issues,\n warnings,\n }\n }\n\n const lines = markdown.split(/\\r?\\n/)\n const closingIndex = lines.findIndex((line, index) => index > 0 && line.trim() === '---')\n\n if (closingIndex === -1) {\n return {\n content: markdown,\n frontmatter: {},\n issues: [\n createFrontmatterIssue({\n message: 'Frontmatter block is missing a 
closing delimiter.',\n path: options.path,\n }),\n ],\n warnings,\n }\n }\n\n const frontmatter: DocsFrontmatter = {}\n const frontmatterLines = lines.slice(1, closingIndex)\n let currentArrayKey: 'redirectFrom' | 'tags' | undefined\n\n for (const line of frontmatterLines) {\n if (line.trim() === '') {\n continue\n }\n\n const trimmedStart = line.trimStart()\n\n if (trimmedStart.startsWith('- ')) {\n if (!currentArrayKey) {\n issues.push(\n createFrontmatterIssue({\n message: 'Frontmatter array item does not belong to a supported array field.',\n path: options.path,\n }),\n )\n continue\n }\n\n frontmatter[currentArrayKey] = [\n ...(frontmatter[currentArrayKey] ?? []),\n stripQuotes(trimmedStart.slice(2)),\n ]\n continue\n }\n\n const separatorIndex = line.indexOf(':')\n const key = separatorIndex > 0 ? line.slice(0, separatorIndex).trim() : ''\n const rawValue = separatorIndex > 0 ? line.slice(separatorIndex + 1).trim() : ''\n\n if (!isFrontmatterKey(key)) {\n issues.push(\n createFrontmatterIssue({\n message: `Unsupported frontmatter line: ${line}`,\n path: options.path,\n }),\n )\n currentArrayKey = undefined\n continue\n }\n\n currentArrayKey = undefined\n\n if (!knownFrontmatterFields.has(key)) {\n warnings.push({\n code: 'invalid_frontmatter',\n message: `Unknown frontmatter field \"${key}\" was ignored.`,\n path: options.path,\n })\n continue\n }\n\n if (arrayFrontmatterFields.has(key)) {\n if (rawValue.trim() !== '') {\n issues.push(\n createFrontmatterIssue({\n message: `Frontmatter field \"${key}\" must use list item syntax.`,\n path: options.path,\n }),\n )\n continue\n }\n\n currentArrayKey = key as 'redirectFrom' | 'tags'\n frontmatter[currentArrayKey] = []\n continue\n }\n\n const issue = assignFrontmatterValue({\n frontmatter,\n key,\n path: options.path,\n rawValue,\n })\n\n if (issue) {\n issues.push(issue)\n }\n }\n\n issues.push(...validateParsedFrontmatter(frontmatter, options.path))\n\n return {\n content: lines.slice(closingIndex + 
1).join('\\n').replace(/^\\n/, ''),\n frontmatter,\n issues,\n warnings,\n }\n}\n\nexport const inferTitleFromMarkdown = (content: string): string | undefined => {\n const h1Line = content\n .split(/\\r?\\n/)\n .find((line) => /^#\\s+[^#]/.test(line.trim()))\n\n return h1Line?.replace(/^#\\s+/, '').replace(/\\s+#*$/, '').trim() || undefined\n}\n\nexport const titleFromSourcePath = (sourcePath: string): string => {\n const normalizedPath = normalizeDocsPath(sourcePath)\n\n if (!normalizedPath.ok) {\n return 'Untitled'\n }\n\n const pathSegments = normalizedPath.path.split('/')\n const lastSegment = pathSegments.at(-1) ?? 'index.md'\n const baseName = lastSegment === 'index.md' ? pathSegments.at(-2) ?? 'index' : lastSegment\n const withoutExtension = baseName.replace(/\\.md$/, '')\n\n return withoutExtension\n .split(/[-_\\s]+/)\n .filter(Boolean)\n .map((part) => `${part.charAt(0).toUpperCase()}${part.slice(1)}`)\n .join(' ')\n}\n\nexport const resolveDocsTitle = ({\n content,\n frontmatter,\n sourcePath,\n}: {\n content: string\n frontmatter: DocsFrontmatter\n sourcePath: string\n}): string =>\n frontmatter.title ?? inferTitleFromMarkdown(content) ?? 
titleFromSourcePath(sourcePath)\n"],"names":["normalizeDocsPath","knownFrontmatterFields","Set","arrayFrontmatterFields","stripQuotes","value","trimmed","trim","startsWith","endsWith","slice","createFrontmatterIssue","message","path","code","isFrontmatterKey","firstCharacter","charCodeAt","startsWithLetter","every","character","assignFrontmatterValue","frontmatter","key","rawValue","undefined","draft","order","Number","isFinite","status","validateParsedFrontmatter","issues","slug","test","push","parseDocsFrontmatter","markdown","options","warnings","content","lines","split","closingIndex","findIndex","line","index","frontmatterLines","currentArrayKey","trimmedStart","trimStart","separatorIndex","indexOf","has","issue","join","replace","inferTitleFromMarkdown","h1Line","find","titleFromSourcePath","sourcePath","normalizedPath","ok","pathSegments","lastSegment","at","baseName","withoutExtension","filter","Boolean","map","part","charAt","toUpperCase","resolveDocsTitle","title"],"mappings":"AAEA,SAASA,iBAAiB,QAAQ,aAAY;AAqB9C,MAAMC,yBAAyB,IAAIC,IAAI;IACrC;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;CACD;AAED,MAAMC,yBAAyB,IAAID,IAAI;IAAC;IAAgB;CAAO;AAE/D,MAAME,cAAc,CAACC;IACnB,MAAMC,UAAUD,MAAME,IAAI;IAE1B,IACE,AAACD,QAAQE,UAAU,CAAC,QAAQF,QAAQG,QAAQ,CAAC,QAC5CH,QAAQE,UAAU,CAAC,QAAQF,QAAQG,QAAQ,CAAC,MAC7C;QACA,OAAOH,QAAQI,KAAK,CAAC,GAAG,CAAC;IAC3B;IAEA,OAAOJ;AACT;AAEA,MAAMK,yBAAyB,CAAC,EAC9BC,OAAO,EACPC,IAAI,EAIL,GAA2B,CAAA;QAC1BC,MAAM;QACNF;QACAC;IACF,CAAA;AAEA,MAAME,mBAAmB,CAACV;IACxB,MAAMW,iBAAiBX,MAAMY,UAAU,CAAC;IACxC,MAAMC,mBACJ,AAACF,kBAAkB,MAAMA,kBAAkB,MAC1CA,kBAAkB,MAAMA,kBAAkB;IAE7C,IAAI,CAACE,kBAAkB;QACrB,OAAO;IACT;IAEA,OAAO;WAAIb;KAAM,CAACc,KAAK,CAAC,CAACC;QACvB,MAAMN,OAAOM,UAAUH,UAAU,CAAC;QAElC,OACE,AAACH,QAAQ,MAAMA,QAAQ,MACtBA,QAAQ,MAAMA,QAAQ,MACtBA,QAAQ,MAAMA,QAAQ;IAE3B;AACF;AAEA,MAAMO,yBAAyB,CAAC,EAC9BC,WAAW,EACXC,GAAG,EACHV,IAAI,EACJW,QAAQ,EAMT;IACC,MAAMnB,QAAQD,YAAYoB;IAE1B,OAAQD;QACN,KAAK;QACL,KAAK;QACL,KAAK;QACL,KAAK;YACHD,WAAW,CAACC,IAAI,GAAGlB;YACnB,OAAOoB;
QAET,KAAK;YACH,IAAIpB,UAAU,UAAUA,UAAU,SAAS;gBACzCiB,YAAYI,KAAK,GAAGrB,UAAU;gBAC9B,OAAOoB;YACT;YAEA,OAAOd,uBAAuB;gBAC5BC,SAAS;gBACTC;YACF;QAEF,KAAK;YAAS;gBACZ,MAAMc,QAAQC,OAAOvB;gBAErB,IAAIuB,OAAOC,QAAQ,CAACF,QAAQ;oBAC1BL,YAAYK,KAAK,GAAGA;oBACpB,OAAOF;gBACT;gBAEA,OAAOd,uBAAuB;oBAC5BC,SAAS;oBACTC;gBACF;YACF;QAEA,KAAK;YACH,IAAIR,UAAU,WAAWA,UAAU,aAAa;gBAC9CiB,YAAYQ,MAAM,GAAGzB;gBACrB,OAAOoB;YACT;YAEA,OAAOd,uBAAuB;gBAC5BC,SAAS;gBACTC;YACF;QAEF;YACE,OAAOY;IACX;AACF;AAEA,MAAMM,4BAA4B,CAChCT,aACAT;IAEA,MAAMmB,SAAgC,EAAE;IAExC,IAAIV,YAAYW,IAAI,IAAI,CAAC,wBAAwBC,IAAI,CAACZ,YAAYW,IAAI,GAAG;QACvED,OAAOG,IAAI,CACTxB,uBAAuB;YACrBC,SACE;YACFC;QACF;IAEJ;IAEA,OAAOmB;AACT;AAEA,OAAO,MAAMI,uBAAuB,CAClCC,UACAC,UAEI,CAAC,CAAC;IAEN,MAAMN,SAAgC,EAAE;IACxC,MAAMO,WAAkC,EAAE;IAE1C,IAAI,CAACF,SAAS7B,UAAU,CAAC,YAAY,CAAC6B,SAAS7B,UAAU,CAAC,YAAY;QACpE,OAAO;YACLgC,SAASH;YACTf,aAAa,CAAC;YACdU;YACAO;QACF;IACF;IAEA,MAAME,QAAQJ,SAASK,KAAK,CAAC;IAC7B,MAAMC,eAAeF,MAAMG,SAAS,CAAC,CAACC,MAAMC,QAAUA,QAAQ,KAAKD,KAAKtC,IAAI,OAAO;IAEnF,IAAIoC,iBAAiB,CAAC,GAAG;QACvB,OAAO;YACLH,SAASH;YACTf,aAAa,CAAC;YACdU,QAAQ;gBACNrB,uBAAuB;oBACrBC,SAAS;oBACTC,MAAMyB,QAAQzB,IAAI;gBACpB;aACD;YACD0B;QACF;IACF;IAEA,MAAMjB,cAA+B,CAAC;IACtC,MAAMyB,mBAAmBN,MAAM/B,KAAK,CAAC,GAAGiC;IACxC,IAAIK;IAEJ,KAAK,MAAMH,QAAQE,iBAAkB;QACnC,IAAIF,KAAKtC,IAAI,OAAO,IAAI;YACtB;QACF;QAEA,MAAM0C,eAAeJ,KAAKK,SAAS;QAEnC,IAAID,aAAazC,UAAU,CAAC,OAAO;YACjC,IAAI,CAACwC,iBAAiB;gBACpBhB,OAAOG,IAAI,CACTxB,uBAAuB;oBACrBC,SAAS;oBACTC,MAAMyB,QAAQzB,IAAI;gBACpB;gBAEF;YACF;YAEAS,WAAW,CAAC0B,gBAAgB,GAAG;mBACzB1B,WAAW,CAAC0B,gBAAgB,IAAI,EAAE;gBACtC5C,YAAY6C,aAAavC,KAAK,CAAC;aAChC;YACD;QACF;QAEA,MAAMyC,iBAAiBN,KAAKO,OAAO,CAAC;QACpC,MAAM7B,MAAM4B,iBAAiB,IAAIN,KAAKnC,KAAK,CAAC,GAAGyC,gBAAgB5C,IAAI,KAAK;QACxE,MAAMiB,WAAW2B,iBAAiB,IAAIN,KAAKnC,KAAK,CAACyC,iBAAiB,GAAG5C,IAAI,KAAK;QAE9E,IAAI,CAACQ,iBAAiBQ,MAAM;YAC1BS,OAAOG,IAAI,CACTxB,uBAAuB;gBACrBC,SAAS,CAAC,8BAA8B,EAAEiC,MAAM;gBAChDhC,MAAMyB,QAAQzB,IAAI;YACpB;YAEFmC,kBAAkBvB;YAClB;QACF;QAEAuB,kBAAkBvB;QAElB,IAAI,CAACxB,uBA
AuBoD,GAAG,CAAC9B,MAAM;YACpCgB,SAASJ,IAAI,CAAC;gBACZrB,MAAM;gBACNF,SAAS,CAAC,2BAA2B,EAAEW,IAAI,cAAc,CAAC;gBAC1DV,MAAMyB,QAAQzB,IAAI;YACpB;YACA;QACF;QAEA,IAAIV,uBAAuBkD,GAAG,CAAC9B,MAAM;YACnC,IAAIC,SAASjB,IAAI,OAAO,IAAI;gBAC1ByB,OAAOG,IAAI,CACTxB,uBAAuB;oBACrBC,SAAS,CAAC,mBAAmB,EAAEW,IAAI,4BAA4B,CAAC;oBAChEV,MAAMyB,QAAQzB,IAAI;gBACpB;gBAEF;YACF;YAEAmC,kBAAkBzB;YAClBD,WAAW,CAAC0B,gBAAgB,GAAG,EAAE;YACjC;QACF;QAEA,MAAMM,QAAQjC,uBAAuB;YACnCC;YACAC;YACAV,MAAMyB,QAAQzB,IAAI;YAClBW;QACF;QAEA,IAAI8B,OAAO;YACTtB,OAAOG,IAAI,CAACmB;QACd;IACF;IAEAtB,OAAOG,IAAI,IAAIJ,0BAA0BT,aAAagB,QAAQzB,IAAI;IAElE,OAAO;QACL2B,SAASC,MAAM/B,KAAK,CAACiC,eAAe,GAAGY,IAAI,CAAC,MAAMC,OAAO,CAAC,OAAO;QACjElC;QACAU;QACAO;IACF;AACF,EAAC;AAED,OAAO,MAAMkB,yBAAyB,CAACjB;IACrC,MAAMkB,SAASlB,QACZE,KAAK,CAAC,SACNiB,IAAI,CAAC,CAACd,OAAS,YAAYX,IAAI,CAACW,KAAKtC,IAAI;IAE5C,OAAOmD,QAAQF,QAAQ,SAAS,IAAIA,QAAQ,UAAU,IAAIjD,UAAUkB;AACtE,EAAC;AAED,OAAO,MAAMmC,sBAAsB,CAACC;IAClC,MAAMC,iBAAiB9D,kBAAkB6D;IAEzC,IAAI,CAACC,eAAeC,EAAE,EAAE;QACtB,OAAO;IACT;IAEA,MAAMC,eAAeF,eAAejD,IAAI,CAAC6B,KAAK,CAAC;IAC/C,MAAMuB,cAAcD,aAAaE,EAAE,CAAC,CAAC,MAAM;IAC3C,MAAMC,WAAWF,gBAAgB,aAAaD,aAAaE,EAAE,CAAC,CAAC,MAAM,UAAUD;IAC/E,MAAMG,mBAAmBD,SAASX,OAAO,CAAC,SAAS;IAEnD,OAAOY,iBACJ1B,KAAK,CAAC,WACN2B,MAAM,CAACC,SACPC,GAAG,CAAC,CAACC,OAAS,GAAGA,KAAKC,MAAM,CAAC,GAAGC,WAAW,KAAKF,KAAK9D,KAAK,CAAC,IAAI,EAC/D6C,IAAI,CAAC;AACV,EAAC;AAED,OAAO,MAAMoB,mBAAmB,CAAC,EAC/BnC,OAAO,EACPlB,WAAW,EACXuC,UAAU,EAKX,GACCvC,YAAYsD,KAAK,IAAInB,uBAAuBjB,YAAYoB,oBAAoBC,YAAW"}
@@ -0,0 +1 @@
1
/** Computes the SHA-256 digest of `content` and returns it as a lowercase hex string. */
export declare const sha256Hex: (content: string | Uint8Array) => string;
@@ -0,0 +1,8 @@
1
+ import { createHash } from 'node:crypto';
2
// Compute the SHA-256 digest of `content` (string or bytes) as a
// lowercase hex string.
export const sha256Hex = (content)=>createHash('sha256').update(content).digest('hex');
7
+
8
+ //# sourceMappingURL=hash.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../../src/sync/hash.ts"],"sourcesContent":["import { createHash } from 'node:crypto'\n\nexport const sha256Hex = (content: string | Uint8Array): string => {\n const hash = createHash('sha256')\n\n hash.update(content)\n\n return hash.digest('hex')\n}\n"],"names":["createHash","sha256Hex","content","hash","update","digest"],"mappings":"AAAA,SAASA,UAAU,QAAQ,cAAa;AAExC,OAAO,MAAMC,YAAY,CAACC;IACxB,MAAMC,OAAOH,WAAW;IAExBG,KAAKC,MAAM,CAACF;IAEZ,OAAOC,KAAKE,MAAM,CAAC;AACrB,EAAC"}
@@ -0,0 +1,12 @@
1
+ export { AI_MARKDOWN_EXPORT_MANIFEST_FILENAMES, isAiMarkdownExportManifestPath, isExcludedFromAiExport, matchesAiExportExcludePattern, parseDocsAiExportManifestYaml, validateDocsAiExportManifest, } from './aiExportManifest.js';
2
+ export type { DocsAiExportHeadingMode, DocsAiExportManifest, DocsAiExportManifestInput, DocsAiExportManifestValidationOptions, DocsAiExportManifestValidationResult, DocsAiExportOrphans, } from './aiExportManifest.js';
3
+ export { inferTitleFromMarkdown, parseDocsFrontmatter, resolveDocsTitle, titleFromSourcePath, } from './frontmatter.js';
4
+ export type { DocsFrontmatter, ParseDocsFrontmatterResult, } from './frontmatter.js';
5
+ export { sha256Hex } from './hash.js';
6
+ export { buildDocsManifest } from './manifest.js';
7
+ export type { DocsDeleteBehavior, DocsManifest, DocsManifestFile, DocsManifestInputFile, DocsManifestSource, DocsSyncMode, ValidatedDocsManifest, ValidatedDocsManifestFile, } from './manifest.js';
8
+ export { deriveRouteFromSourcePath, normalizeDocsPath } from './paths.js';
9
+ export { planDocsSync } from './plan.js';
10
+ export type { DocsSyncPlan, ExistingDocsRecord, PlannedDocChange, } from './plan.js';
11
+ export { validateDocsManifest } from './validate.js';
12
+ export type { DocsValidationErrorCode, DocsValidationIssue, DocsValidationOptions, DocsValidationResult, } from './validate.js';
@@ -0,0 +1,9 @@
1
// Barrel module for the docs sync package: re-exports the runtime API from
// each submodule so consumers can import everything from one entry point.
export { AI_MARKDOWN_EXPORT_MANIFEST_FILENAMES, isAiMarkdownExportManifestPath, isExcludedFromAiExport, matchesAiExportExcludePattern, parseDocsAiExportManifestYaml, validateDocsAiExportManifest } from './aiExportManifest.js';
export { inferTitleFromMarkdown, parseDocsFrontmatter, resolveDocsTitle, titleFromSourcePath } from './frontmatter.js';
export { sha256Hex } from './hash.js';
export { buildDocsManifest } from './manifest.js';
export { deriveRouteFromSourcePath, normalizeDocsPath } from './paths.js';
export { planDocsSync } from './plan.js';
export { validateDocsManifest } from './validate.js';

//# sourceMappingURL=index.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../../src/sync/index.ts"],"sourcesContent":["export {\n AI_MARKDOWN_EXPORT_MANIFEST_FILENAMES,\n isAiMarkdownExportManifestPath,\n isExcludedFromAiExport,\n matchesAiExportExcludePattern,\n parseDocsAiExportManifestYaml,\n validateDocsAiExportManifest,\n} from './aiExportManifest.js'\nexport type {\n DocsAiExportHeadingMode,\n DocsAiExportManifest,\n DocsAiExportManifestInput,\n DocsAiExportManifestValidationOptions,\n DocsAiExportManifestValidationResult,\n DocsAiExportOrphans,\n} from './aiExportManifest.js'\nexport {\n inferTitleFromMarkdown,\n parseDocsFrontmatter,\n resolveDocsTitle,\n titleFromSourcePath,\n} from './frontmatter.js'\nexport type {\n DocsFrontmatter,\n ParseDocsFrontmatterResult,\n} from './frontmatter.js'\nexport { sha256Hex } from './hash.js'\nexport { buildDocsManifest } from './manifest.js'\nexport type {\n DocsDeleteBehavior,\n DocsManifest,\n DocsManifestFile,\n DocsManifestInputFile,\n DocsManifestSource,\n DocsSyncMode,\n ValidatedDocsManifest,\n ValidatedDocsManifestFile,\n} from './manifest.js'\nexport { deriveRouteFromSourcePath, normalizeDocsPath } from './paths.js'\nexport { planDocsSync } from './plan.js'\nexport type {\n DocsSyncPlan,\n ExistingDocsRecord,\n PlannedDocChange,\n} from './plan.js'\nexport { validateDocsManifest } from './validate.js'\nexport type {\n DocsValidationErrorCode,\n DocsValidationIssue,\n DocsValidationOptions,\n DocsValidationResult,\n} from 
'./validate.js'\n"],"names":["AI_MARKDOWN_EXPORT_MANIFEST_FILENAMES","isAiMarkdownExportManifestPath","isExcludedFromAiExport","matchesAiExportExcludePattern","parseDocsAiExportManifestYaml","validateDocsAiExportManifest","inferTitleFromMarkdown","parseDocsFrontmatter","resolveDocsTitle","titleFromSourcePath","sha256Hex","buildDocsManifest","deriveRouteFromSourcePath","normalizeDocsPath","planDocsSync","validateDocsManifest"],"mappings":"AAAA,SACEA,qCAAqC,EACrCC,8BAA8B,EAC9BC,sBAAsB,EACtBC,6BAA6B,EAC7BC,6BAA6B,EAC7BC,4BAA4B,QACvB,wBAAuB;AAS9B,SACEC,sBAAsB,EACtBC,oBAAoB,EACpBC,gBAAgB,EAChBC,mBAAmB,QACd,mBAAkB;AAKzB,SAASC,SAAS,QAAQ,YAAW;AACrC,SAASC,iBAAiB,QAAQ,gBAAe;AAWjD,SAASC,yBAAyB,EAAEC,iBAAiB,QAAQ,aAAY;AACzE,SAASC,YAAY,QAAQ,YAAW;AAMxC,SAASC,oBAAoB,QAAQ,gBAAe"}
@@ -0,0 +1,58 @@
1
import type { DocsAiExportManifest } from './aiExportManifest.js';
import type { DocsFrontmatter } from './frontmatter.js';
/** Whether a sync run should only be planned ('dry-run') or applied ('sync'). */
export type DocsSyncMode = 'dry-run' | 'sync';
/** How existing docs that are absent from the manifest are handled during sync. */
export type DocsDeleteBehavior = 'archive' | 'delete' | 'draft' | 'ignore';
/** Metadata describing where the manifest's files came from (repo/branch/commit). */
export type DocsManifestSource = {
    branch?: string;
    commit?: string;
    id: string;
    repository?: string;
    root?: string;
};
/** A single raw file in the manifest; sha256 is filled in by buildDocsManifest. */
export type DocsManifestFile = {
    content: string;
    path: string;
    sha256?: string;
};
/** The raw (pre-validation) docs manifest, schema version 1. */
export type DocsManifest = {
    aiExport?: DocsAiExportManifest;
    deleteBehavior?: DocsDeleteBehavior;
    files: DocsManifestFile[];
    mode?: DocsSyncMode;
    publish?: boolean;
    source: DocsManifestSource;
    version: 1;
};
/**
 * A manifest file after validation: frontmatter, route, and title are
 * resolved and the content hash is required (populated during validation).
 */
export type ValidatedDocsManifestFile = {
    content: string;
    frontmatter: DocsFrontmatter;
    path: string;
    route: string;
    sha256: string;
    title: string;
};
/** A manifest after validation: optional behavior fields resolved to concrete values. */
export type ValidatedDocsManifest = {
    aiExport?: DocsAiExportManifest;
    deleteBehavior: DocsDeleteBehavior;
    files: ValidatedDocsManifestFile[];
    mode: DocsSyncMode;
    publish: boolean;
    source: DocsManifestSource;
    version: 1;
};
/** Input shape accepted by buildDocsManifest for each file (content not yet hashed). */
export type DocsManifestInputFile = {
    content: string;
    path: string;
};
/** Builds a version-1 DocsManifest, computing a sha256 digest for each input file. */
export declare const buildDocsManifest: ({ aiExport, branch, commit, deleteBehavior, files, mode, publish, repository, root, sourceId, }: {
    aiExport?: DocsAiExportManifest;
    branch?: string;
    commit?: string;
    deleteBehavior?: DocsDeleteBehavior;
    files: DocsManifestInputFile[];
    mode?: DocsSyncMode;
    publish?: boolean;
    repository?: string;
    root?: string;
    sourceId: string;
}) => DocsManifest;
@@ -0,0 +1,21 @@
1
+ import { sha256Hex } from './hash.js';
2
/**
 * Assemble a version-1 DocsManifest from raw input files and source
 * metadata. Each file gets a sha256 digest of its content; all other
 * inputs are passed through unchanged.
 */
export const buildDocsManifest = ({ aiExport, branch, commit, deleteBehavior, files, mode, publish, repository, root, sourceId }) => {
    // Attach a content digest to every input file without mutating the input.
    const hashedFiles = files.map((inputFile) => ({
        ...inputFile,
        sha256: sha256Hex(inputFile.content)
    }));
    return {
        aiExport,
        deleteBehavior,
        files: hashedFiles,
        mode,
        publish,
        source: {
            id: sourceId,
            branch,
            commit,
            repository,
            root
        },
        version: 1
    };
};

//# sourceMappingURL=manifest.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../../src/sync/manifest.ts"],"sourcesContent":["import type { DocsAiExportManifest } from './aiExportManifest.js'\nimport type { DocsFrontmatter } from './frontmatter.js'\n\nimport { sha256Hex } from './hash.js'\n\nexport type DocsSyncMode = 'dry-run' | 'sync'\n\nexport type DocsDeleteBehavior = 'archive' | 'delete' | 'draft' | 'ignore'\n\nexport type DocsManifestSource = {\n branch?: string\n commit?: string\n id: string\n repository?: string\n root?: string\n}\n\nexport type DocsManifestFile = {\n content: string\n path: string\n sha256?: string\n}\n\nexport type DocsManifest = {\n aiExport?: DocsAiExportManifest\n deleteBehavior?: DocsDeleteBehavior\n files: DocsManifestFile[]\n mode?: DocsSyncMode\n publish?: boolean\n source: DocsManifestSource\n version: 1\n}\n\nexport type ValidatedDocsManifestFile = {\n content: string\n frontmatter: DocsFrontmatter\n path: string\n route: string\n sha256: string\n title: string\n}\n\nexport type ValidatedDocsManifest = {\n aiExport?: DocsAiExportManifest\n deleteBehavior: DocsDeleteBehavior\n files: ValidatedDocsManifestFile[]\n mode: DocsSyncMode\n publish: boolean\n source: DocsManifestSource\n version: 1\n}\n\nexport type DocsManifestInputFile = {\n content: string\n path: string\n}\n\nexport const buildDocsManifest = ({\n aiExport,\n branch,\n commit,\n deleteBehavior,\n files,\n mode,\n publish,\n repository,\n root,\n sourceId,\n}: {\n aiExport?: DocsAiExportManifest\n branch?: string\n commit?: string\n deleteBehavior?: DocsDeleteBehavior\n files: DocsManifestInputFile[]\n mode?: DocsSyncMode\n publish?: boolean\n repository?: string\n root?: string\n sourceId: string\n}): DocsManifest => ({\n aiExport,\n deleteBehavior,\n files: files.map((file) => ({\n ...file,\n sha256: sha256Hex(file.content),\n })),\n mode,\n publish,\n source: {\n id: sourceId,\n branch,\n commit,\n repository,\n root,\n },\n version: 
1,\n})\n"],"names":["sha256Hex","buildDocsManifest","aiExport","branch","commit","deleteBehavior","files","mode","publish","repository","root","sourceId","map","file","sha256","content","source","id","version"],"mappings":"AAGA,SAASA,SAAS,QAAQ,YAAW;AAsDrC,OAAO,MAAMC,oBAAoB,CAAC,EAChCC,QAAQ,EACRC,MAAM,EACNC,MAAM,EACNC,cAAc,EACdC,KAAK,EACLC,IAAI,EACJC,OAAO,EACPC,UAAU,EACVC,IAAI,EACJC,QAAQ,EAYT,GAAoB,CAAA;QACnBT;QACAG;QACAC,OAAOA,MAAMM,GAAG,CAAC,CAACC,OAAU,CAAA;gBAC1B,GAAGA,IAAI;gBACPC,QAAQd,UAAUa,KAAKE,OAAO;YAChC,CAAA;QACAR;QACAC;QACAQ,QAAQ;YACNC,IAAIN;YACJR;YACAC;YACAK;YACAC;QACF;QACAQ,SAAS;IACX,CAAA,EAAE"}
@@ -0,0 +1,16 @@
1
import type { DocsValidationErrorCode } from './validate.js';
/**
 * Result of normalizing a docs source path: either a failure carrying a
 * validation error code and message, or the canonical path plus the route
 * segments derived from it (file extension stripped, trailing 'index' dropped).
 */
export type NormalizeDocsPathResult = {
    code: DocsValidationErrorCode;
    message: string;
    ok: false;
} | {
    ok: true;
    path: string;
    routeSegments: string[];
};
/** Validates and canonicalizes a relative Markdown docs path. */
export declare const normalizeDocsPath: (input: string) => NormalizeDocsPathResult;
/**
 * Derives the public route for a source path under `routeBase`; an optional
 * slug replaces the final route segment.
 */
export declare const deriveRouteFromSourcePath: ({ slug, routeBase, sourcePath, }: {
    routeBase: string;
    slug?: string;
    sourcePath: string;
}) => string;
@@ -0,0 +1,116 @@
1
// Strip any number of leading "./" prefixes from a path string.
const trimLeadingCurrentDirectory = (path) => path.replace(/^(?:\.\/)+/, '');
8
/**
 * Validate and canonicalize a docs source path.
 *
 * Rejects absolute paths (POSIX and Windows), traversal ('..') segments,
 * empty/'.' segments, and non-Markdown files. On success, returns the
 * canonical slash-normalized path plus the route segments derived from it
 * ('.md' stripped from the filename, a trailing 'index' segment dropped).
 */
export const normalizeDocsPath = (input) => {
    const fail = (code, message) => ({ code, message, ok: false });
    if (typeof input !== 'string' || input.trim() === '') {
        return fail('invalid_path', 'Docs path must be a non-empty string.');
    }
    const trimmed = input.trim();
    // Drive-letter prefix (e.g. "C:\" or "C:/") marks an absolute Windows path.
    if (/^[a-z]:[\\/]/i.test(trimmed)) {
        return fail('invalid_path', 'Docs path must not be an absolute Windows path.');
    }
    if (trimmed.startsWith('/')) {
        return fail('invalid_path', 'Docs path must not be an absolute path.');
    }
    // Forward slashes only, collapse slash runs, strip leading "./" prefixes.
    const canonical = trimmed
        .replace(/\\/g, '/')
        .replace(/\/+/g, '/')
        .replace(/^(?:\.\/)+/, '');
    if (canonical === '' || canonical.endsWith('/')) {
        return fail('invalid_path', 'Docs path must point to a Markdown file.');
    }
    const parts = canonical.split('/');
    if (parts.includes('..')) {
        return fail('path_traversal', 'Docs path must not contain path traversal segments.');
    }
    if (parts.some((part) => part === '' || part === '.')) {
        return fail('invalid_path', 'Docs path contains an invalid path segment.');
    }
    if (!canonical.endsWith('.md')) {
        return fail('non_markdown_file', 'Docs path must end in .md.');
    }
    const lastPart = parts[parts.length - 1];
    // A bare ".md" has no filename stem to route to.
    if (!lastPart || lastPart === '.md') {
        return fail('invalid_path', 'Docs path must include a Markdown filename.');
    }
    // Route segments mirror the path with the ".md" suffix removed.
    const routeSegments = parts.slice(0, -1).concat(lastPart.slice(0, -'.md'.length));
    if (routeSegments[routeSegments.length - 1] === 'index') {
        routeSegments.pop();
    }
    return {
        ok: true,
        path: canonical,
        routeSegments
    };
};
84
// Canonicalize a route base: leading slash, forward slashes, collapsed slash
// runs, no trailing slash (except the bare root), '/' as the fallback.
const normalizeRouteBase = (routeBase) => {
    let base = `/${routeBase.trim()}`.replace(/\\/g, '/').replace(/\/+/g, '/');
    if (base.length > 1) {
        base = base.replace(/\/+$/g, '');
    }
    return base || '/';
};
89
/**
 * Map a manifest source path to its public route under `routeBase`.
 *
 * The route is the normalized base plus the path's route segments; when the
 * source tree already nests under the base, the base prefix is stripped so
 * it is not doubled. An explicit slug replaces the final segment (or becomes
 * the only segment). Unparseable source paths fall back to the bare base.
 */
export const deriveRouteFromSourcePath = ({ slug, routeBase, sourcePath }) => {
    const base = normalizeRouteBase(routeBase);
    const parsed = normalizeDocsPath(sourcePath);
    if (!parsed.ok) {
        return base;
    }
    let segments = [...parsed.routeSegments];
    // Strip the base prefix when the segments already start with it.
    const baseSegments = base.split('/').filter(Boolean);
    const startsWithBase = baseSegments.length > 0 &&
        baseSegments.every((baseSegment, index) => segments[index] === baseSegment);
    if (startsWithBase) {
        segments = segments.slice(baseSegments.length);
    }
    if (slug) {
        if (segments.length > 0) {
            segments[segments.length - 1] = slug;
        } else {
            segments = [slug];
        }
    }
    const suffix = segments.join('/');
    if (!suffix) {
        return base;
    }
    return `${base}/${suffix}`.replace(/\/+/g, '/');
};

//# sourceMappingURL=paths.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../../src/sync/paths.ts"],"sourcesContent":["import type { DocsValidationErrorCode } from './validate.js'\n\nexport type NormalizeDocsPathResult =\n | {\n code: DocsValidationErrorCode\n message: string\n ok: false\n }\n | {\n ok: true\n path: string\n routeSegments: string[]\n }\n\nconst trimLeadingCurrentDirectory = (path: string): string => {\n let nextPath = path\n\n while (nextPath.startsWith('./')) {\n nextPath = nextPath.slice(2)\n }\n\n return nextPath\n}\n\nexport const normalizeDocsPath = (input: string): NormalizeDocsPathResult => {\n if (typeof input !== 'string' || input.trim() === '') {\n return {\n code: 'invalid_path',\n message: 'Docs path must be a non-empty string.',\n ok: false,\n }\n }\n\n const trimmedInput = input.trim()\n\n if (/^[a-z]:[\\\\/]/i.test(trimmedInput)) {\n return {\n code: 'invalid_path',\n message: 'Docs path must not be an absolute Windows path.',\n ok: false,\n }\n }\n\n if (trimmedInput.startsWith('/')) {\n return {\n code: 'invalid_path',\n message: 'Docs path must not be an absolute path.',\n ok: false,\n }\n }\n\n const normalizedPath = trimLeadingCurrentDirectory(\n trimmedInput.replace(/\\\\/g, '/').replace(/\\/+/g, '/'),\n )\n\n if (normalizedPath === '' || normalizedPath.endsWith('/')) {\n return {\n code: 'invalid_path',\n message: 'Docs path must point to a Markdown file.',\n ok: false,\n }\n }\n\n const segments = normalizedPath.split('/')\n\n if (segments.some((segment) => segment === '..')) {\n return {\n code: 'path_traversal',\n message: 'Docs path must not contain path traversal segments.',\n ok: false,\n }\n }\n\n if (segments.some((segment) => segment === '' || segment === '.')) {\n return {\n code: 'invalid_path',\n message: 'Docs path contains an invalid path segment.',\n ok: false,\n }\n }\n\n if (!normalizedPath.endsWith('.md')) {\n return {\n code: 'non_markdown_file',\n message: 'Docs path must end in .md.',\n ok: false,\n }\n }\n\n const fileName = segments.at(-1)\n\n if 
(!fileName || fileName === '.md') {\n return {\n code: 'invalid_path',\n message: 'Docs path must include a Markdown filename.',\n ok: false,\n }\n }\n\n const routeSegments = segments.map((segment, index) => {\n if (index === segments.length - 1) {\n return segment.slice(0, -'.md'.length)\n }\n\n return segment\n })\n\n if (routeSegments.at(-1) === 'index') {\n routeSegments.pop()\n }\n\n return {\n ok: true,\n path: normalizedPath,\n routeSegments,\n }\n}\n\nconst normalizeRouteBase = (routeBase: string): string => {\n const normalized = `/${routeBase.trim()}`.replace(/\\\\/g, '/').replace(/\\/+/g, '/')\n const withoutTrailingSlash =\n normalized.length > 1 ? normalized.replace(/\\/+$/g, '') : normalized\n\n return withoutTrailingSlash || '/'\n}\n\nexport const deriveRouteFromSourcePath = ({\n slug,\n routeBase,\n sourcePath,\n}: {\n routeBase: string\n slug?: string\n sourcePath: string\n}): string => {\n const normalizedPath = normalizeDocsPath(sourcePath)\n const normalizedRouteBase = normalizeRouteBase(routeBase)\n\n if (!normalizedPath.ok) {\n return normalizedRouteBase\n }\n\n let routeSegments = [...normalizedPath.routeSegments]\n const routeBaseSegments = normalizedRouteBase.split('/').filter(Boolean)\n\n if (\n routeBaseSegments.length > 0 &&\n routeBaseSegments.every((segment, index) => routeSegments[index] === segment)\n ) {\n routeSegments = routeSegments.slice(routeBaseSegments.length)\n }\n\n if (slug && routeSegments.length > 0) {\n routeSegments[routeSegments.length - 1] = slug\n } else if (slug) {\n routeSegments = [slug]\n }\n\n const routeSuffix = routeSegments.join('/')\n\n if (!routeSuffix) {\n return normalizedRouteBase\n }\n\n return `${normalizedRouteBase}/${routeSuffix}`.replace(/\\/+/g, 
'/')\n}\n"],"names":["trimLeadingCurrentDirectory","path","nextPath","startsWith","slice","normalizeDocsPath","input","trim","code","message","ok","trimmedInput","test","normalizedPath","replace","endsWith","segments","split","some","segment","fileName","at","routeSegments","map","index","length","pop","normalizeRouteBase","routeBase","normalized","withoutTrailingSlash","deriveRouteFromSourcePath","slug","sourcePath","normalizedRouteBase","routeBaseSegments","filter","Boolean","every","routeSuffix","join"],"mappings":"AAcA,MAAMA,8BAA8B,CAACC;IACnC,IAAIC,WAAWD;IAEf,MAAOC,SAASC,UAAU,CAAC,MAAO;QAChCD,WAAWA,SAASE,KAAK,CAAC;IAC5B;IAEA,OAAOF;AACT;AAEA,OAAO,MAAMG,oBAAoB,CAACC;IAChC,IAAI,OAAOA,UAAU,YAAYA,MAAMC,IAAI,OAAO,IAAI;QACpD,OAAO;YACLC,MAAM;YACNC,SAAS;YACTC,IAAI;QACN;IACF;IAEA,MAAMC,eAAeL,MAAMC,IAAI;IAE/B,IAAI,gBAAgBK,IAAI,CAACD,eAAe;QACtC,OAAO;YACLH,MAAM;YACNC,SAAS;YACTC,IAAI;QACN;IACF;IAEA,IAAIC,aAAaR,UAAU,CAAC,MAAM;QAChC,OAAO;YACLK,MAAM;YACNC,SAAS;YACTC,IAAI;QACN;IACF;IAEA,MAAMG,iBAAiBb,4BACrBW,aAAaG,OAAO,CAAC,OAAO,KAAKA,OAAO,CAAC,QAAQ;IAGnD,IAAID,mBAAmB,MAAMA,eAAeE,QAAQ,CAAC,MAAM;QACzD,OAAO;YACLP,MAAM;YACNC,SAAS;YACTC,IAAI;QACN;IACF;IAEA,MAAMM,WAAWH,eAAeI,KAAK,CAAC;IAEtC,IAAID,SAASE,IAAI,CAAC,CAACC,UAAYA,YAAY,OAAO;QAChD,OAAO;YACLX,MAAM;YACNC,SAAS;YACTC,IAAI;QACN;IACF;IAEA,IAAIM,SAASE,IAAI,CAAC,CAACC,UAAYA,YAAY,MAAMA,YAAY,MAAM;QACjE,OAAO;YACLX,MAAM;YACNC,SAAS;YACTC,IAAI;QACN;IACF;IAEA,IAAI,CAACG,eAAeE,QAAQ,CAAC,QAAQ;QACnC,OAAO;YACLP,MAAM;YACNC,SAAS;YACTC,IAAI;QACN;IACF;IAEA,MAAMU,WAAWJ,SAASK,EAAE,CAAC,CAAC;IAE9B,IAAI,CAACD,YAAYA,aAAa,OAAO;QACnC,OAAO;YACLZ,MAAM;YACNC,SAAS;YACTC,IAAI;QACN;IACF;IAEA,MAAMY,gBAAgBN,SAASO,GAAG,CAAC,CAACJ,SAASK;QAC3C,IAAIA,UAAUR,SAASS,MAAM,GAAG,GAAG;YACjC,OAAON,QAAQf,KAAK,CAAC,GAAG,CAAC,MAAMqB,MAAM;QACvC;QAEA,OAAON;IACT;IAEA,IAAIG,cAAcD,EAAE,CAAC,CAAC,OAAO,SAAS;QACpCC,cAAcI,GAAG;IACnB;IAEA,OAAO;QACLhB,IAAI;QACJT,MAAMY;QACNS;IACF;AACF,EAAC;AAED,MAAMK,qBAAqB,CAACC;IAC1B,MAAMC,aAAa,CAAC,CAAC,EAAED,UAAUrB,IAAI,IAAI,CAACO,OAAO,CAAC,OAAO,KAAKA,
OAAO,CAAC,QAAQ;IAC9E,MAAMgB,uBACJD,WAAWJ,MAAM,GAAG,IAAII,WAAWf,OAAO,CAAC,SAAS,MAAMe;IAE5D,OAAOC,wBAAwB;AACjC;AAEA,OAAO,MAAMC,4BAA4B,CAAC,EACxCC,IAAI,EACJJ,SAAS,EACTK,UAAU,EAKX;IACC,MAAMpB,iBAAiBR,kBAAkB4B;IACzC,MAAMC,sBAAsBP,mBAAmBC;IAE/C,IAAI,CAACf,eAAeH,EAAE,EAAE;QACtB,OAAOwB;IACT;IAEA,IAAIZ,gBAAgB;WAAIT,eAAeS,aAAa;KAAC;IACrD,MAAMa,oBAAoBD,oBAAoBjB,KAAK,CAAC,KAAKmB,MAAM,CAACC;IAEhE,IACEF,kBAAkBV,MAAM,GAAG,KAC3BU,kBAAkBG,KAAK,CAAC,CAACnB,SAASK,QAAUF,aAAa,CAACE,MAAM,KAAKL,UACrE;QACAG,gBAAgBA,cAAclB,KAAK,CAAC+B,kBAAkBV,MAAM;IAC9D;IAEA,IAAIO,QAAQV,cAAcG,MAAM,GAAG,GAAG;QACpCH,aAAa,CAACA,cAAcG,MAAM,GAAG,EAAE,GAAGO;IAC5C,OAAO,IAAIA,MAAM;QACfV,gBAAgB;YAACU;SAAK;IACxB;IAEA,MAAMO,cAAcjB,cAAckB,IAAI,CAAC;IAEvC,IAAI,CAACD,aAAa;QAChB,OAAOL;IACT;IAEA,OAAO,GAAGA,oBAAoB,CAAC,EAAEK,aAAa,CAACzB,OAAO,CAAC,QAAQ;AACjE,EAAC"}
@@ -0,0 +1,29 @@
1
import type { DocsDeleteBehavior, ValidatedDocsManifest, ValidatedDocsManifestFile } from './manifest.js';
import type { DocsValidationIssue } from './validate.js';
/** A doc that already exists in the target system, identified by its sourcePath. */
export type ExistingDocsRecord = {
    archived?: boolean;
    route: string;
    sourceHash?: string;
    sourcePath: string;
    title?: string;
};
/**
 * One planned change: the current record and/or the desired manifest file,
 * plus a human-readable reason for the action.
 */
export type PlannedDocChange = {
    current?: ExistingDocsRecord;
    desired?: ValidatedDocsManifestFile;
    reason: string;
    sourcePath: string;
};
/** The full sync plan: every doc bucketed by the action to take, plus warnings. */
export type DocsSyncPlan = {
    archive: PlannedDocChange[];
    create: PlannedDocChange[];
    delete: PlannedDocChange[];
    draft: PlannedDocChange[];
    unchanged: PlannedDocChange[];
    update: PlannedDocChange[];
    warnings: DocsValidationIssue[];
};
/**
 * Diffs the desired manifest against existing records and returns a plan.
 * `deleteBehavior` overrides the manifest's own setting when provided.
 */
export declare const planDocsSync: ({ deleteBehavior, desired, existing, }: {
    deleteBehavior?: DocsDeleteBehavior;
    desired: ValidatedDocsManifest;
    existing: ExistingDocsRecord[];
}) => DocsSyncPlan;
@@ -0,0 +1,72 @@
1
// Build a fresh, empty sync plan (new arrays each call so plans never share state).
const createEmptyPlan = () => {
    return {
        archive: [],
        create: [],
        delete: [],
        draft: [],
        unchanged: [],
        update: [],
        warnings: []
    };
};
10
/**
 * Diff a validated docs manifest against the docs that already exist and
 * bucket every document into create / update / unchanged. Existing docs
 * absent from the manifest are routed to archive / delete / draft according
 * to the effective delete behavior ('ignore' leaves them untouched).
 * Duplicate existing sourcePaths produce a warning and are skipped.
 */
export const planDocsSync = ({ deleteBehavior, desired, existing }) => {
    const plan = {
        archive: [],
        create: [],
        delete: [],
        draft: [],
        unchanged: [],
        update: [],
        warnings: []
    };
    // Explicit argument wins, then the manifest's own setting, then 'archive'.
    const removalBehavior = deleteBehavior ?? desired.deleteBehavior ?? 'archive';
    // Index existing docs by sourcePath; the first record wins on duplicates.
    const recordsByPath = new Map();
    for (const record of existing) {
        if (recordsByPath.has(record.sourcePath)) {
            plan.warnings.push({
                code: 'duplicate_existing_path',
                message: `Existing docs contain duplicate sourcePath "${record.sourcePath}".`,
                path: record.sourcePath
            });
        } else {
            recordsByPath.set(record.sourcePath, record);
        }
    }
    const wantedPaths = new Set(desired.files.map((file) => file.path));
    for (const wanted of desired.files) {
        const match = recordsByPath.get(wanted.path);
        if (!match) {
            plan.create.push({
                desired: wanted,
                reason: 'No existing doc has this sourcePath.',
                sourcePath: wanted.path
            });
        } else if (match.sourceHash === wanted.sha256) {
            plan.unchanged.push({
                current: match,
                desired: wanted,
                reason: 'Existing source hash matches desired source hash.',
                sourcePath: wanted.path
            });
        } else {
            plan.update.push({
                current: match,
                desired: wanted,
                reason: 'Existing source hash differs from desired source hash.',
                sourcePath: wanted.path
            });
        }
    }
    // Anything left over exists in the target but not in the manifest.
    for (const leftover of recordsByPath.values()) {
        if (wantedPaths.has(leftover.sourcePath)) {
            continue;
        }
        const change = {
            current: leftover,
            reason: 'Existing doc is missing from desired manifest.',
            sourcePath: leftover.sourcePath
        };
        switch (removalBehavior) {
            case 'archive':
                plan.archive.push(change);
                break;
            case 'delete':
                plan.delete.push(change);
                break;
            case 'draft':
                plan.draft.push(change);
                break;
            default:
                // 'ignore': leave the doc alone.
                break;
        }
    }
    return plan;
};

//# sourceMappingURL=plan.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../../src/sync/plan.ts"],"sourcesContent":["import type {\n DocsDeleteBehavior,\n ValidatedDocsManifest,\n ValidatedDocsManifestFile,\n} from './manifest.js'\nimport type { DocsValidationIssue } from './validate.js'\n\nexport type ExistingDocsRecord = {\n archived?: boolean\n route: string\n sourceHash?: string\n sourcePath: string\n title?: string\n}\n\nexport type PlannedDocChange = {\n current?: ExistingDocsRecord\n desired?: ValidatedDocsManifestFile\n reason: string\n sourcePath: string\n}\n\nexport type DocsSyncPlan = {\n archive: PlannedDocChange[]\n create: PlannedDocChange[]\n delete: PlannedDocChange[]\n draft: PlannedDocChange[]\n unchanged: PlannedDocChange[]\n update: PlannedDocChange[]\n warnings: DocsValidationIssue[]\n}\n\nconst createEmptyPlan = (): DocsSyncPlan => ({\n archive: [],\n create: [],\n delete: [],\n draft: [],\n unchanged: [],\n update: [],\n warnings: [],\n})\n\nexport const planDocsSync = ({\n deleteBehavior,\n desired,\n existing,\n}: {\n deleteBehavior?: DocsDeleteBehavior\n desired: ValidatedDocsManifest\n existing: ExistingDocsRecord[]\n}): DocsSyncPlan => {\n const plan = createEmptyPlan()\n const effectiveDeleteBehavior = deleteBehavior ?? desired.deleteBehavior ?? 
'archive'\n const existingBySourcePath = new Map<string, ExistingDocsRecord>()\n\n for (const existingRecord of existing) {\n if (existingBySourcePath.has(existingRecord.sourcePath)) {\n plan.warnings.push({\n code: 'duplicate_existing_path',\n message: `Existing docs contain duplicate sourcePath \"${existingRecord.sourcePath}\".`,\n path: existingRecord.sourcePath,\n })\n continue\n }\n\n existingBySourcePath.set(existingRecord.sourcePath, existingRecord)\n }\n\n const desiredSourcePaths = new Set(desired.files.map((file) => file.path))\n\n for (const desiredFile of desired.files) {\n const current = existingBySourcePath.get(desiredFile.path)\n\n if (!current) {\n plan.create.push({\n desired: desiredFile,\n reason: 'No existing doc has this sourcePath.',\n sourcePath: desiredFile.path,\n })\n continue\n }\n\n if (current.sourceHash === desiredFile.sha256) {\n plan.unchanged.push({\n current,\n desired: desiredFile,\n reason: 'Existing source hash matches desired source hash.',\n sourcePath: desiredFile.path,\n })\n continue\n }\n\n plan.update.push({\n current,\n desired: desiredFile,\n reason: 'Existing source hash differs from desired source hash.',\n sourcePath: desiredFile.path,\n })\n }\n\n for (const current of existingBySourcePath.values()) {\n if (desiredSourcePaths.has(current.sourcePath)) {\n continue\n }\n\n const change = {\n current,\n reason: 'Existing doc is missing from desired manifest.',\n sourcePath: current.sourcePath,\n }\n\n if (effectiveDeleteBehavior === 'archive') {\n plan.archive.push(change)\n } else if (effectiveDeleteBehavior === 'delete') {\n plan.delete.push(change)\n } else if (effectiveDeleteBehavior === 'draft') {\n plan.draft.push(change)\n }\n }\n\n return 
plan\n}\n"],"names":["createEmptyPlan","archive","create","delete","draft","unchanged","update","warnings","planDocsSync","deleteBehavior","desired","existing","plan","effectiveDeleteBehavior","existingBySourcePath","Map","existingRecord","has","sourcePath","push","code","message","path","set","desiredSourcePaths","Set","files","map","file","desiredFile","current","get","reason","sourceHash","sha256","values","change"],"mappings":"AAgCA,MAAMA,kBAAkB,IAAqB,CAAA;QAC3CC,SAAS,EAAE;QACXC,QAAQ,EAAE;QACVC,QAAQ,EAAE;QACVC,OAAO,EAAE;QACTC,WAAW,EAAE;QACbC,QAAQ,EAAE;QACVC,UAAU,EAAE;IACd,CAAA;AAEA,OAAO,MAAMC,eAAe,CAAC,EAC3BC,cAAc,EACdC,OAAO,EACPC,QAAQ,EAKT;IACC,MAAMC,OAAOZ;IACb,MAAMa,0BAA0BJ,kBAAkBC,QAAQD,cAAc,IAAI;IAC5E,MAAMK,uBAAuB,IAAIC;IAEjC,KAAK,MAAMC,kBAAkBL,SAAU;QACrC,IAAIG,qBAAqBG,GAAG,CAACD,eAAeE,UAAU,GAAG;YACvDN,KAAKL,QAAQ,CAACY,IAAI,CAAC;gBACjBC,MAAM;gBACNC,SAAS,CAAC,4CAA4C,EAAEL,eAAeE,UAAU,CAAC,EAAE,CAAC;gBACrFI,MAAMN,eAAeE,UAAU;YACjC;YACA;QACF;QAEAJ,qBAAqBS,GAAG,CAACP,eAAeE,UAAU,EAAEF;IACtD;IAEA,MAAMQ,qBAAqB,IAAIC,IAAIf,QAAQgB,KAAK,CAACC,GAAG,CAAC,CAACC,OAASA,KAAKN,IAAI;IAExE,KAAK,MAAMO,eAAenB,QAAQgB,KAAK,CAAE;QACvC,MAAMI,UAAUhB,qBAAqBiB,GAAG,CAACF,YAAYP,IAAI;QAEzD,IAAI,CAACQ,SAAS;YACZlB,KAAKV,MAAM,CAACiB,IAAI,CAAC;gBACfT,SAASmB;gBACTG,QAAQ;gBACRd,YAAYW,YAAYP,IAAI;YAC9B;YACA;QACF;QAEA,IAAIQ,QAAQG,UAAU,KAAKJ,YAAYK,MAAM,EAAE;YAC7CtB,KAAKP,SAAS,CAACc,IAAI,CAAC;gBAClBW;gBACApB,SAASmB;gBACTG,QAAQ;gBACRd,YAAYW,YAAYP,IAAI;YAC9B;YACA;QACF;QAEAV,KAAKN,MAAM,CAACa,IAAI,CAAC;YACfW;YACApB,SAASmB;YACTG,QAAQ;YACRd,YAAYW,YAAYP,IAAI;QAC9B;IACF;IAEA,KAAK,MAAMQ,WAAWhB,qBAAqBqB,MAAM,GAAI;QACnD,IAAIX,mBAAmBP,GAAG,CAACa,QAAQZ,UAAU,GAAG;YAC9C;QACF;QAEA,MAAMkB,SAAS;YACbN;YACAE,QAAQ;YACRd,YAAYY,QAAQZ,UAAU;QAChC;QAEA,IAAIL,4BAA4B,WAAW;YACzCD,KAAKX,OAAO,CAACkB,IAAI,CAACiB;QACpB,OAAO,IAAIvB,4BAA4B,UAAU;YAC/CD,KAAKT,MAAM,CAACgB,IAAI,CAACiB;QACnB,OAAO,IAAIvB,4BAA4B,SAAS;YAC9CD,KAAKR,KAAK,CAACe,IAAI,CAACiB;QAClB;IACF;IAEA,OAAOxB;AACT,EAAC"}