@p-buddy/parkdown 0.0.2 → 0.0.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/src/api/utils.ts DELETED
@@ -1,161 +0,0 @@
- import type { CreateParser, MethodDefinitions, ParsingCandidates, TypeRecord } from "./types";
-
- const supportedTypes = ["string", "number", "boolean"] as const satisfies (keyof TypeRecord)[];
-
- const isSupportedType = (type: string): type is keyof TypeRecord => supportedTypes.includes(type as keyof TypeRecord);
-
- // This regex is used to parse function-like invocations such as "code(ts,some-meta)"
- // Breaking down the regex pattern:
- // ^ - Asserts position at the start of the string
- // ([a-zA-Z0-9_-]+) - Capture group 1: The function name
- //   [a-zA-Z0-9_-]+ - One or more alphanumeric characters, underscores, or hyphens
- // (?:\(([^)]*)\))? - Optional non-capturing group for the parameters section
- //   \( - Literal opening parenthesis
- //   ([^)]*) - Capture group 2: Zero or more characters that are not closing parentheses
- //   \) - Literal closing parenthesis
- //   ? - Makes the entire parameter section optional
- // $ - Asserts position at the end of the string
- const INVOCATION_REGEX = /^([a-zA-Z0-9_-]+)(?:\(([^)]*)\))?$/;
-
- export const parseInvocation = (input: string): { name: string, parameters: (string | undefined)[] } => {
-   const match = input.match(INVOCATION_REGEX);
-   if (!match) throw new Error(`Invalid invocation: ${input}`);
-
-   const [, name, rawParams = ""] = match;
-
-   if (!rawParams.trim()) return { name, parameters: [] };
-
-   const parameters = splitOnUnquotedComma(rawParams);
-   return { name, parameters };
- }
-
- const restIsUndefined = <T>(arr: T[], index: number) => {
-   for (let i = index + 1; i < arr.length; i++)
-     if (arr[i] !== undefined) return false;
-   return true;
- }
-
- export const splitOnUnquotedComma = (input: string): (string | undefined)[] => {
-   const result: (string | undefined)[] = [];
-
-   let current = "";
-   let inSingleQuotes = false;
-
-   for (let i = 0; i < input.length; i++) {
-     const ch = input[i];
-     const isSingleQuote = ch === "'";
-
-     if (isSingleQuote && input.at(i + 1) === "'") {
-       const triple = input.at(i + 2) === "'";
-       inSingleQuotes = !inSingleQuotes;
-       current += triple ? "'''" : "''";
-       i += triple ? 2 : 1;
-     } else if (isSingleQuote) {
-       inSingleQuotes = !inSingleQuotes;
-       current += ch;
-     } else if (ch === "," && !inSingleQuotes) {
-       result.push(current.trim());
-       current = "";
-     } else current += ch;
-   }
-
-   result.push(current.trim());
-
-   return result
-     .map(item => item === "" ? undefined : item ? stripSurroundingSingleQuotes(item) : undefined)
-     .filter((item, index, arr) => item !== undefined || !restIsUndefined(arr, index));
- }
-
- const stripSurroundingSingleQuotes = (input: string): string => {
-   const isWrapped = input.length >= 2 && input[0] === "'" && input.at(-1) === "'";
-   const isDoubleWrapped = isWrapped && (input.length >= 4 && input[1] === "'" && input.at(-2) === "'");
-   return !isWrapped || isDoubleWrapped ? input : input.slice(1, -1);
- };
-
- type ParsedParameterDefinition = { name: string; optional: boolean; type: keyof TypeRecord };
- type ParsedMethodDefinition = { name: string; parameters?: ParsedParameterDefinition[] };
-
- export const parseDefinition = <T extends string>(definition: T): ParsedMethodDefinition => {
-   // Match a method definition pattern:
-   // ^ - start of string
-   // (\w+) - capture group 1: one or more word characters (method name)
-   // (?:\(([^)]*)\))? - optional non-capturing group:
-   //   \( - literal opening parenthesis
-   //   ([^)]*) - capture group 2: zero or more characters that are not closing parenthesis (parameter list)
-   //   \) - literal closing parenthesis
-   const METHOD_REGEX = /^(\w+)(?:\(([^)]*)\))?/;
-
-   const methodMatch = definition.match(METHOD_REGEX);
-   if (!methodMatch) return { name: definition };
-
-   const [, methodName, paramString] = methodMatch;
-
-   if (!paramString) return { name: methodName };
-
-   // (\w+) - Capture group 1: One or more word characters (parameter name)
-   // (\?)? - Capture group 2: Optional question mark (indicates optional parameter)
-   // : - Literal colon character
-   // \s* - Zero or more whitespace characters
-   // ([^,]+) - Capture group 3: One or more characters that are not commas (parameter type)
-   // /g - Global flag: Find all matches in the string, not just the first one
-   const PARAM_REGEX = /(\w+)(\?)?:\s*([^,]+)/g;
-
-   const parameters: ParsedParameterDefinition[] = [];
-
-   let match: RegExpExecArray | null;
-   while ((match = PARAM_REGEX.exec(paramString)) !== null) {
-     const [, name, optSymbol, paramType] = match;
-     const type = paramType.trim();
-     if (!isSupportedType(type)) throw new Error(`Unsupported type: ${type}`);
-     parameters.push({ name, optional: optSymbol === "?", type });
-   }
-
-   return { name: methodName, parameters };
- }
-
- export const createParser: CreateParser = <T extends MethodDefinitions>(definitions: T) => {
-   const parsedDefinitions = definitions
-     .map(parseDefinition)
-     .reduce(
-       (map, { name, parameters }) => map.set(name, parameters),
-       new Map<string, ParsedParameterDefinition[] | undefined>()
-     );
-   return (query: string) => {
-     const { name, parameters } = parseInvocation(query);
-     if (!parsedDefinitions.has(name))
-       throw new Error(`Unknown method: ${name}`);
-
-     const parameterDefinitions = parsedDefinitions.get(name);
-     if (parameterDefinitions === undefined)
-       return { name } satisfies ParsingCandidates<[string]> as ParsingCandidates<T>;
-
-     if (parameters.length > parameterDefinitions.length) {
-       const requiredCount = parameterDefinitions.filter(({ optional }) => !optional).length;
-       const expected = requiredCount === parameterDefinitions.length
-         ? requiredCount.toString()
-         : `${requiredCount} - ${parameterDefinitions.length}`;
-       throw new Error(`Too many parameters: ${parameters.length} for method '${name}' (expected: ${expected})`);
-     }
-
-     return parameterDefinitions.reduce((acc, { name: param, optional, type }, index) => {
-       const value = parameters[index];
-
-       if (value === undefined)
-         if (optional) return acc;
-         else throw new Error(`Missing required parameter: ${param} for method '${name}'`);
-
-       switch (type) {
-         case "string":
-           acc[param] = value;
-           break;
-         case "number":
-         case "boolean":
-           acc[param] = JSON.parse(value);
-           break;
-       }
-       if (typeof acc[param] !== type)
-         throw new Error(`Invalid type: ${param} must be ${type}, got ${typeof acc[param]} for method '${name}'`);
-       return acc;
-     }, { name } as ParsingCandidates<T>);
-   }
- }
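
For orientation, here is a small usage sketch (not part of the package) of the query-parsing API removed above. It assumes the `createParser` and `parseInvocation` exports from the deleted `src/api/utils.ts`; the method-definition strings are illustrative only, and behavior in 0.0.4 may differ.

```ts
// Hypothetical import of the deleted module shown above.
import { createParser, parseInvocation } from "./utils";

// Illustrative method definitions (not taken from the package).
const parse = createParser(["dropdown(summary: string, open?: boolean)", "code(lang?: string)"]);

// Quoted parameters keep their commas; surrounding single quotes are stripped,
// and trailing optional parameters that were omitted are simply left out.
parse("dropdown('Open-me,-please!')"); // { name: "dropdown", summary: "Open-me,-please!" }

// Unquoted commas split parameters.
parseInvocation("code(ts,some-meta)"); // { name: "code", parameters: ["ts", "some-meta"] }
```
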
package/src/cli.ts DELETED
@@ -1,31 +0,0 @@
- #!/usr/bin/env node
- import { Command } from '@commander-js/extra-typings';
- import { version } from '../package.json';
- import { populateMarkdownInclusions, depopulateMarkdownInclusions } from '.';
-
- const program = new Command()
-   .version(version)
-   .option('--nw, --no-write', 'Do NOT write result to file (defaults to false)', false as boolean)
-   .option('--ni, --no-inclusions', 'Do NOT process file inclusions (defaults to false)', false as boolean)
-   .option('-d, --depopulate', 'Remove populated inclusions from the file', false as boolean)
-   .option('-f, --file <flag>', 'The file(s) to process', (value, arr) => (arr.push(value), arr), new Array<string>())
-   .option('-r, --remap-imports', 'Remap import specifiers in code blocks from one destination to another')
-   .parse();
-
- const { inclusions: noInclusions, depopulate, file, write: noWrite } = program.opts();
-
-
- if (file.length === 0) file.push("README.md");
-
- /** parkdown: process-order */
- const processors = [
-   [populateMarkdownInclusions, !noInclusions],
-   [depopulateMarkdownInclusions, depopulate],
- ] as const;
- /** parkdown: process-order */
-
- for (const [processor] of processors.filter(([_, condition]) => condition))
-   for (const _file of file) {
-     const result = processor(_file, !noWrite);
-     if (noWrite) console.log(result);
-   }
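
The removed CLI builds its repeatable `-f/--file` option with a coercion callback that accumulates values into an array, and relies on commander's `--no-*` convention for `--no-write` / `--no-inclusions`. A standalone sketch of just that pattern follows (using plain `commander`; the CLI itself used `@commander-js/extra-typings`, and the file paths here are examples, not package defaults):

```ts
// Standalone sketch of the option pattern used by the deleted cli.ts above.
import { Command } from "commander";

const program = new Command()
  .option("--no-write", "Do NOT write result to file")
  .option(
    "-f, --file <path>",
    "The file(s) to process (repeatable)",
    (value: string, previous: string[]) => [...previous, value],
    [] as string[]
  )
  .parse(["node", "cli", "-f", "README.md", "-f", "docs/GUIDE.md", "--no-write"]);

// commander's --no-* convention: `write` defaults to true and flips to false here.
console.log(program.opts()); // { write: false, file: ["README.md", "docs/GUIDE.md"] }
```
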
@@ -1,369 +0,0 @@
- import { describe, expect, test } from "vitest";
- import { dedent } from "ts-dedent";
- import { extractContent, getAllPositionNodes, nodeSort, parse } from "./utils";
- import { PsuedoFilesystem, lorem } from "./utils.test";
- import { join } from "node:path";
- import {
-   getReplacementTargets,
-   isSpecialLink,
-   applyHeadingDepth,
-   extendGetRelativePathContent,
-   recursivelyPopulateInclusions,
-   nodeDepthFinder,
-   specialComment,
-   isSpecialComment,
-   getTopLevelCommentBlocks,
- } from "./include";
-
- describe(isSpecialLink.name, () => {
-   const check = (md: string, expectation: boolean) =>
-     getAllPositionNodes(parse.md(md), "link")
-       .forEach(node => expect(isSpecialLink(node)).toBe(expectation));
-
-   const cases = {
-     "non-link": ["test", false],
-     "link has text": ["[test](http://example.com)", false],
-     "link has no text, but unsupported target": ["[](file.md)", false],
-     "web link": ["[](http://example.com)", true],
-     "relative file, same directory": ["[](./file.md)", true],
-     "relative file, different directory": ["[](../file.md)", true],
-   } as const;
-
-   for (const [description, [md, expectation]] of Object.entries(cases))
-     test(description, () => check(md, expectation));
- });
-
- describe(nodeDepthFinder.name, () => {
-   test("find depth in lorem", () => {
-     for (const markdown of lorem.md) {
-       const headings = markdown.split("\n")
-         .map((content, index) => ({ content, line: index + 1 }))
-         .filter(({ content }) => content.startsWith("#"))
-         .map(heading => ({ ...heading, depth: heading.content.match(/^#+/)?.[0].length }));
-       const ast = parse.md(markdown);
-       const findDepth = nodeDepthFinder(ast);
-       let index = headings.length - 1;
-       for (const node of getAllPositionNodes(ast).sort(nodeSort.reverse)) {
-         if (node.position.start.line < headings[index].line) index--;
-         const depth = findDepth(node);
-         expect(depth).toBe(headings[index].depth,);
-       }
-     }
-   });
- });
-
- describe('applyHeadingDepth', () => {
-   test('should increase heading levels by the specified depth', () => {
-     const markdown = "# Heading 1\n\n## Heading 2\n\n### Heading 3";
-     const result = applyHeadingDepth(markdown, 1);
-     expect(result).toBe("## Heading 1\n\n### Heading 2\n\n#### Heading 3");
-   });
-
-   test('should decrease heading levels by the specified depth', () => {
-     const markdown = "### Heading 3\n\n## Heading 2\n\n# Heading 1";
-     const result = applyHeadingDepth(markdown, -1);
-     expect(result).toBe("## Heading 3\n\n# Heading 2\n\n# Heading 1");
-   });
-
-   test('should cap heading levels at 6', () => {
-     const markdown = "#### Heading 4\n\n##### Heading 5\n\n###### Heading 6";
-     const result = applyHeadingDepth(markdown, 2);
-     expect(result).toBe("###### Heading 4\n\n###### Heading 5\n\n###### Heading 6");
-   });
-
-   test('should not modify non-heading content', () => {
-     const markdown = "# Heading 1\n\nSome regular text\n\n## Heading 2\n\n- List item 1\n- List item 2";
-     const result = applyHeadingDepth(markdown, 1);
-     expect(result).toBe("## Heading 1\n\nSome regular text\n\n### Heading 2\n\n- List item 1\n- List item 2");
-   });
-
-   test('should handle headings with different formatting', () => {
-     const markdown = "# *Italic Heading*\n\n## **Bold Heading**\n\n### `Code Heading`";
-     const result = applyHeadingDepth(markdown, 1);
-     expect(result).toBe("## *Italic Heading*\n\n### **Bold Heading**\n\n#### `Code Heading`");
-   });
-
-   test('should handle headings with special characters', () => {
-     const markdown = "# Heading with & special < characters >";
-     const result = applyHeadingDepth(markdown, 2);
-     expect(result).toBe("### Heading with & special < characters >");
-   });
-
-   test('should accept an existing AST as input', () => {
-     const markdown = "# Heading 1\n\n## Heading 2";
-     const ast = parse.md(markdown);
-     const result = applyHeadingDepth(markdown, 2, ast);
-     expect(result).toBe("### Heading 1\n\n#### Heading 2");
-   });
- });
-
- describe(getTopLevelCommentBlocks.name, () => {
-   test("problematic case", () => {
-     const content = dedent`
-       # Main heading
-
-       [](./child/README.md)
-       <!-- p▼ Begin -->
-       ## Child heading
-
-       [](./grandchild/README.md)
-       <!-- p▼ Begin -->
-       ### Grandchild heading
-
-       Hello!
-       <!-- p▼ End -->
-       <!-- p▼ End -->
-
-       End
-     `;
-     const ast = parse.md(content);
-     const openingComments = getAllPositionNodes(ast, "html").filter(isSpecialComment("begin"));
-     const closingComments = getAllPositionNodes(ast, "html").filter(isSpecialComment("end"));
-     expect(openingComments.length).toBe(2);
-     expect(closingComments.length).toBe(2);
-     const blocks = getTopLevelCommentBlocks(openingComments, closingComments);
-     expect(blocks.length).toBe(1);
-     expect(blocks[0].open).toBe(openingComments[0]);
-     expect(blocks[0].close).toBe(closingComments[1]);
-   })
- });
-
- describe(extendGetRelativePathContent.name, () => {
-   test('should call original function with resolved path', () => {
-     const filesystem = new PsuedoFilesystem({
-       root: { base: "", child: { a: "", b: "", nested: { a: "" } } }
-     }, { setContentToPath: true });
-
-     const fromRoot = (path: string) => filesystem.getFileFromAbsolutePath(join("root", path));
-
-     expect(fromRoot("./base")).toBe("root/base");
-     expect(fromRoot("./child/a")).toBe("root/child/a");
-     expect(fromRoot("./child/b")).toBe("root/child/b");
-     expect(fromRoot("./child/nested/a")).toBe("root/child/nested/a");
-
-     const extended = extendGetRelativePathContent(fromRoot, { url: "./child/a" });
-     expect(extended("../base")).toBe("root/base");
-     expect(extended("./a")).toBe("root/child/a");
-     expect(extended("./b")).toBe("root/child/b");
-     expect(extended("./nested/a")).toBe("root/child/nested/a");
-   });
- });
-
-
-
- describe(recursivelyPopulateInclusions.name, () => {
-   test("basic unpopulated", () => {
-     const filesystem = new PsuedoFilesystem({
-       "README.md": dedent`
-         # Main heading
-
-         [](./child/README.md)
-
-         End parent
-       `,
-       child: {
-         "README.md": dedent`
-           # Child heading
-
-           End child
-         `,
-       }
-     });
-
-     const fromRoot = (path: string) => filesystem.getFileFromAbsolutePath(join("", path));
-
-     const result = recursivelyPopulateInclusions(filesystem.getFileFromAbsolutePath("README.md"), 0, fromRoot);
-     expect(result).toBe(dedent`
-       # Main heading
-
-       [](./child/README.md)
-       ${specialComment.begin}
-       ## Child heading
-
-       End child
-       ${specialComment.end}
-
-       End parent
-     `)
-     expect(result).toBe(recursivelyPopulateInclusions(result, 0, fromRoot));
-   });
-
-   test('should apply modifications to all top-level links in a markdown file', () => {
-
-     const filesystem = new PsuedoFilesystem({
-       "README.md": dedent`
-         # Main heading
-
-         [](./child/README.md)
-
-         End
-       `,
-       child: {
-         "README.md": dedent`
-           # Child heading
-
-           [](./grandchild/README.md)
-           ${specialComment.begin}
-           THIS SHOULD BE DELETED
-           ${specialComment.end}
-         `,
-         grandchild: {
-           "README.md": dedent`
-             # Grandchild heading
-
-             Hello!
-           `,
-         }
-       }
-     });
-
-     const fromRoot = (path: string) => filesystem.getFileFromAbsolutePath(join("", path));
-
-     const result = recursivelyPopulateInclusions(filesystem.getFileFromAbsolutePath("README.md"), 0, fromRoot);
-     expect(result).toBe(recursivelyPopulateInclusions(result, 0, fromRoot));
-   });
-
-   test('with code boundary', () => {
-     const filesystem = new PsuedoFilesystem({
-       "README.md": dedent`
-         # Main heading
-
-         [](./child/file.ts?boundary=boundary)
-
-         End
-       `,
-       child: {
-         "file.ts": dedent`
-           if (true) {
-             /* boundary */
-             const x = 5;
-             /* boundary */
-           }
-         `,
-       }
-     });
-
-     const fromRoot = (path: string) => filesystem.getFileFromAbsolutePath(join("", path));
-
-     const result = recursivelyPopulateInclusions(filesystem.getFileFromAbsolutePath("README.md"), 0, fromRoot);
-     expect(result).toBe(dedent`
-       # Main heading
-
-       [](./child/file.ts?boundary=boundary)
-       ${specialComment.begin}
-       \`\`\`ts
-       const x = 5;
-       \`\`\`
-       ${specialComment.end}
-
-       End`
-     )
-   })
-
-   test('with wrapped in dropdown', () => {
-     const filesystem = new PsuedoFilesystem({
-       "README.md": dedent`
-         # Main heading
-
-         [](./child/README.md?tag=dropdown('Open-me,-please!'))
-       `,
-       child: {
-         "README.md": dedent`
-           Hello!
-         `,
-       }
-     });
-
-     const fromRoot = (path: string) => filesystem.getFileFromAbsolutePath(join("", path));
-
-     const result = recursivelyPopulateInclusions(filesystem.getFileFromAbsolutePath("README.md"), 0, fromRoot);
-     expect(result).toBe(dedent`
-       # Main heading
-
-       [](./child/README.md?tag=dropdown('Open-me,-please!'))
-       ${specialComment.begin}
-
-       <details>
-       <summary>Open me, please!</summary>
-
-       Hello!
-       </details>
-
-       ${specialComment.end}`
-     );
-   })
- });
-
- describe(getReplacementTargets.name, () => {
-   test('should return empty array for no special links or comments', () => {
-     const emptyMarkdown = "# Just a heading\n\nNo special links or comments here.";
-     const emptyAst = parse.md(emptyMarkdown);
-     expect(getReplacementTargets(emptyMarkdown, emptyAst)).toEqual([]);
-   });
-
-   test('should handle single unpopulated special link (no closing comment)', () => {
-     const singleLinkMarkdown = "# Heading\n\n[](http://example.com)";
-     const singleLinkAst = parse.md(singleLinkMarkdown);
-     const singleLinkTargets = getReplacementTargets(singleLinkMarkdown, singleLinkAst);
-     expect(singleLinkTargets.length).toBe(1);
-     expect(singleLinkTargets[0].url).toBe("http://example.com");
-     expect(singleLinkTargets[0].headingDepth).toBe(1);
-     expect(extractContent(singleLinkMarkdown, singleLinkTargets[0])).toBe("[](http://example.com)");
-   });
-
-   test('should handle special link with closing comment', () => {
-     const linkWithCommentMarkdown =
-       "# Main heading\n\n" +
-       "## Section\n\n" +
-       "[](./file.md)\n" +
-       specialComment.begin + "\n" +
-       "Some content\n" +
-       specialComment.end;
-     const linkWithCommentAst = parse.md(linkWithCommentMarkdown);
-     const linkWithCommentTargets = getReplacementTargets(linkWithCommentMarkdown, linkWithCommentAst);
-     expect(linkWithCommentTargets.length).toBe(1);
-     expect(linkWithCommentTargets[0].url).toBe("./file.md");
-     expect(linkWithCommentTargets[0].headingDepth).toBe(2);
-     expect(
-       extractContent(linkWithCommentMarkdown, linkWithCommentTargets[0])
-     ).toBe(`[](./file.md)\n${specialComment.begin}\nSome content\n${specialComment.end}`);
-   });
-
-   test('should handle multiple links and comments', () => {
-     const complexMarkdown =
-       "# Main heading\n\n" +
-       "## First section\n\n" +
-       "[](./first.md)\n" +
-       specialComment.begin + "\n" +
-       "Some content\n" +
-       specialComment.end + "\n\n" +
-       "## Second section\n\n" +
-       "[](./second.md)\n" +
-       specialComment.begin + "\n" +
-       "More content\n" +
-       specialComment.end + "\n\n" +
-       "## Third section\n\n" +
-       "[](http://example.com)";
-     const complexAst = parse.md(complexMarkdown);
-     const complexTargets = getReplacementTargets(complexMarkdown, complexAst);
-     expect(complexTargets.length).toBe(3);
-
-     expect(complexTargets[0].url).toBe("./first.md");
-     expect(complexTargets[0].headingDepth).toBe(2);
-     expect(
-       extractContent(complexMarkdown, complexTargets[0])
-     ).toBe(`[](./first.md)\n${specialComment.begin}\nSome content\n${specialComment.end}`);
-
-     expect(complexTargets[1].url).toBe("./second.md");
-     expect(complexTargets[1].headingDepth).toBe(2);
-     expect(
-       extractContent(complexMarkdown, complexTargets[1])
-     ).toBe(`[](./second.md)\n${specialComment.begin}\nMore content\n${specialComment.end}`);
-
-     expect(complexTargets[2].url).toBe("http://example.com");
-     expect(complexTargets[2].headingDepth).toBe(2);
-     expect(
-       extractContent(complexMarkdown, complexTargets[2])
-     ).toBe("[](http://example.com)");
-   });
- });
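
The deleted tests above exercise parkdown's internal `./include` module. As a quick illustration of the heading-depth behavior they verify, here is a minimal sketch based solely on those test expectations; the `./include` specifier is the package-internal path the tests import and may not be exposed from the published entry point.

```ts
// Minimal sketch based on the expectations in the deleted tests above.
import { applyHeadingDepth } from "./include"; // package-internal module, per the deleted tests

// Every ATX heading is shifted by the given depth, capping at h6 (######).
applyHeadingDepth("# Title\n\n## Section", 2);        // "### Title\n\n#### Section"
applyHeadingDepth("##### Deep\n\n###### Deeper", 2);  // "###### Deep\n\n###### Deeper"
```
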