ts-ag 0.0.1-dev.1 → 0.0.1-dev.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/browser.d.ts +1 -0
- package/dist/browser.d.ts.map +1 -1
- package/dist/browser.js +1 -0
- package/dist/index.d.ts +2 -0
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +2 -0
- package/dist/lambda/client-types.d.ts +49 -35
- package/dist/lambda/client-types.d.ts.map +1 -1
- package/dist/lambda/client-types.js +3 -1
- package/dist/lambda/client.d.ts +4 -15
- package/dist/lambda/client.d.ts.map +1 -1
- package/dist/lambda/client.js +10 -6
- package/dist/lambda/errors.d.ts +46 -0
- package/dist/lambda/errors.d.ts.map +1 -0
- package/dist/lambda/errors.js +38 -0
- package/dist/lambda/handlerUtils.d.ts +18 -1
- package/dist/lambda/handlerUtils.d.ts.map +1 -1
- package/dist/lambda/index.d.ts +2 -0
- package/dist/lambda/index.d.ts.map +1 -1
- package/dist/lambda/index.js +2 -0
- package/dist/lambda/response.d.ts +127 -0
- package/dist/lambda/response.d.ts.map +1 -0
- package/dist/lambda/response.js +76 -0
- package/dist/rehype/browser.d.ts +2 -0
- package/dist/rehype/browser.d.ts.map +1 -0
- package/dist/rehype/browser.js +1 -0
- package/dist/rehype/flat-toc.d.ts +20 -0
- package/dist/rehype/flat-toc.d.ts.map +1 -0
- package/dist/rehype/flat-toc.js +58 -0
- package/dist/rehype/index.d.ts +2 -0
- package/dist/rehype/index.d.ts.map +1 -0
- package/dist/rehype/index.js +1 -0
- package/dist/scripts/clean.d.ts +3 -0
- package/dist/scripts/clean.d.ts.map +1 -0
- package/dist/scripts/clean.js +64 -0
- package/dist/ts-alias.js +0 -0
- package/dist/utils/fs.d.ts +9 -0
- package/dist/utils/fs.d.ts.map +1 -0
- package/dist/utils/fs.js +38 -0
- package/dist/utils/index.d.ts +2 -0
- package/dist/utils/index.d.ts.map +1 -0
- package/dist/utils/index.js +1 -0
- package/package.json +12 -2
- package/src/browser.ts +1 -0
- package/src/index.ts +2 -0
- package/src/lambda/client-types.ts +74 -32
- package/src/lambda/client.ts +18 -15
- package/src/lambda/errors.ts +58 -0
- package/src/lambda/handlerUtils.ts +34 -11
- package/src/lambda/index.ts +2 -0
- package/src/lambda/response.ts +81 -0
- package/src/rehype/browser.ts +1 -0
- package/src/rehype/flat-toc.ts +78 -0
- package/src/rehype/index.ts +1 -0
- package/src/scripts/clean.ts +75 -0
- package/src/utils/fs.ts +42 -0
- package/src/utils/index.ts +1 -0
|
@@ -0,0 +1,78 @@
|
|
|
1
|
+
import type { Root, RootContent, Element } from 'hast';
|
|
2
|
+
import type { Plugin } from 'unified';
|
|
3
|
+
import type { VFile } from 'vfile';
|
|
4
|
+
import { unified } from 'unified';
|
|
5
|
+
import rehypeParse from 'rehype-parse';
|
|
6
|
+
|
|
7
|
+
/**
|
|
8
|
+
* This rehype plugin extracts the headings from the markdown elements but also the raw elements.
|
|
9
|
+
* So we get html headings in the TOC as well
|
|
10
|
+
*
|
|
11
|
+
* It sets the file.data.fm.toc to a flat map of the toc
|
|
12
|
+
*/
|
|
13
|
+
export const extractToc: Plugin<[], Root> = () => {
|
|
14
|
+
return (tree: Root, file: VFile) => {
|
|
15
|
+
const details = tree.children.flatMap(extractDetails);
|
|
16
|
+
if (file.data.fm === undefined) file.data.fm = {};
|
|
17
|
+
// @ts-expect-error its untyped but for svmdex it is there
|
|
18
|
+
file.data.fm.toc = details;
|
|
19
|
+
};
|
|
20
|
+
};
|
|
21
|
+
/** Flat (non-nested) table of contents, in document order. */
export type Toc = TocEntry[];
/** One heading captured for the table of contents. */
export type TocEntry = {
	/** Heading depth parsed from the tag name (h1 → 1 … h6 → 6). */
	level: number;
	/** The heading element's `id` property. */
	id: string;
	/** The heading's text content; falls back to the id when the heading is not a single text node. */
	value: string;
};
|
|
27
|
+
|
|
28
|
+
function extractDetails(
|
|
29
|
+
content:
|
|
30
|
+
| RootContent
|
|
31
|
+
| {
|
|
32
|
+
type: 'raw';
|
|
33
|
+
value: string;
|
|
34
|
+
}
|
|
35
|
+
): TocEntry[] {
|
|
36
|
+
if (content.type === 'element' && content.tagName.startsWith('h') && 'id' in content.properties) {
|
|
37
|
+
const value =
|
|
38
|
+
content.children.length === 1 && content.children[0].type === 'text'
|
|
39
|
+
? content.children[0].value
|
|
40
|
+
: content.properties.id;
|
|
41
|
+
return [
|
|
42
|
+
{
|
|
43
|
+
level: parseInt(content.tagName.slice(1)),
|
|
44
|
+
id: content.properties.id,
|
|
45
|
+
value
|
|
46
|
+
}
|
|
47
|
+
];
|
|
48
|
+
} else if (content.type === 'raw') {
|
|
49
|
+
const parsed = parseRaw(content.value);
|
|
50
|
+
return parsed.flatMap(extractDetails);
|
|
51
|
+
}
|
|
52
|
+
return [];
|
|
53
|
+
}
|
|
54
|
+
|
|
55
|
+
/**
|
|
56
|
+
* Parses raw HTML and returns a flat array of all heading (h1-h6) elements as HAST nodes.
|
|
57
|
+
*/
|
|
58
|
+
export function parseRaw(raw: string): Element[] {
|
|
59
|
+
// Parse the HTML string into a HAST Root node
|
|
60
|
+
const tree = unified()
|
|
61
|
+
.use(rehypeParse, { fragment: true }) // allow parsing HTML fragments
|
|
62
|
+
.parse(raw) as Root;
|
|
63
|
+
|
|
64
|
+
// Helper function to recursively find heading elements
|
|
65
|
+
function collectHeadings(node: RootContent): Element[] {
|
|
66
|
+
if (node.type === 'element' && /^h[1-6]$/.test(node.tagName)) {
|
|
67
|
+
return [node];
|
|
68
|
+
}
|
|
69
|
+
// Check children recursively
|
|
70
|
+
if ('children' in node && Array.isArray(node.children)) {
|
|
71
|
+
return node.children.flatMap(collectHeadings);
|
|
72
|
+
}
|
|
73
|
+
return [];
|
|
74
|
+
}
|
|
75
|
+
|
|
76
|
+
// Flatten all headings found in the tree
|
|
77
|
+
return tree.children.flatMap(collectHeadings);
|
|
78
|
+
}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
// Barrel module: re-export the browser-safe rehype entry point.
export * from './browser.js'
|
|
@@ -0,0 +1,75 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
|
|
3
|
+
import * as fs from 'fs';
|
|
4
|
+
import * as path from 'path';
|
|
5
|
+
|
|
6
|
+
/** Parsed command-line options for the clean script. */
interface Args {
	/** Directory names to skip while cleaning (matched by basename). */
	dirExcludes: string[];
	/** When non-empty, only locations matching one of these are cleaned. */
	dirIncludes: string[];
}
|
|
10
|
+
|
|
11
|
+
function parseArgs(): Args {
|
|
12
|
+
const args = process.argv.slice(2);
|
|
13
|
+
const dirExcludes: string[] = [];
|
|
14
|
+
const dirIncludes: string[] = [];
|
|
15
|
+
for (let i = 0; i < args.length; i++) {
|
|
16
|
+
if (args[i] === '--dir-excludes' && args[i + 1]) {
|
|
17
|
+
dirExcludes.push(args[i + 1]);
|
|
18
|
+
i++;
|
|
19
|
+
} else if (args[i] === '--dir-includes' && args[i + 1]) {
|
|
20
|
+
dirIncludes.push(args[i + 1]);
|
|
21
|
+
i++;
|
|
22
|
+
}
|
|
23
|
+
}
|
|
24
|
+
return { dirExcludes, dirIncludes };
|
|
25
|
+
}
|
|
26
|
+
|
|
27
|
+
function shouldExclude(dir: string, excludes: string[]): boolean {
|
|
28
|
+
return excludes.some((ex) => path.basename(dir) === ex);
|
|
29
|
+
}
|
|
30
|
+
|
|
31
|
+
function shouldInclude(dir: string, includes: string[]): boolean {
|
|
32
|
+
if (includes.length === 0) return true;
|
|
33
|
+
return includes.some((inc) => dir.includes(inc));
|
|
34
|
+
}
|
|
35
|
+
|
|
36
|
+
function removeDir(targetPath: string) {
|
|
37
|
+
if (fs.existsSync(targetPath)) {
|
|
38
|
+
fs.rmSync(targetPath, { recursive: true, force: true });
|
|
39
|
+
console.log(`Removed: ${targetPath}`);
|
|
40
|
+
}
|
|
41
|
+
}
|
|
42
|
+
|
|
43
|
+
/**
 * Recursively walks `root`, deleting build artifacts:
 * - any `dist` directory (subject to the include/exclude rules below), and
 * - any `tsconfig.tsbuildinfo` file.
 * `node_modules` is never entered, and excluded directories are not recursed into.
 */
function clean(root: string, args: Args) {
	const entries = fs.readdirSync(root, { withFileTypes: true });
	for (const entry of entries) {
		const fullPath = path.join(root, entry.name);
		if (entry.isDirectory()) {
			if (entry.name === 'node_modules') continue;
			if (entry.name === 'dist') {
				// Decide whether to delete this dist by inspecting the names of its
				// sibling directories (the other children of dist's parent).
				const distParentDir = path.dirname(fullPath);
				const siblingDirs = fs
					.readdirSync(distParentDir, { withFileTypes: true })
					.filter((d) => d.isDirectory())
					.map((d) => d.name);

				// NOTE(review): unlike shouldExclude/shouldInclude, these match the
				// exclude/include names against the sibling directory names (which
				// include 'dist' itself), not against the path — confirm intended.
				const shouldExcludeSibling = args.dirExcludes.some((ex) => siblingDirs.includes(ex));
				const shouldIncludeSibling =
					args.dirIncludes.length === 0 || args.dirIncludes.some((inc) => siblingDirs.includes(inc));

				if (!shouldExcludeSibling && shouldIncludeSibling) {
					removeDir(fullPath);
				}
				// Never recurse into dist, whether or not it was removed.
				continue;
			}
			// Recurse into ordinary directories unless excluded by basename.
			if (!shouldExclude(fullPath, args.dirExcludes)) {
				clean(fullPath, args);
			}
		} else if (entry.name === 'tsconfig.tsbuildinfo') {
			// removeDir handles plain files too (fs.rmSync with force).
			removeDir(fullPath);
		}
	}
}
|
|
73
|
+
|
|
74
|
+
const args = parseArgs();
|
|
75
|
+
clean(process.cwd(), args);
|
package/src/utils/fs.ts
ADDED
|
@@ -0,0 +1,42 @@
|
|
|
1
|
+
import { lstat, mkdir, readFile, writeFile } from 'fs/promises';
|
|
2
|
+
import { dirname } from 'path';
|
|
3
|
+
import chalk from 'chalk';
|
|
4
|
+
|
|
5
|
+
/**
|
|
6
|
+
* @returns true if a filepath exists
|
|
7
|
+
*/
|
|
8
|
+
export async function exists(filePath: string): Promise<boolean> {
|
|
9
|
+
try {
|
|
10
|
+
await lstat(filePath);
|
|
11
|
+
return true;
|
|
12
|
+
} catch {
|
|
13
|
+
return false;
|
|
14
|
+
}
|
|
15
|
+
}
|
|
16
|
+
|
|
17
|
+
/**
|
|
18
|
+
* Writes data to a filepath if it is different
|
|
19
|
+
*/
|
|
20
|
+
export async function writeIfDifferent(filePath: string, newData: string) {
|
|
21
|
+
// Ensure the directory exists
|
|
22
|
+
const directory = dirname(filePath);
|
|
23
|
+
if (!(await exists(directory))) {
|
|
24
|
+
await mkdir(directory, { recursive: true });
|
|
25
|
+
}
|
|
26
|
+
|
|
27
|
+
// Check if the file exists
|
|
28
|
+
if (await exists(filePath)) {
|
|
29
|
+
// Read the existing file content
|
|
30
|
+
const existingData = await readFile(filePath, 'utf8');
|
|
31
|
+
|
|
32
|
+
// Compare the existing data with the new data
|
|
33
|
+
if (existingData === newData) {
|
|
34
|
+
// console.log('File contents are identical. No write needed.');
|
|
35
|
+
return;
|
|
36
|
+
}
|
|
37
|
+
}
|
|
38
|
+
|
|
39
|
+
// Write the new data if it's different or the file doesn't exist
|
|
40
|
+
await writeFile(filePath, newData, 'utf8');
|
|
41
|
+
console.log(chalk.green('Writing to'), filePath);
|
|
42
|
+
}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
// Barrel module: re-export the filesystem helpers.
export * from './fs.js'
|