@byteatatime/mdstream 0.0.1 → 0.0.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +18 -1
- package/src/index.ts +47 -9
- package/test/finalization.test.ts +0 -115
- package/test/index.test.ts +0 -48
- package/test/plugins.test.ts +0 -46
- package/test/setext.test.ts +0 -103
- package/tsconfig.json +0 -29
package/package.json
CHANGED

@@ -1,8 +1,25 @@
 {
   "name": "@byteatatime/mdstream",
-  "version": "0.0.1",
+  "version": "0.0.3",
   "module": "src/index.ts",
+  "types": "src/index.ts",
   "type": "module",
+  "exports": {
+    ".": {
+      "types": "./src/index.ts",
+      "import": "./src/index.ts"
+    }
+  },
+  "files": [
+    "src",
+    "package.json",
+    "README.md"
+  ],
+  "author": "ByteAtATime",
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/ByteAtATime/mdstream"
+  },
   "devDependencies": {
     "@types/bun": "latest",
     "@types/mdast": "^4.0.4"
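The new `exports` map and `types` field point both runtime imports and type resolution at the TypeScript source entry. A minimal consumer sketch (not part of the diff; the import shape is an assumption based on the fields above and the exports in src/index.ts):

```ts
// Hypothetical consumer usage: with the "exports"/"types" fields above, both
// the runtime import and the type declarations resolve to ./src/index.ts.
import { createStreamingParser, type ParserOptions } from "@byteatatime/mdstream";

const options: ParserOptions = {}; // plugins and mend are both optional
const parser = createStreamingParser(options);
```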
package/src/index.ts
CHANGED

@@ -3,6 +3,28 @@ import remarkParse from "remark-parse";
 import type { Node, Root } from "mdast";
 import type { PluggableList } from "unified";
 
+/**
+ * A function that fixes up incomplete markdown syntax so it renders correctly.
+ * Applied only to the pending (active) block — finalized blocks are never mended.
+ *
+ * The `remend` package is the recommended choice:
+ *
+ * @example
+ * ```ts
+ * import remend from "remend";
+ * const parser = createStreamingParser({ mend: remend });
+ * ```
+ *
+ * @example With options:
+ * ```ts
+ * import remend from "remend";
+ * const parser = createStreamingParser({
+ *   mend: (source) => remend(source, { links: false }),
+ * });
+ * ```
+ */
+type MendFunction = (source: string) => string;
+
 type Block = {
   readonly ast: Node;
   readonly start: number;
@@ -18,10 +40,21 @@ type ParserState = {
 
 type ParserOptions = {
   plugins?: PluggableList;
+  /**
+   * A mending function that fixes incomplete markdown syntax in the active block.
+   * Only applied to the pending block — finalized blocks are never touched.
+   *
+   * The `remend` package is the recommended mending function:
+   * ```ts
+   * import remend from "remend";
+   * const parser = createStreamingParser({ mend: remend });
+   * ```
+   */
+  mend?: MendFunction;
 };
 
 const createStreamingParser = (options: ParserOptions = {}) => {
-  const { plugins = [] } = options;
+  const { plugins = [], mend } = options;
   const state: ParserState = {
     buffer: "",
     finalizedBlocks: [],
@@ -60,10 +93,12 @@ const createStreamingParser = (options: ParserOptions = {}) => {
     const secondToLastPosition = secondToLastNode?.position;
     const lastPosition = lastNode?.position;
 
-    if (
-
-
-
+    if (
+      secondToLastNode?.type === "paragraph" &&
+      secondToLastPosition &&
+      lastPosition &&
+      secondToLastPosition.end.offset === lastPosition.start.offset
+    ) {
       finalizableNodes = finalizableNodes.slice(0, -1);
     }
   }
@@ -101,7 +136,9 @@ const createStreamingParser = (options: ParserOptions = {}) => {
   const getPendingBlock = (): Block | undefined => {
     if (!state.buffer) return undefined;
 
-    const
+    const mendedBuffer = mend ? mend(state.buffer) : state.buffer;
+
+    const ast = processor.parse(mendedBuffer) as Root;
     const tree = processor.runSync(ast) as Root;
     const children = tree.children;
 
@@ -113,13 +150,14 @@ const createStreamingParser = (options: ParserOptions = {}) => {
     if (!firstNode || !position) return undefined;
 
     const start = state.totalOffset + position.start.offset!;
-
+    // end offset is relative to the original (unmended) buffer length
+    const end = state.totalOffset + state.buffer.length;
 
     return {
       ast: firstNode,
       start,
       end,
-      source:
+      source: mendedBuffer.slice(position.start.offset!, position.end.offset!),
     };
   };
 
@@ -135,4 +173,4 @@ const createStreamingParser = (options: ParserOptions = {}) => {
 };
 
 export { createStreamingParser };
-export type { Block, ParserOptions };
+export type { Block, ParserOptions, MendFunction };
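To show the new `mend` option end to end, here is a hedged usage sketch (not taken from the package): the `append`/`blocks` API matches the tests removed below, and the `closeBold` function is purely illustrative; `remend` remains the recommended mending function per the JSDoc above.

```ts
import { createStreamingParser, type MendFunction } from "@byteatatime/mdstream";

// Illustrative mend function (an assumption, not part of the package): if the
// pending block contains an unbalanced "**", close it so the partially
// streamed bold text still renders.
const closeBold: MendFunction = (source) =>
  (source.split("**").length - 1) % 2 === 1 ? `${source}**` : source;

const parser = createStreamingParser({ mend: closeBold });

for (const chunk of ["# Title\n\nSome **bo", "ld** text"]) {
  parser.append(chunk);
  // Finalized blocks keep stable references; only the pending block is
  // re-parsed (and mended) on each append.
  console.log(parser.blocks.map((b) => `${b.ast.type}: ${b.source}`));
}
```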
package/test/finalization.test.ts
DELETED

@@ -1,115 +0,0 @@
-import { test, expect, describe } from 'bun:test';
-import { createStreamingParser } from '../src/index.ts';
-
-describe('Block Finalization', () => {
-  test('finalizes blocks when 2+ top-level nodes exist', () => {
-    const parser = createStreamingParser();
-    parser.append('# Hello\n\nThis is a paragraph.\n\n');
-    expect(parser.blocks.length).toBe(2);
-    expect(parser.blocks.map(b => b.ast.type)).toEqual(['heading', 'paragraph']);
-
-    const headingRef = parser.blocks[0];
-    parser.append('Another paragraph');
-
-    expect(parser.blocks.length).toBe(3);
-    expect(parser.blocks[0]).toBe(headingRef);
-    expect(parser.blocks.map(b => b.ast.type)).toEqual(['heading', 'paragraph', 'paragraph']);
-  });
-});
-
-describe('Block Stability', () => {
-  test('finalized blocks maintain stable object references across multiple appends', () => {
-    const parser = createStreamingParser();
-
-    parser.append('# First\n\nPara 1\n\nPara 2\n\n');
-    const block0 = parser.blocks[0];
-    const block1 = parser.blocks[1];
-
-    expect(parser.blocks.length).toBe(3);
-
-    parser.append('Para 3');
-    const newBlock2 = parser.blocks[2]!;
-
-    expect(parser.blocks.length).toBe(4);
-    expect(parser.blocks[0]).toBe(block0);
-    expect(parser.blocks[1]).toBe(block1);
-    expect(newBlock2.ast.type).toBe('paragraph');
-
-    parser.append('\n\nPara 4');
-    expect(parser.blocks.length).toBe(5);
-    expect(parser.blocks[0]).toBe(block0);
-    expect(parser.blocks[1]).toBe(block1);
-    expect(parser.blocks[2]).toBe(newBlock2);
-  });
-
-  test('pending block gets new reference each cycle while finalized blocks stay stable', () => {
-    const parser = createStreamingParser();
-
-    parser.append('# Heading\n\nPara 1');
-    const block0 = parser.blocks[0]!;
-    const pending1 = parser.blocks[1]!;
-
-    expect(parser.blocks.length).toBe(2);
-
-    parser.append(' continues');
-    const pending2 = parser.blocks[1]!;
-
-    expect(parser.blocks.length).toBe(2);
-    expect(parser.blocks[0]).toBe(block0);
-    expect(pending1).not.toBe(pending2);
-    expect(pending1.source).toBe('Para 1');
-    expect(pending2.source).toBe('Para 1 continues');
-  });
-
-  test('blocks array can be recreated but finalized block references remain stable', () => {
-    const parser = createStreamingParser();
-
-    parser.append('# First\n\nSecond\n\nThird\n\n');
-    const firstRef = parser.blocks[0];
-    const secondRef = parser.blocks[1];
-
-    const firstArray = parser.blocks;
-    parser.append('Fourth');
-
-    const secondArray = parser.blocks;
-
-    expect(firstArray).not.toBe(secondArray);
-    expect(secondArray[0]).toBe(firstRef);
-    expect(secondArray[1]).toBe(secondRef);
-    expect(secondArray.length).toBe(4);
-  });
-
-  test('all finalized blocks keep references when many blocks accumulate', () => {
-    const parser = createStreamingParser();
-    const refs: any[] = [];
-
-    parser.append('# 1\n\n2\n\n3\n\n4\n\n5\n\n6\n\n');
-    for (let i = 0; i < 5; i++) {
-      refs.push(parser.blocks[i]);
-    }
-
-    parser.append('7\n\n8\n\n9\n\n');
-
-    for (let i = 0; i < 5; i++) {
-      expect(parser.blocks[i]).toBe(refs[i]);
-    }
-
-    expect(parser.blocks.length).toBe(9);
-  });
-
-  test('block stability with different markdown types', () => {
-    const parser = createStreamingParser();
-
-    parser.append('# Heading\n\n- List item\n\n```\ncode\n```\n\nParagraph\n\n');
-    const heading = parser.blocks[0];
-    const list = parser.blocks[1];
-    const code = parser.blocks[2];
-
-    parser.append('More text');
-
-    expect(parser.blocks[0]).toBe(heading);
-    expect(parser.blocks[1]).toBe(list);
-    expect(parser.blocks[2]).toBe(code);
-    expect(parser.blocks[3]!.ast.type).toBe('paragraph');
-  });
-});
package/test/index.test.ts
DELETED

@@ -1,48 +0,0 @@
-import { test, expect, describe } from 'bun:test';
-import { createStreamingParser } from '../src/index.ts';
-
-describe('Initialization', () => {
-  test('creates empty blocks initially', () => {
-    const parser = createStreamingParser();
-    expect(parser.blocks).toEqual([]);
-  });
-});
-
-describe('Basic Parsing', () => {
-  test('parses heading', () => {
-    const parser = createStreamingParser();
-    parser.append('# Hello');
-
-    const block = parser.blocks[0];
-    expect(block?.ast.type).toBe('heading');
-    expect(block?.source).toBe('# Hello');
-  });
-
-  test('parses multiple blocks', () => {
-    const parser = createStreamingParser();
-    parser.append('# Hello\n\nParagraph one\n\nParagraph two');
-
-    const types = parser.blocks.map((b) => b.ast.type);
-    expect(types).toEqual(['heading', 'paragraph', 'paragraph']);
-  });
-
-  test('calculates correct block offsets', () => {
-    const parser = createStreamingParser();
-    parser.append('# Hello\n\nWorld');
-
-    const [heading, paragraph] = parser.blocks;
-    expect(heading?.start).toBe(0);
-    expect(heading?.end).toBe(7);
-    expect(paragraph?.start).toBe(9);
-    expect(paragraph?.end).toBe(14);
-  });
-
-  test('captures heading depth', () => {
-    const parser = createStreamingParser();
-    parser.append('# Heading');
-
-    const heading = parser.blocks[0]?.ast as any;
-    expect(heading?.type).toBe('heading');
-    expect(heading?.depth).toBe(1);
-  });
-});
package/test/plugins.test.ts
DELETED

@@ -1,46 +0,0 @@
-import { test, expect, describe } from "bun:test";
-import { createStreamingParser } from "../src/index.ts";
-
-describe("Custom Plugin Support", () => {
-  test("accepts plugins in options", () => {
-    const customPlugin = () => (tree: any) => {
-      for (const child of tree.children) {
-        (child as any).custom = true;
-      }
-    };
-
-    const parser = createStreamingParser({
-      plugins: [customPlugin],
-    });
-
-    parser.append("# Hello");
-
-    const block = parser.blocks[0];
-    expect(block?.ast.type).toBe("heading");
-    expect((block?.ast as any).custom).toBe(true);
-  });
-
-  test("accepts multiple plugins", () => {
-    const plugin1 = () => (tree: any) => {
-      for (const child of tree.children) {
-        (child as any).plugin1Applied = true;
-      }
-    };
-
-    const plugin2 = () => (tree: any) => {
-      for (const child of tree.children) {
-        (child as any).plugin2Applied = true;
-      }
-    };
-
-    const parser = createStreamingParser({
-      plugins: [plugin1, plugin2],
-    });
-
-    parser.append("# Hello");
-
-    const block = parser.blocks[0];
-    expect((block?.ast as any).plugin1Applied).toBe(true);
-    expect((block?.ast as any).plugin2Applied).toBe(true);
-  });
-});
package/test/setext.test.ts
DELETED

@@ -1,103 +0,0 @@
-import { test, expect, describe } from 'bun:test';
-import { createStreamingParser } from '../src/index.ts';
-
-describe('Setext Headings', () => {
-  test('setext heading with === creates h1', () => {
-    const parser = createStreamingParser();
-    parser.append('Hello World\n===');
-
-    const block = parser.blocks[0];
-    expect(block?.ast.type).toBe('heading');
-    const heading = block?.ast as any;
-    expect(heading.depth).toBe(1);
-  });
-
-  test('setext heading with --- creates h2', () => {
-    const parser = createStreamingParser();
-    parser.append('Hello World\n---');
-
-    const block = parser.blocks[0];
-    expect(block?.ast.type).toBe('heading');
-    const heading = block?.ast as any;
-    expect(heading.depth).toBe(2);
-  });
-
-  test('setext heading does not prematurely finalize paragraph when underline arrives', () => {
-    const parser = createStreamingParser();
-    parser.append('Hello World\n');
-
-    expect(parser.blocks[0]?.ast.type).toBe('paragraph');
-
-    parser.append('===');
-
-    const block = parser.blocks[0];
-    expect(block?.ast.type).toBe('heading');
-    const heading = block?.ast as any;
-    expect(heading.depth).toBe(1);
-  });
-
-  test('setext heading with preceding content finalizes correctly', () => {
-    const parser = createStreamingParser();
-    parser.append('# First\n\nHello World\n');
-
-    expect(parser.blocks[0]?.ast.type).toBe('heading');
-    expect(parser.blocks[1]?.ast.type).toBe('paragraph');
-
-    parser.append('===\n\nSecond paragraph');
-
-    expect(parser.blocks.length).toBe(3);
-    expect(parser.blocks[0]?.ast.type).toBe('heading');
-    expect(parser.blocks[1]?.ast.type).toBe('heading');
-    expect(parser.blocks[2]?.ast.type).toBe('paragraph');
-  });
-
-  test('setext underline only affects preceding paragraph', () => {
-    const parser = createStreamingParser();
-    parser.append('First paragraph\n\nSecond paragraph\n===\n\nThird paragraph');
-
-    expect(parser.blocks.length).toBe(3);
-    expect(parser.blocks[0]?.ast.type).toBe('paragraph');
-    expect(parser.blocks[1]?.ast.type).toBe('heading');
-    expect(parser.blocks[2]?.ast.type).toBe('paragraph');
-  });
-
-  test('setext underline after blank line is separate paragraph', () => {
-    const parser = createStreamingParser();
-    parser.append('Paragraph 1\n\n');
-
-    expect(parser.blocks.length).toBe(1);
-    expect(parser.blocks[0]?.ast.type).toBe('paragraph');
-    expect(parser.blocks[0]?.source).toBe('Paragraph 1');
-
-    parser.append('===');
-
-    expect(parser.blocks.length).toBe(2);
-    expect(parser.blocks[0]?.ast.type).toBe('paragraph');
-    expect(parser.blocks[0]?.source).toBe('Paragraph 1');
-    expect(parser.blocks[1]?.ast.type).toBe('paragraph');
-    expect(parser.blocks[1]?.source).toBe('===');
-  });
-
-  test('setext underline after blank line does not cause data loss', () => {
-    const parser = createStreamingParser();
-    parser.append('First paragraph\n\n');
-    parser.append('===');
-
-    const blocks = parser.blocks;
-    expect(blocks.length).toBe(2);
-
-    const hasFirstPara = blocks.some((b) => b.source.includes('First paragraph'));
-    expect(hasFirstPara).toBe(true);
-  });
-
-  test('setext underline with multiple blank lines', () => {
-    const parser = createStreamingParser();
-    parser.append('First\n\n\n===');
-
-    expect(parser.blocks.length).toBe(2);
-    expect(parser.blocks[0]?.ast.type).toBe('paragraph');
-    expect(parser.blocks[0]?.source).toBe('First');
-    expect(parser.blocks[1]?.ast.type).toBe('paragraph');
-    expect(parser.blocks[1]?.source).toBe('===');
-  });
-});
package/tsconfig.json
DELETED

@@ -1,29 +0,0 @@
-{
-  "compilerOptions": {
-    // Environment setup & latest features
-    "lib": ["ESNext"],
-    "target": "ESNext",
-    "module": "Preserve",
-    "moduleDetection": "force",
-    "jsx": "react-jsx",
-    "allowJs": true,
-
-    // Bundler mode
-    "moduleResolution": "bundler",
-    "allowImportingTsExtensions": true,
-    "verbatimModuleSyntax": true,
-    "noEmit": true,
-
-    // Best practices
-    "strict": true,
-    "skipLibCheck": true,
-    "noFallthroughCasesInSwitch": true,
-    "noUncheckedIndexedAccess": true,
-    "noImplicitOverride": true,
-
-    // Some stricter flags (disabled by default)
-    "noUnusedLocals": false,
-    "noUnusedParameters": false,
-    "noPropertyAccessFromIndexSignature": false
-  }
-}