@funcstache/stache-stream 0.2.2 → 0.2.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (109)
  1. package/.eslintrc.json +30 -0
  2. package/.swcrc +29 -0
  3. package/DEV.md +84 -0
  4. package/README.md +145 -0
  5. package/TASKS.md +13 -0
  6. package/TODO.md +28 -0
  7. package/docs/.nojekyll +1 -0
  8. package/docs/assets/hierarchy.js +1 -0
  9. package/docs/assets/highlight.css +120 -0
  10. package/docs/assets/icons.js +18 -0
  11. package/docs/assets/icons.svg +1 -0
  12. package/docs/assets/main.js +60 -0
  13. package/docs/assets/navigation.js +1 -0
  14. package/docs/assets/search.js +1 -0
  15. package/docs/assets/style.css +1633 -0
  16. package/docs/classes/StacheTransformStream.html +13 -0
  17. package/docs/hierarchy.html +1 -0
  18. package/docs/index.html +73 -0
  19. package/docs/interfaces/Context.html +3 -0
  20. package/docs/interfaces/ContextProvider.html +10 -0
  21. package/docs/interfaces/PartialTagContextLambda.html +11 -0
  22. package/docs/interfaces/PartialTagContextLambdaResult.html +7 -0
  23. package/docs/interfaces/SectionTagCallback.html +12 -0
  24. package/docs/interfaces/SectionTagContextRecord.html +4 -0
  25. package/docs/interfaces/Tag.html +45 -0
  26. package/docs/interfaces/VariableTagContextLambda.html +4 -0
  27. package/docs/interfaces/VariableTagContextRecord.html +3 -0
  28. package/docs/media/StacheStream.ts +79 -0
  29. package/docs/modules.html +1 -0
  30. package/docs/types/ContextTypes.html +3 -0
  31. package/docs/types/JsonType.html +2 -0
  32. package/docs/types/PartialTagContext.html +4 -0
  33. package/docs/types/SectionTagContext.html +4 -0
  34. package/docs/types/TemplateName.html +9 -0
  35. package/docs/types/VariableTagContext.html +4 -0
  36. package/docs/types/VariableTagContextPrimitive.html +3 -0
  37. package/docs-assets/images/context-dotted-found.png +0 -0
  38. package/docs-assets/images/context-dotted-not-found.png +0 -0
  39. package/docs-assets/images/context-not-found.png +0 -0
  40. package/package.json +3 -6
  41. package/project.json +26 -0
  42. package/src/global.d.ts +10 -0
  43. package/src/index.ts +67 -0
  44. package/src/lib/parse/Parse.spec.ts +50 -0
  45. package/src/lib/parse/Parse.ts +92 -0
  46. package/src/lib/parse/README.md +62 -0
  47. package/src/lib/plan_base_v2.md +33 -0
  48. package/src/lib/plan_comment.md +53 -0
  49. package/src/lib/plan_implicit-iterator.md +213 -0
  50. package/src/lib/plan_inverted-sections.md +160 -0
  51. package/src/lib/plan_partials.md +237 -0
  52. package/src/lib/plan_sections.md +167 -0
  53. package/src/lib/plan_stache-stream.md +110 -0
  54. package/src/lib/plan_whitespace.md +98 -0
  55. package/src/lib/queue/Queue.spec.ts +275 -0
  56. package/src/lib/queue/Queue.ts +253 -0
  57. package/src/lib/queue/README.md +110 -0
  58. package/src/lib/stache-stream/README.md +45 -0
  59. package/src/lib/stache-stream/StacheStream.spec.ts +107 -0
  60. package/src/lib/stache-stream/StacheStream.ts +79 -0
  61. package/src/lib/tag/README.md +95 -0
  62. package/src/lib/tag/Tag.spec.ts +212 -0
  63. package/src/lib/tag/Tag.ts +295 -0
  64. package/src/lib/template/README.md +102 -0
  65. package/src/lib/template/Template-comment.spec.ts +76 -0
  66. package/src/lib/template/Template-inverted-section.spec.ts +85 -0
  67. package/src/lib/template/Template-partials.spec.ts +125 -0
  68. package/src/lib/template/Template-section.spec.ts +142 -0
  69. package/src/lib/template/Template.spec.ts +178 -0
  70. package/src/lib/template/Template.ts +614 -0
  71. package/src/lib/test/streams.ts +36 -0
  72. package/src/lib/tokenize/README.md +97 -0
  73. package/src/lib/tokenize/Tokenize.spec.ts +364 -0
  74. package/src/lib/tokenize/Tokenize.ts +374 -0
  75. package/src/lib/{types.d.ts → types.ts} +73 -25
  76. package/tsconfig.json +21 -0
  77. package/tsconfig.lib.json +16 -0
  78. package/tsconfig.spec.json +21 -0
  79. package/typedoc.mjs +15 -0
  80. package/vite.config.ts +27 -0
  81. package/vitest.setup.ts +6 -0
  82. package/src/global.d.js +0 -8
  83. package/src/global.d.js.map +0 -1
  84. package/src/index.d.ts +0 -7
  85. package/src/index.js +0 -24
  86. package/src/index.js.map +0 -1
  87. package/src/lib/parse/Parse.d.ts +0 -14
  88. package/src/lib/parse/Parse.js +0 -79
  89. package/src/lib/parse/Parse.js.map +0 -1
  90. package/src/lib/queue/Queue.d.ts +0 -32
  91. package/src/lib/queue/Queue.js +0 -181
  92. package/src/lib/queue/Queue.js.map +0 -1
  93. package/src/lib/stache-stream/StacheStream.d.ts +0 -22
  94. package/src/lib/stache-stream/StacheStream.js +0 -71
  95. package/src/lib/stache-stream/StacheStream.js.map +0 -1
  96. package/src/lib/tag/Tag.d.ts +0 -33
  97. package/src/lib/tag/Tag.js +0 -231
  98. package/src/lib/tag/Tag.js.map +0 -1
  99. package/src/lib/template/Template.d.ts +0 -18
  100. package/src/lib/template/Template.js +0 -428
  101. package/src/lib/template/Template.js.map +0 -1
  102. package/src/lib/test/streams.d.ts +0 -2
  103. package/src/lib/test/streams.js +0 -39
  104. package/src/lib/test/streams.js.map +0 -1
  105. package/src/lib/tokenize/Tokenize.d.ts +0 -22
  106. package/src/lib/tokenize/Tokenize.js +0 -268
  107. package/src/lib/tokenize/Tokenize.js.map +0 -1
  108. package/src/lib/types.js +0 -33
  109. package/src/lib/types.js.map +0 -1
@@ -0,0 +1,275 @@
1
+ import { TextDecoderStream } from "node:stream/web";
2
+ import { createReadableStream } from "../test/streams";
3
+ import { Parse } from "../parse/Parse";
4
+ import {
5
+ Tokenize,
6
+ type TokenizeTagEvent,
7
+ type TokenizeTextEvent,
8
+ } from "../tokenize/Tokenize";
9
+ import { Queue, type TokenizeAllEvent } from "./Queue";
10
+
11
+ // (process.env as any).LOG_LEVEL = "debug";
12
+
13
// End-to-end tests for Queue: Parse feeds characters into Tokenize, whose
// tokens are pushed into the Queue under test. Each test waits for the
// "inactive" event (fired when the end-of-stream `null` token is processed)
// before asserting on the recorded handler calls.
//
// NOTE(review): `resolve` is assigned only after `read()` has been kicked
// off; this relies on the pipeline being asynchronous. If "inactive" ever
// fired synchronously, `resolve()` would be called while still undefined —
// confirm against Parse/Tokenize scheduling.
describe("Queue", () => {
  it("queues variable", async () => {
    let resolve: () => void;
    const mockHandler = vitest.fn();
    const q = new Queue()
      .on("inactive", () => Promise.resolve(resolve()))
      .on("text", mockHandler)
      .on("variable", mockHandler);

    // Wire the pipeline: Parse (chars) -> Tokenize (tokens) -> Queue.
    const t = new Tokenize().on("token", q.push.bind(q));

    new Parse({ onChar: t.push.bind(t) }).read(
      createReadableStream("ab{{m}}yz").pipeThrough(new TextDecoderStream())
    );

    // Block until the Queue reports end-of-stream.
    await new Promise<void>((res) => {
      resolve = res;
    });

    // Expected order: text("ab"), variable("m"), text("yz").
    expect(mockHandler).toHaveBeenCalledTimes(3);

    let [textEvt] = mockHandler.mock.calls[0] as [TokenizeTextEvent];
    expect(textEvt.data).toBe("ab");
    expect(textEvt.type).toBe("text");

    const [evt] = mockHandler.mock.calls[1] as [TokenizeTagEvent];
    expect(evt.data.value).toBe("m");
    expect(evt.data.type).toBe("variable");

    [textEvt] = mockHandler.mock.calls[2] as [TokenizeTextEvent];
    expect(textEvt.data).toBe("yz");
    expect(textEvt.type).toBe("text");
  });

  it("queues partial", async () => {
    let resolve: () => void;
    const mockHandler = vitest.fn();
    const q = new Queue()
      .on("inactive", () => Promise.resolve(resolve()))
      .on("text", mockHandler)
      .on("partial", mockHandler);

    const t = new Tokenize().on("token", q.push.bind(q));

    new Parse({ onChar: t.push.bind(t) }).read(
      createReadableStream("ab{{>m}}yz").pipeThrough(new TextDecoderStream())
    );

    await new Promise<void>((res) => {
      resolve = res;
    });

    // Expected order: text("ab"), partial(">m"), text("yz").
    expect(mockHandler).toHaveBeenCalledTimes(3);

    let [textEvt] = mockHandler.mock.calls[0] as [TokenizeTextEvent];
    expect(textEvt.data).toBe("ab");
    expect(textEvt.type).toBe("text");

    const [evt] = mockHandler.mock.calls[1] as [TokenizeTagEvent];
    expect(evt.data.value).toBe(">m");
    expect(evt.data.type).toBe("partial");

    [textEvt] = mockHandler.mock.calls[2] as [TokenizeTextEvent];
    expect(textEvt.data).toBe("yz");
    expect(textEvt.type).toBe("text");
  });

  it("queues two simple tags", async () => {
    let resolve: () => void;
    const mockHandler = vitest.fn();
    const q = new Queue()
      .on("inactive", () => Promise.resolve(resolve()))
      .on("text", mockHandler)
      .on("partial", mockHandler)
      .on("variable", mockHandler);

    const t = new Tokenize().on("token", q.push.bind(q));

    new Parse({ onChar: t.push.bind(t) }).read(
      createReadableStream("ab{{m}}{{>n}}yz").pipeThrough(
        new TextDecoderStream()
      )
    );

    await new Promise<void>((res) => {
      resolve = res;
    });

    // Back-to-back tags are emitted individually, in source order.
    expect(mockHandler).toHaveBeenCalledTimes(4);

    let [textEvt] = mockHandler.mock.calls[0] as [TokenizeTextEvent];
    expect(textEvt.data).toBe("ab");
    expect(textEvt.type).toBe("text");

    let [evt] = mockHandler.mock.calls[1] as [TokenizeTagEvent];
    expect(evt.data.value).toBe("m");
    expect(evt.data.type).toBe("variable");

    [evt] = mockHandler.mock.calls[2] as [TokenizeTagEvent];
    expect(evt.data.value).toBe(">n");
    expect(evt.data.type).toBe("partial");

    [textEvt] = mockHandler.mock.calls[3] as [TokenizeTextEvent];
    expect(textEvt.data).toBe("yz");
    expect(textEvt.type).toBe("text");
  });

  it("queues section tag", async () => {
    let resolve: () => void;
    const mockHandler = vitest.fn();
    const q = new Queue()
      .on("inactive", () => Promise.resolve(resolve()))
      .on("text", mockHandler)
      .on("partial", mockHandler)
      .on("section", mockHandler)
      .on("variable", mockHandler);

    const t = new Tokenize().on("token", q.push.bind(q));

    new Parse({ onChar: t.push.bind(t) }).read(
      createReadableStream("ab{{#m}}{{n}}{{/m}}yz").pipeThrough(
        new TextDecoderStream()
      )
    );

    await new Promise<void>((res) => {
      resolve = res;
    });

    // The whole section is delivered as ONE call carrying an array of events.
    expect(mockHandler).toHaveBeenCalledTimes(3);

    let [textEvt] = mockHandler.mock.calls[0] as [TokenizeTextEvent];
    expect(textEvt.data).toBe("ab");
    expect(textEvt.type).toBe("text");

    const [acc] = mockHandler.mock.calls[1] as [TokenizeTagEvent[]];
    expect(acc).toHaveLength(3);

    // Opening tag, inner variable, and closing tag — inclusive.
    const [one, two, three] = acc;
    expect(one.data.value).toBe("#m");
    expect(one.data.type).toBe("section");
    expect(two.data.value).toBe("n");
    expect(two.data.type).toBe("variable");
    expect(three.data.value).toBe("/m");
    expect(three.data.type).toBe("end");

    [textEvt] = mockHandler.mock.calls[2] as [TokenizeTextEvent];
    expect(textEvt.data).toBe("yz");
    expect(textEvt.type).toBe("text");
  });

  it("accumulates text inside a section", async () => {
    let resolve: () => void;
    const mockHandler = vitest.fn();
    const q = new Queue()
      .on("inactive", () => Promise.resolve(resolve()))
      .on("section", mockHandler)
      .on("text", mockHandler);

    const t = new Tokenize().on("token", q.push.bind(q));

    new Parse({ onChar: t.push.bind(t) }).read(
      createReadableStream("{{#m}}text{{/m}}").pipeThrough(
        new TextDecoderStream()
      )
    );

    await new Promise<void>((res) => {
      resolve = res;
    });

    // No standalone "text" event: the text is captured inside the section.
    expect(mockHandler).toHaveBeenCalledTimes(1);

    const [acc] = mockHandler.mock.calls[0] as [TokenizeAllEvent[]];
    expect(acc).toHaveLength(3);

    const [one, two, three] = acc;
    expect(one.data.toString()).toBe("{{#m}}");
    expect(two.data).toBe("text");
    expect(two.type).toBe("text");
    expect(three.data.toString()).toBe("{{/m}}");
  });

  it("accumulates mixed text and tags inside a section", async () => {
    let resolve: () => void;
    const mockHandler = vitest.fn();
    const q = new Queue()
      .on("inactive", () => Promise.resolve(resolve()))
      .on("section", mockHandler)
      .on("text", mockHandler);

    const t = new Tokenize().on("token", q.push.bind(q));

    new Parse({ onChar: t.push.bind(t) }).read(
      createReadableStream("{{#m}} L {{n}} R {{/m}}").pipeThrough(
        new TextDecoderStream()
      )
    );

    await new Promise<void>((res) => {
      resolve = res;
    });

    expect(mockHandler).toHaveBeenCalledTimes(1);

    // Section payload preserves the interleaving of text and tag tokens.
    const [acc] = mockHandler.mock.calls[0] as [TokenizeAllEvent[]];
    expect(acc).toHaveLength(5);

    const [one, two, three, four, five] = acc;
    expect(one.data.toString()).toBe("{{#m}}");
    expect(two.data).toBe(" L ");
    expect(two.type).toBe("text");
    expect(three.data.toString()).toBe("{{n}}");
    expect(four.data).toBe(" R ");
    expect(four.type).toBe("text");
    expect(five.data.toString()).toBe("{{/m}}");
  });

  it("queues implicit tag", async () => {
    let resolve: () => void;
    const mockHandler = vitest.fn();
    const q = new Queue()
      .on("implicit", mockHandler)
      .on("inactive", () => Promise.resolve(resolve()))
      .on("text", mockHandler)
      .on("partial", mockHandler)
      .on("section", mockHandler)
      .on("variable", mockHandler);

    const t = new Tokenize().on("token", q.push.bind(q));

    new Parse({ onChar: t.push.bind(t) }).read(
      createReadableStream("ab{{#m}}{{.}}{{/m}}yz").pipeThrough(
        new TextDecoderStream()
      )
    );

    await new Promise<void>((res) => {
      resolve = res;
    });

    expect(mockHandler).toHaveBeenCalledTimes(3);

    let [textEvt] = mockHandler.mock.calls[0] as [TokenizeTextEvent];
    expect(textEvt.data).toBe("ab");
    expect(textEvt.type).toBe("text");

    const [acc] = mockHandler.mock.calls[1] as [TokenizeTagEvent[]];
    expect(acc).toHaveLength(3);

    // The implicit iterator {{.}} is accumulated inside the section
    // like any other tag; no standalone "implicit" event fires here.
    const [one, two, three] = acc;
    expect(one.data.value).toBe("#m");
    expect(one.data.type).toBe("section");
    expect(two.data.value).toBe(".");
    expect(two.data.type).toBe("implicit");
    expect(three.data.value).toBe("/m");
    expect(three.data.type).toBe("end");

    [textEvt] = mockHandler.mock.calls[2] as [TokenizeTextEvent];
    expect(textEvt.data).toBe("yz");
    expect(textEvt.type).toBe("text");
  });
});
@@ -0,0 +1,253 @@
1
+ import { Log } from "@funcstache/logger";
2
+ import { TokenizeTagEvent, TokenizeTextEvent } from "../tokenize/Tokenize";
3
+
4
+ export class Queue implements AsyncEventEmitter<QueueEventMap> {
5
+ #logger = new Log({
6
+ category: "QUE",
7
+ level: (process.env as any).LOG_LEVEL || "warn",
8
+ });
9
+
10
+ #active = false;
11
+ #listeners: Listeners<QueueEventMap>[] = [];
12
+ #accumulator: Accumulator | undefined;
13
+ #tokenQueue: TokenizeEvent[] = [];
14
+
15
+ get tokenQueue() {
16
+ return Array.from(this.#tokenQueue);
17
+ }
18
+
19
+ on<E extends keyof QueueEventMap>(
20
+ eventName: Listeners<QueueEventMap, E>["eventName"],
21
+ listener: Listeners<QueueEventMap, E>["listener"]
22
+ ) {
23
+ if (
24
+ !this.#listeners.some(
25
+ ({ eventName: evn, listener: lst }) =>
26
+ eventName === evn && listener === lst
27
+ )
28
+ ) {
29
+ const l = listener as any;
30
+ this.#listeners.push({ eventName, listener: l });
31
+ }
32
+
33
+ return this;
34
+ }
35
+
36
+ async push(token: TokenizeEvent): Promise<void> {
37
+ // this.#logger.debug(() => [`••• push: token=`, token]);
38
+
39
+ this.#tokenQueue.push(token);
40
+ this.#processToken();
41
+ }
42
+
43
+ async #emit<K extends keyof QueueEventMap>(
44
+ eventName: K,
45
+ event: QueueEventMap[K]
46
+ ): Promise<void> {
47
+ for (const item of this.#listeners) {
48
+ if (item.eventName !== eventName) {
49
+ continue;
50
+ }
51
+
52
+ this.#logger.debug(() => [`••• #emit: event=`, event]);
53
+ await item.listener(event);
54
+ }
55
+ }
56
+
57
+ async #processToken(): Promise<void> {
58
+ if (this.#active) {
59
+ return;
60
+ }
61
+
62
+ this.#active = true;
63
+
64
+ const token = this.#tokenQueue.shift();
65
+ if (token === undefined) {
66
+ return;
67
+ }
68
+
69
+ // this.#logger.debug(() => [
70
+ // `••• #processToken: token='${JSON.stringify(token)}'`,
71
+ // ]);
72
+
73
+ if (token === null) {
74
+ await this.#emit("inactive", undefined);
75
+ return;
76
+ }
77
+
78
+ if (token.type === "text") {
79
+ if (this.#accumulator) {
80
+ this.#accumulator.push(token);
81
+ } else {
82
+ await this.#emit("text", token);
83
+ }
84
+ } else if (token.type === "tag") {
85
+ // this.#logger.debug(() => [
86
+ // ` #processToken: token type='${token.data.type}'`,
87
+ // ]);
88
+
89
+ switch (token.data.type) {
90
+ case "comment":
91
+ case "implicit":
92
+ case "partial":
93
+ case "variable": {
94
+ if (this.#accumulator) {
95
+ this.#accumulator.push(token);
96
+ } else {
97
+ await this.#emit(token.data.type, token);
98
+ }
99
+
100
+ break;
101
+ }
102
+
103
+ case "inverted":
104
+ case "section": {
105
+ if (!this.#accumulator) {
106
+ const acc: Accumulator = [token] as any;
107
+ acc.level = 0;
108
+ this.#accumulator = acc;
109
+ } else {
110
+ this.#accumulator.level++;
111
+ this.#accumulator.push(token);
112
+ }
113
+
114
+ break;
115
+ }
116
+
117
+ case "end": {
118
+ if (!this.#accumulator) {
119
+ this.#emit(
120
+ "error",
121
+ new QueueError<TokenizeAllEvent>(
122
+ `Received end tag '${token.data.value}' without a start tag.`,
123
+ token
124
+ )
125
+ );
126
+ break;
127
+ }
128
+
129
+ // Received an end tag, but we are not at the lowest level. Add this tag to the
130
+ // accumulator and reduce the level by one then exit.
131
+ if (0 < this.#accumulator.level) {
132
+ this.#accumulator.level--;
133
+ this.#accumulator.push(token);
134
+ break;
135
+ }
136
+
137
+ // We are at accumulator level 0 so if we've done everything right thing we should emit a
138
+ // "section" event with all the accumulated tags...
139
+ const start = this.#accumulator[0];
140
+ if (!isTagEvent(start)) {
141
+ this.#emit(
142
+ "error",
143
+ new QueueError<TokenizeAllEvent>(
144
+ `The start tag '${JSON.stringify(
145
+ start
146
+ )}' is not a TokenizeTagEvent.`,
147
+ start
148
+ )
149
+ );
150
+ break;
151
+ }
152
+
153
+ if (start.data.key !== token.data.key) {
154
+ this.#emit(
155
+ "error",
156
+ new QueueError<TokenizeAllEvent[]>(
157
+ `The start tag '${JSON.stringify(
158
+ start
159
+ )}' does not match the end tag '${JSON.stringify(token)}'`,
160
+ [start, token]
161
+ )
162
+ );
163
+ break;
164
+ }
165
+
166
+ if (
167
+ start.data.type === "inverted" ||
168
+ start.data.type === "section"
169
+ ) {
170
+ await this.#emit(start.data.type, [
171
+ ...Array.from(this.#accumulator),
172
+ token,
173
+ ]);
174
+
175
+ this.#accumulator = undefined;
176
+ } else {
177
+ this.#emit(
178
+ "error",
179
+ new QueueError<TokenizeAllEvent>(
180
+ `The start tag '${JSON.stringify(start)}' is not a section tag`,
181
+ start
182
+ )
183
+ );
184
+ }
185
+
186
+ break;
187
+ }
188
+ }
189
+ }
190
+
191
+ // this.#logger.debug(() => [
192
+ // ` #processToken: this.#accumulator.level=${
193
+ // this.#accumulator?.level
194
+ // }, this.#accumulator='${JSON.stringify(this.#accumulator)}'`,
195
+ // ,
196
+ // ]);
197
+
198
+ this.#active = false;
199
+
200
+ if (this.#tokenQueue.length) {
201
+ this.#processToken();
202
+ }
203
+ }
204
+ }
205
+
206
/**
 * Structural error emitted on the Queue's "error" event (unmatched end tag,
 * start/end key mismatch, or a malformed accumulator head). The offending
 * token — or the [start, end] pair for a mismatch — is carried in `data`.
 */
export class QueueError<
  D extends TokenizeAllEvent | TokenizeAllEvent[] = TokenizeAllEvent
> extends Error {
  // The token(s) that triggered the error.
  readonly data: D;

  constructor(message: string, token: D) {
    super(message);

    this.data = token;
  }
}
217
+
218
+ function isTagEvent(obj: any): obj is TokenizeTagEvent {
219
+ return obj && "type" in obj && obj.type === "tag";
220
+ }
221
+
222
/** Minimal chainable async event-emitter contract implemented by Queue. */
interface AsyncEventEmitter<T extends object, K extends keyof T = keyof T> {
  on: (
    eventName: K,
    listener: (event: T[K]) => Promise<void>
  ) => AsyncEventEmitter<T>;
}

// Array of accumulated tokens plus the current section nesting depth.
type Accumulator = TokenizeAllEvent[] & {
  level: number;
};

// A registered (eventName, listener) pair.
interface Listeners<T extends object, K extends keyof T = keyof T> {
  eventName: K;
  listener: (event: T[K]) => Promise<void>;
}

/** Any token produced by Tokenize: a tag event or a text event. */
export type TokenizeAllEvent = TokenizeTagEvent | TokenizeTextEvent;

// `null` is the end-of-stream marker accepted by Queue.push.
type TokenizeEvent = TokenizeAllEvent | null;

/** Map of queue event names to their listener payload types. */
type QueueEventMap = {
  comment: TokenizeTagEvent;
  // end: TokenizeTagEvent;
  error: QueueError<TokenizeAllEvent | TokenizeAllEvent[]>;
  implicit: TokenizeTagEvent;
  inactive: undefined;
  inverted: TokenizeAllEvent[];
  partial: TokenizeTagEvent;
  section: TokenizeAllEvent[];
  text: TokenizeTextEvent;
  variable: TokenizeTagEvent;
};
@@ -0,0 +1,110 @@
1
+ # Queue
2
+
3
+ `Queue` receives a stream of tokenized mustache events from `Tokenize` and re-emits them as higher-level, typed events. Its primary job is to accumulate the tokens that belong to a section or inverted section and emit them together as a single event once the closing tag is seen.
4
+
5
+ ## How it works
6
+
7
+ ### Token input
8
+
9
+ Tokens are pushed one at a time via `push(token)`. `null` signals end-of-stream. Tokens are added to an internal array (`#tokenQueue`) and processed serially — `#processToken` sets `#active = true` while running and recurses at the end if more tokens arrived in the meantime, preventing concurrent processing.
10
+
11
+ ### Simple tokens (no accumulator active)
12
+
13
+ When no section is open, simple tokens are emitted immediately:
14
+
15
+ | Token type | Event emitted | Payload |
16
+ | ---------- | ------------- | ------------------- |
17
+ | `text` | `text` | `TokenizeTextEvent` |
18
+ | `variable` | `variable` | `TokenizeTagEvent` |
19
+ | `implicit` | `implicit` | `TokenizeTagEvent` |
20
+ | `partial` | `partial` | `TokenizeTagEvent` |
21
+ | `null` | `inactive` | `undefined` |
22
+
23
+ ### Section / inverted-section accumulation
24
+
25
+ When a `section` or `inverted` opening tag is received, `Queue` creates an `Accumulator` — an array of `TokenizeAllEvent` with an extra `level` counter — and adds the opening tag to it. All subsequent tokens (text, variables, nested sections, etc.) are pushed into the accumulator instead of being emitted.
26
+
27
+ Nested sections increment `level`; their matching `end` tags decrement it. When an `end` tag is received at `level === 0`, the accumulator (opening tag through closing tag inclusive) is emitted as a single `section` or `inverted` event and the accumulator is cleared.
28
+
29
+ ### Error cases
30
+
31
+ `Queue` emits an `error` event (with a `QueueError` payload) for:
32
+
33
+ - An `end` tag received when no accumulator is active (unmatched close).
34
+ - The first element of the accumulator is not a tag event.
35
+ - The `end` tag key does not match the opening tag key.
36
+ - The opening tag stored in the accumulator is not a `section` or `inverted` type.
37
+
38
+ ### Event registration
39
+
40
+ `on(eventName, listener)` stores async listeners. Duplicate registrations (same event name and same listener reference) are silently ignored. All listener calls are `await`ed in registration order.
41
+
42
+ ## Usage
43
+
44
+ ```ts
45
+ import { Queue } from "./Queue";
46
+ import { Tokenize } from "../tokenize/Tokenize";
47
+ import { Parse } from "../parse/Parse";
48
+
49
+ const queue = new Queue()
50
+ .on("text", async (event) => {
51
+ /* plain text node */
52
+ })
53
+ .on("variable", async (event) => {
54
+ /* {{variable}} */
55
+ })
56
+ .on("partial", async (event) => {
57
+ /* {{>partial}} */
58
+ })
59
+ .on("implicit", async (event) => {
60
+ /* {{.}} */
61
+ })
62
+ .on("section", async (events) => {
63
+ /* {{#section}}...{{/section}} */
64
+ })
65
+ .on("inverted", async (events) => {
66
+ /* {{^inverted}}...{{/inverted}} */
67
+ })
68
+ .on("error", async (err) => {
69
+ /* QueueError */
70
+ })
71
+ .on("inactive", async () => {
72
+ /* end of stream */
73
+ });
74
+
75
+ const tokenize = new Tokenize().on("token", queue.push.bind(queue));
76
+ const parse = new Parse({ onChar: tokenize.push.bind(tokenize) });
77
+
78
+ parse.read(myReadableStream);
79
+ ```
80
+
81
+ ## API
82
+
83
+ ### `queue.on(eventName, listener)`
84
+
85
+ Registers an async listener for a queue event. Returns `this` for chaining. Safe to call multiple times with the same arguments — duplicates are ignored.
86
+
87
+ ### `queue.push(token: TokenizeAllEvent | null): Promise<void>`
88
+
89
+ Pushes a token into the queue for processing. Pass `null` to signal end-of-stream.
90
+
91
+ ### `queue.tokenQueue`
92
+
93
+ Read-only snapshot of the pending token queue (copy of the internal array).
94
+
95
+ ### Events
96
+
97
+ | Event | Payload | Description |
98
+ | ---------- | -------------------- | -------------------------------------------------------- |
99
+ | `text` | `TokenizeTextEvent` | A plain-text node outside any section. |
100
+ | `variable` | `TokenizeTagEvent` | A `{{variable}}` tag outside any section. |
101
+ | `implicit` | `TokenizeTagEvent` | A `{{.}}` tag outside any section. |
102
+ | `partial` | `TokenizeTagEvent` | A `{{>partial}}` tag outside any section. |
103
+ | `section` | `TokenizeAllEvent[]` | All tokens from `{{#key}}` through `{{/key}}` inclusive. |
104
+ | `inverted` | `TokenizeAllEvent[]` | All tokens from `{{^key}}` through `{{/key}}` inclusive. |
105
+ | `error` | `QueueError` | A structural error (unmatched tags, key mismatch). |
106
+ | `inactive` | `undefined` | End of stream; all tokens have been processed. |
107
+
108
+ ### `QueueError`
109
+
110
+ Extends `Error`. Has a `data` property containing the offending `TokenizeAllEvent` or `TokenizeAllEvent[]`.
@@ -0,0 +1,45 @@
1
+ # StacheTransformStream
2
+
3
+ `StacheTransformStream` is a `TransformStream<string, string>` that renders a mustache template from a stream of string chunks. Incoming chunks are fed into the `Template` pipeline (`Parse → Tokenize → Queue → Template`) and rendered output chunks are enqueued to the readable side as they are produced.
4
+
5
+ ## How it works
6
+
7
+ ### Internal pipeline
8
+
9
+ The constructor creates an internal `ReadableStream` whose controller is captured as `push` and `close` functions. On `start`, a `Template` is instantiated with that stream and a `writeToOutput` that enqueues rendered text to the outer `TransformStream` controller.
10
+
11
+ Each chunk received by `transform` is pushed into the internal stream. When the writable side closes, `flush` closes the internal stream and waits for the `Template` to emit `"inactive"` before resolving, ensuring all rendered output is flushed before the readable side ends.
12
+
13
+ ### Error propagation
14
+
15
+ If `controller.enqueue` throws (e.g. the readable side has been cancelled), the error is captured and re-thrown so that the `inactive` promise rejects. `flush` catches that rejection and forwards it to `controller.error`, surfacing it to the pipeline consumer.
16
+
17
+ ## Usage
18
+
19
+ Pipe any `ReadableStream<string>` through `StacheTransformStream` to get a rendered `ReadableStream<string>`:
20
+
21
+ ```ts
22
+ import { StacheTransformStream } from "./StacheStream";
23
+
24
+ const stache = new StacheTransformStream({
25
+ contextProvider: { context: { name: "world" } },
26
+ });
27
+
28
+ const rendered = templateStream.pipeThrough(stache);
29
+
30
+ for await (const chunk of rendered) {
31
+ process.stdout.write(chunk);
32
+ }
33
+ ```
34
+
35
+ ## API
36
+
37
+ ### `new StacheTransformStream(options?)`
38
+
39
+ | Option | Type | Description |
40
+ | ----------------- | ---------------------------- | ------------------------------------------------------------- |
41
+ | `contextProvider` | `ContextProvider` (optional) | Supplies the data context and parent lookup for the template. |
42
+
43
+ ### Events
44
+
45
+ `StacheTransformStream` does not emit events directly. Use standard `ReadableStream` consumption (`for await`, `pipeTo`, etc.) to receive rendered output and detect completion.