opencode-snippets 1.3.0 → 1.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +42 -0
- package/index.ts +3 -2
- package/package.json +1 -1
- package/src/commands.ts +0 -1
- package/src/expander.test.ts +420 -48
- package/src/expander.ts +165 -7
- package/src/types.ts +24 -0
package/README.md
CHANGED
@@ -190,6 +190,48 @@ I reference I reference I reference ... (15 times) ... I reference #self
 
 This generous limit supports complex snippet hierarchies while preventing infinite loops.
 
+### Prepend and Append Blocks
+
+For long reference material that would break your writing flow, use `<append>` blocks to place content at the end of your message:
+
+```markdown
+---
+aliases: jira-mcp
+---
+Jira MCP server
+<append>
+## Jira MCP Usage
+
+Use these custom field mappings when creating issues:
+- customfield_16570 => Acceptance Criteria
+- customfield_11401 => Team
+</append>
+```
+
+**Input:** `Create a bug ticket in #jira-mcp about the memory leak`
+
+**Output:**
+```
+Create a bug ticket in Jira MCP server about the memory leak
+
+## Jira MCP Usage
+
+Use these custom field mappings when creating issues:
+- customfield_16570 => Acceptance Criteria
+- customfield_11401 => Team
+```
+
+Write naturally—reference what you need mid-sentence—and the context follows at the bottom.
+
+Use `<prepend>` for content that should appear at the top of your message. Multiple blocks of the same type are concatenated in order of appearance.
+
+**Block behavior:**
+- Content outside `<prepend>`/`<append>` blocks replaces the hashtag inline
+- If a snippet has only blocks (no inline content), the hashtag is simply removed
+- Blocks from nested snippets are collected and assembled in the final message
+- Unclosed tags are handled leniently (rest of content becomes the block)
+- Nested `<prepend>` inside `<append>` (or vice versa) is an error—the hashtag is left unchanged
+
 ## Example Snippets
 
 ### `~/.config/opencode/snippet/context.md`
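For illustration, a minimal sketch of what the README example does with the two exports added in this release, `parseSnippetBlocks` and `assembleMessage` (the sketch is not part of the published diff; the relative import path assumes the in-package layout used by `index.ts` below):

```ts
// Sketch only: mirrors the README Jira example using the new exports.
import { assembleMessage, parseSnippetBlocks } from "./src/expander.js";

const snippetBody = [
  "Jira MCP server",
  "<append>",
  "## Jira MCP Usage",
  "",
  "Use these custom field mappings when creating issues:",
  "- customfield_16570 => Acceptance Criteria",
  "- customfield_11401 => Team",
  "</append>",
].join("\n");

// Split the snippet into inline text and its <append> block.
const parsed = parseSnippetBlocks(snippetBody);
// parsed?.inline    -> "Jira MCP server"
// parsed?.append[0] -> the "## Jira MCP Usage" reference material

if (parsed) {
  // Inline text replaces the hashtag; append blocks land at the end of the message.
  const message = assembleMessage({
    text: `Create a bug ticket in ${parsed.inline} about the memory leak`,
    prepend: parsed.prepend,
    append: parsed.append,
  });
  console.log(message);
}
```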
package/index.ts
CHANGED
@@ -1,6 +1,6 @@
 import type { Plugin } from "@opencode-ai/plugin";
 import { createCommandExecuteHandler } from "./src/commands.js";
-import { expandHashtags } from "./src/expander.js";
+import { assembleMessage, expandHashtags } from "./src/expander.js";
 import { loadSnippets } from "./src/loader.js";
 import { logger } from "./src/logger.js";
 import { executeShellCommands, type ShellContext } from "./src/shell.js";
@@ -53,7 +53,8 @@ export const SnippetsPlugin: Plugin = async (ctx) => {
       if (part.type === "text" && part.text) {
         // 1. Expand hashtags recursively with loop detection
         const expandStart = performance.now();
-        part.text = expandHashtags(part.text, snippets);
+        const expansionResult = expandHashtags(part.text, snippets);
+        part.text = assembleMessage(expansionResult);
         const expandTime = performance.now() - expandStart;
         expandTimeTotal += expandTime;
 
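The caller-side shape of that change, sketched outside the diff (the `declare` stands in for the registry the plugin obtains via `loadSnippets()`; type and import paths follow the in-package layout):

```ts
// Sketch of the new two-step flow; `registry` stands in for the loaded snippets.
import { assembleMessage, expandHashtags } from "./src/expander.js";
import type { SnippetRegistry } from "./src/types.js";

declare const registry: SnippetRegistry;

// Expansion now returns structured data instead of a plain string...
const expansionResult = expandHashtags("Create a bug ticket in #jira-mcp", registry);
// expansionResult.text    -> inline-expanded message text
// expansionResult.prepend -> blocks collected for the top of the message
// expansionResult.append  -> blocks collected for the bottom of the message

// ...and assembly into the final message happens as a second step.
const finalText = assembleMessage(expansionResult);
```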
package/package.json
CHANGED
package/src/commands.ts
CHANGED
package/src/expander.test.ts
CHANGED
@@ -1,4 +1,4 @@
-import { expandHashtags } from "../src/expander.js";
+import { assembleMessage, expandHashtags, parseSnippetBlocks } from "../src/expander.js";
 import type { SnippetInfo, SnippetRegistry } from "../src/types.js";
 
 /** Helper to create a SnippetInfo from just content */
@@ -18,7 +18,7 @@ describe("expandHashtags - Recursive Includes and Loop Detection", () => {
 
       const result = expandHashtags("Say #greeting", registry);
 
-      expect(result).toBe("Say Hello, World!");
+      expect(result.text).toBe("Say Hello, World!");
     });
 
     it("should expand multiple hashtags in one text", () => {
@@ -29,7 +29,7 @@ describe("expandHashtags - Recursive Includes and Loop Detection", () => {
 
      const result = expandHashtags("#greeting, #name!", registry);
 
-      expect(result).toBe("Hello, Alice!");
+      expect(result.text).toBe("Hello, Alice!");
     });
 
     it("should leave unknown hashtags unchanged", () => {
@@ -37,7 +37,7 @@ describe("expandHashtags - Recursive Includes and Loop Detection", () => {
 
      const result = expandHashtags("This is #known and #unknown", registry);
 
-      expect(result).toBe("This is content and #unknown");
+      expect(result.text).toBe("This is content and #unknown");
     });
 
     it("should handle empty text", () => {
@@ -45,7 +45,7 @@ describe("expandHashtags - Recursive Includes and Loop Detection", () => {
 
      const result = expandHashtags("", registry);
 
-      expect(result).toBe("");
+      expect(result.text).toBe("");
     });
 
     it("should handle text with no hashtags", () => {
@@ -53,7 +53,7 @@ describe("expandHashtags - Recursive Includes and Loop Detection", () => {
 
      const result = expandHashtags("No hashtags here", registry);
 
-      expect(result).toBe("No hashtags here");
+      expect(result.text).toBe("No hashtags here");
     });
 
     it("should handle case-insensitive hashtags", () => {
@@ -61,7 +61,7 @@ describe("expandHashtags - Recursive Includes and Loop Detection", () => {
 
      const result = expandHashtags("#Greeting #GREETING #greeting", registry);
 
-      expect(result).toBe("Hello Hello Hello");
+      expect(result.text).toBe("Hello Hello Hello");
     });
   });
 
@@ -74,7 +74,7 @@ describe("expandHashtags - Recursive Includes and Loop Detection", () => {
 
      const result = expandHashtags("#outer", registry);
 
-      expect(result).toBe("Start Middle End");
+      expect(result.text).toBe("Start Middle End");
     });
 
     it("should expand nested hashtags multiple levels deep", () => {
@@ -87,7 +87,7 @@ describe("expandHashtags - Recursive Includes and Loop Detection", () => {
 
      const result = expandHashtags("#level1", registry);
 
-      expect(result).toBe("L1 L2 L3 L4");
+      expect(result.text).toBe("L1 L2 L3 L4");
     });
 
     it("should expand multiple nested hashtags in one snippet", () => {
@@ -99,7 +99,7 @@ describe("expandHashtags - Recursive Includes and Loop Detection", () => {
 
      const result = expandHashtags("#main", registry);
 
-      expect(result).toBe("Start Content A and Content B End");
+      expect(result.text).toBe("Start Content A and Content B End");
     });
 
     it("should expand complex nested structure", () => {
@@ -113,7 +113,7 @@ describe("expandHashtags - Recursive Includes and Loop Detection", () => {
 
      const result = expandHashtags("#greeting", registry);
 
-      expect(result).toBe("Hello John Doe");
+      expect(result.text).toBe("Hello John Doe");
     });
   });
 
@@ -125,7 +125,7 @@ describe("expandHashtags - Recursive Includes and Loop Detection", () => {
 
      // Loop detected after 15 expansions, #self left as-is
      const expected = `${"I reference ".repeat(15)}#self`;
-      expect(result).toBe(expected);
+      expect(result.text).toBe(expected);
     });
 
     it("should detect and prevent two-way circular reference", () => {
@@ -138,7 +138,7 @@ describe("expandHashtags - Recursive Includes and Loop Detection", () => {
 
      // Should expand alternating A and B 15 times then stop
      const expected = `${"A references B references ".repeat(15)}#a`;
-      expect(result).toBe(expected);
+      expect(result.text).toBe(expected);
     });
 
     it("should detect and prevent three-way circular reference", () => {
@@ -152,7 +152,7 @@ describe("expandHashtags - Recursive Includes and Loop Detection", () => {
 
      // Should expand cycling through A, B, C 15 times then stop
      const expected = `${"A -> B -> C -> ".repeat(15)}#a`;
-      expect(result).toBe(expected);
+      expect(result.text).toBe(expected);
     });
 
     it("should detect loops in longer chains", () => {
@@ -167,7 +167,7 @@ describe("expandHashtags - Recursive Includes and Loop Detection", () => {
      const result = expandHashtags("#a", registry);
 
      // Should expand until loop detected
-      expect(result).toBe("#b");
+      expect(result.text).toBe("#b");
     });
   });
 
@@ -183,7 +183,7 @@ describe("expandHashtags - Recursive Includes and Loop Detection", () => {
      const result = expandHashtags("#main", registry);
 
      // #shared should be expanded in both branches
-      expect(result).toBe("B1 uses Shared content and B2 uses Shared content");
+      expect(result.text).toBe("B1 uses Shared content and B2 uses Shared content");
     });
 
     it("should handle partial loops with valid branches", () => {
@@ -197,7 +197,7 @@ describe("expandHashtags - Recursive Includes and Loop Detection", () => {
 
      // Valid expands once, loop expands 15 times
      const expected = `Valid content and ${"Loop ".repeat(15)}#loop`;
-      expect(result).toBe(expected);
+      expect(result.text).toBe(expected);
     });
 
     it("should handle multiple independent loops", () => {
@@ -211,7 +211,7 @@ describe("expandHashtags - Recursive Includes and Loop Detection", () => {
 
      // Each loop expands 15 times independently
      const expected = `${"L1 ".repeat(15)}#loop1 and ${"L2 ".repeat(15)}#loop2`;
-      expect(result).toBe(expected);
+      expect(result.text).toBe(expected);
     });
 
     it("should handle nested loops", () => {
@@ -225,10 +225,10 @@ describe("expandHashtags - Recursive Includes and Loop Detection", () => {
 
      // Complex nested loop - outer/inner cycle 15 times, plus self cycles
      // This is complex expansion behavior, just verify it doesn't hang
-      expect(result).toContain("Outer");
-      expect(result).toContain("Inner");
-      expect(result).toContain("#outer");
-      expect(result).toContain("#self");
+      expect(result.text).toContain("Outer");
+      expect(result.text).toContain("Inner");
+      expect(result.text).toContain("#outer");
+      expect(result.text).toContain("#self");
     });
 
     it("should handle diamond pattern (same snippet reached via multiple paths)", () => {
@@ -242,7 +242,7 @@ describe("expandHashtags - Recursive Includes and Loop Detection", () => {
      const result = expandHashtags("#top", registry);
 
      // Diamond: top -> left -> bottom, top -> right -> bottom
-      expect(result).toBe("Left Bottom Right Bottom");
+      expect(result.text).toBe("Left Bottom Right Bottom");
     });
 
     it("should handle loop after valid expansion", () => {
@@ -255,7 +255,7 @@ describe("expandHashtags - Recursive Includes and Loop Detection", () => {
 
      const result = expandHashtags("#a", registry);
 
-      expect(result).toBe("Valid B #c");
+      expect(result.text).toBe("Valid B #c");
     });
   });
 
@@ -265,7 +265,7 @@ describe("expandHashtags - Recursive Includes and Loop Detection", () => {
 
      const result = expandHashtags("#anything", registry);
 
-      expect(result).toBe("#anything");
+      expect(result.text).toBe("#anything");
     });
 
     it("should handle snippet with empty content", () => {
@@ -273,7 +273,7 @@ describe("expandHashtags - Recursive Includes and Loop Detection", () => {
 
      const result = expandHashtags("Before #empty After", registry);
 
-      expect(result).toBe("Before After");
+      expect(result.text).toBe("Before After");
     });
 
     it("should handle snippet containing only hashtags", () => {
@@ -285,7 +285,7 @@ describe("expandHashtags - Recursive Includes and Loop Detection", () => {
 
      const result = expandHashtags("#only-refs", registry);
 
-      expect(result).toBe("A B");
+      expect(result.text).toBe("A B");
     });
 
     it("should handle hashtags at start, middle, and end", () => {
@@ -297,7 +297,7 @@ describe("expandHashtags - Recursive Includes and Loop Detection", () => {
 
      const result = expandHashtags("#start text #middle text #end", registry);
 
-      expect(result).toBe("Start text Middle text End");
+      expect(result.text).toBe("Start text Middle text End");
     });
 
     it("should handle consecutive hashtags", () => {
@@ -309,7 +309,7 @@ describe("expandHashtags - Recursive Includes and Loop Detection", () => {
 
      const result = expandHashtags("#a#b#c", registry);
 
-      expect(result).toBe("ABC");
+      expect(result.text).toBe("ABC");
     });
 
     it("should handle hashtags with hyphens and underscores", () => {
@@ -321,7 +321,7 @@ describe("expandHashtags - Recursive Includes and Loop Detection", () => {
 
      const result = expandHashtags("#my-snippet #my_snippet #my-complex_name", registry);
 
-      expect(result).toBe("Hyphenated Underscored Mixed");
+      expect(result.text).toBe("Hyphenated Underscored Mixed");
     });
 
     it("should handle hashtags with numbers", () => {
@@ -332,7 +332,7 @@ describe("expandHashtags - Recursive Includes and Loop Detection", () => {
 
      const result = expandHashtags("#test123 #123test", registry);
 
-      expect(result).toBe("Test with numbers Numbers first");
+      expect(result.text).toBe("Test with numbers Numbers first");
     });
 
     it("should not expand hashtags in URLs", () => {
@@ -342,7 +342,7 @@ describe("expandHashtags - Recursive Includes and Loop Detection", () => {
      // This test documents current behavior
      const result = expandHashtags("See https://github.com/user/repo/issues/#issue", registry);
 
-      expect(result).toBe("See https://github.com/user/repo/issues/ISSUE");
+      expect(result.text).toBe("See https://github.com/user/repo/issues/ISSUE");
     });
 
     it("should handle multiline content", () => {
@@ -350,7 +350,7 @@ describe("expandHashtags - Recursive Includes and Loop Detection", () => {
 
      const result = expandHashtags("Start\n#multiline\nEnd", registry);
 
-      expect(result).toBe("Start\nLine 1\nLine 2\nLine 3\nEnd");
+      expect(result.text).toBe("Start\nLine 1\nLine 2\nLine 3\nEnd");
     });
 
     it("should handle nested multiline content", () => {
@@ -361,7 +361,7 @@ describe("expandHashtags - Recursive Includes and Loop Detection", () => {
 
      const result = expandHashtags("#outer", registry);
 
-      expect(result).toBe("Outer start\nInner line 1\nInner line 2\nOuter end");
+      expect(result.text).toBe("Outer start\nInner line 1\nInner line 2\nOuter end");
     });
   });
 
@@ -376,10 +376,10 @@ describe("expandHashtags - Recursive Includes and Loop Detection", () => {
 
      const result = expandHashtags("#review", registry);
 
-      expect(result).toContain("Code Review Checklist:");
-      expect(result).toContain("Check for SQL injection");
-      expect(result).toContain("Check for N+1 queries");
-      expect(result).toContain("Unit tests present");
+      expect(result.text).toContain("Code Review Checklist:");
+      expect(result.text).toContain("Check for SQL injection");
+      expect(result.text).toContain("Check for N+1 queries");
+      expect(result.text).toContain("Unit tests present");
     });
 
     it("should expand documentation template with shared components", () => {
@@ -394,8 +394,8 @@ describe("expandHashtags - Recursive Includes and Loop Detection", () => {
      const result = expandHashtags("#doc", registry);
 
      // #author should be expanded in both header and footer
-      expect(result).toContain("Author: John Doe");
-      expect(result).toContain("Contact: John Doe");
+      expect(result.text).toContain("Author: John Doe");
+      expect(result.text).toContain("Contact: John Doe");
     });
 
     it("should handle instruction composition", () => {
@@ -407,7 +407,9 @@ describe("expandHashtags - Recursive Includes and Loop Detection", () => {
 
      const result = expandHashtags("Instructions: #complete", registry);
 
-      expect(result).toBe("Instructions: Be thorough. Think step by step. Double-check your work.");
+      expect(result.text).toBe(
+        "Instructions: Be thorough. Think step by step. Double-check your work.",
+      );
     });
   });
 
@@ -424,9 +426,9 @@ describe("expandHashtags - Recursive Includes and Loop Detection", () => {
 
      const result = expandHashtags("#level0", registry);
 
-      expect(result).toContain("L0");
-      expect(result).toContain("End");
-      expect(result.split(" ").length).toBe(depth);
+      expect(result.text).toContain("L0");
+      expect(result.text).toContain("End");
+      expect(result.text.split(" ").length).toBe(depth);
     });
 
     it("should handle many snippets in one text", () => {
@@ -440,9 +442,9 @@ describe("expandHashtags - Recursive Includes and Loop Detection", () => {
      const hashtags = Array.from({ length: count }, (_, i) => `#snippet${i}`).join(" ");
      const result = expandHashtags(hashtags, registry);
 
-      expect(result.split(" ").length).toBe(count);
-      expect(result).toContain("Content0");
-      expect(result).toContain(`Content${count - 1}`);
+      expect(result.text.split(" ").length).toBe(count);
+      expect(result.text).toContain("Content0");
+      expect(result.text).toContain(`Content${count - 1}`);
     });
 
     it("should handle wide branching (many children)", () => {
@@ -459,8 +461,378 @@ describe("expandHashtags - Recursive Includes and Loop Detection", () => {
      const result = expandHashtags("#parent", registry);
 
      for (let i = 0; i < branches; i++) {
-        expect(result).toContain(`Child${i}`);
+        expect(result.text).toContain(`Child${i}`);
      }
     });
   });
 });
+
+describe("parseSnippetBlocks", () => {
+  describe("Basic parsing", () => {
+    it("should return full content as inline when no blocks present", () => {
+      const result = parseSnippetBlocks("Just some content");
+
+      expect(result).toEqual({
+        inline: "Just some content",
+        prepend: [],
+        append: [],
+      });
+    });
+
+    it("should extract append block and inline content", () => {
+      const result = parseSnippetBlocks("Inline text\n<append>\nAppend content\n</append>");
+
+      expect(result).toEqual({
+        inline: "Inline text",
+        prepend: [],
+        append: ["Append content"],
+      });
+    });
+
+    it("should extract prepend block and inline content", () => {
+      const result = parseSnippetBlocks("<prepend>\nPrepend content\n</prepend>\nInline text");
+
+      expect(result).toEqual({
+        inline: "Inline text",
+        prepend: ["Prepend content"],
+        append: [],
+      });
+    });
+
+    it("should extract both prepend and append blocks", () => {
+      const content = `<prepend>
+Before content
+</prepend>
+Inline text
+<append>
+After content
+</append>`;
+
+      const result = parseSnippetBlocks(content);
+
+      expect(result).toEqual({
+        inline: "Inline text",
+        prepend: ["Before content"],
+        append: ["After content"],
+      });
+    });
+
+    it("should handle multiple blocks of the same type", () => {
+      const content = `<append>
+First append
+</append>
+Inline
+<append>
+Second append
+</append>`;
+
+      const result = parseSnippetBlocks(content);
+
+      expect(result).toEqual({
+        inline: "Inline",
+        prepend: [],
+        append: ["First append", "Second append"],
+      });
+    });
+  });
+
+  describe("Edge cases", () => {
+    it("should handle empty inline (only blocks)", () => {
+      const content = `<append>
+Only append content
+</append>`;
+
+      const result = parseSnippetBlocks(content);
+
+      expect(result).toEqual({
+        inline: "",
+        prepend: [],
+        append: ["Only append content"],
+      });
+    });
+
+    it("should handle unclosed tag leniently (rest is block content)", () => {
+      const content = "Inline\n<append>\nUnclosed append content";
+
+      const result = parseSnippetBlocks(content);
+
+      expect(result).toEqual({
+        inline: "Inline",
+        prepend: [],
+        append: ["Unclosed append content"],
+      });
+    });
+
+    it("should return null for nested tags", () => {
+      const content = "<append>\n<prepend>\nnested\n</prepend>\n</append>";
+
+      const result = parseSnippetBlocks(content);
+
+      expect(result).toBeNull();
+    });
+
+    it("should trim content inside blocks", () => {
+      const content = "<append>\n \n Content with whitespace \n \n</append>";
+
+      const result = parseSnippetBlocks(content);
+
+      expect(result?.append[0]).toBe("Content with whitespace");
+    });
+
+    it("should trim inline content", () => {
+      const content = " \n Inline with whitespace \n ";
+
+      const result = parseSnippetBlocks(content);
+
+      expect(result?.inline).toBe("Inline with whitespace");
+    });
+
+    it("should be case-insensitive for tags", () => {
+      const content = "<APPEND>\nContent\n</APPEND>";
+
+      const result = parseSnippetBlocks(content);
+
+      expect(result).toEqual({
+        inline: "",
+        prepend: [],
+        append: ["Content"],
+      });
+    });
+
+    it("should handle empty blocks", () => {
+      const content = "Inline<append></append>";
+
+      const result = parseSnippetBlocks(content);
+
+      expect(result).toEqual({
+        inline: "Inline",
+        prepend: [],
+        append: [],
+      });
+    });
+  });
+
+  describe("Real-world content", () => {
+    it("should parse Jira MCP example", () => {
+      const content = `Jira MCP server
+<append>
+## Jira MCP Usage
+
+Use these custom field mappings when creating issues:
+- customfield_16570 => Acceptance Criteria
+- customfield_11401 => Team
+</append>`;
+
+      const result = parseSnippetBlocks(content);
+
+      expect(result?.inline).toBe("Jira MCP server");
+      expect(result?.append).toHaveLength(1);
+      expect(result?.append[0]).toContain("Jira MCP Usage");
+      expect(result?.append[0]).toContain("customfield_16570");
+    });
+  });
+});
+
+describe("assembleMessage", () => {
+  it("should assemble text only", () => {
+    const result = assembleMessage({
+      text: "Main content",
+      prepend: [],
+      append: [],
+    });
+
+    expect(result).toBe("Main content");
+  });
+
+  it("should assemble with append blocks", () => {
+    const result = assembleMessage({
+      text: "Main content",
+      prepend: [],
+      append: ["Appended section"],
+    });
+
+    expect(result).toBe("Main content\n\nAppended section");
+  });
+
+  it("should assemble with prepend blocks", () => {
+    const result = assembleMessage({
+      text: "Main content",
+      prepend: ["Prepended section"],
+      append: [],
+    });
+
+    expect(result).toBe("Prepended section\n\nMain content");
+  });
+
+  it("should assemble with both prepend and append", () => {
+    const result = assembleMessage({
+      text: "Main content",
+      prepend: ["Before"],
+      append: ["After"],
+    });
+
+    expect(result).toBe("Before\n\nMain content\n\nAfter");
+  });
+
+  it("should join multiple prepend blocks", () => {
+    const result = assembleMessage({
+      text: "Main",
+      prepend: ["First", "Second"],
+      append: [],
+    });
+
+    expect(result).toBe("First\n\nSecond\n\nMain");
+  });
+
+  it("should join multiple append blocks", () => {
+    const result = assembleMessage({
+      text: "Main",
+      prepend: [],
+      append: ["First", "Second"],
+    });
+
+    expect(result).toBe("Main\n\nFirst\n\nSecond");
+  });
+
+  it("should handle empty text with blocks", () => {
+    const result = assembleMessage({
+      text: "",
+      prepend: ["Before"],
+      append: ["After"],
+    });
+
+    expect(result).toBe("Before\n\nAfter");
+  });
+
+  it("should handle whitespace-only text with blocks", () => {
+    const result = assembleMessage({
+      text: " ",
+      prepend: ["Before"],
+      append: ["After"],
+    });
+
+    expect(result).toBe("Before\n\nAfter");
+  });
+});
+
+describe("Prepend/Append integration with expandHashtags", () => {
+  it("should collect append blocks during expansion", () => {
+    const registry = createRegistry([
+      ["jira", "Jira MCP server\n<append>\nJira reference docs\n</append>"],
+    ]);
+
+    const result = expandHashtags("Create a ticket in #jira", registry);
+
+    expect(result.text).toBe("Create a ticket in Jira MCP server");
+    expect(result.append).toEqual(["Jira reference docs"]);
+  });
+
+  it("should collect prepend blocks during expansion", () => {
+    const registry = createRegistry([
+      ["context", "<prepend>\nImportant context\n</prepend>\nUse the context"],
+    ]);
+
+    const result = expandHashtags("#context please", registry);
+
+    expect(result.text).toBe("Use the context please");
+    expect(result.prepend).toEqual(["Important context"]);
+  });
+
+  it("should collect blocks from nested snippets", () => {
+    const registry = createRegistry([
+      ["outer", "Outer #inner text"],
+      ["inner", "Inner\n<append>\nInner's append\n</append>"],
+    ]);
+
+    const result = expandHashtags("#outer", registry);
+
+    expect(result.text).toBe("Outer Inner text");
+    expect(result.append).toEqual(["Inner's append"]);
+  });
+
+  it("should collect blocks from multiple snippets", () => {
+    const registry = createRegistry([
+      ["a", "A text\n<append>\nA's append\n</append>"],
+      ["b", "B text\n<append>\nB's append\n</append>"],
+    ]);
+
+    const result = expandHashtags("#a and #b", registry);
+
+    expect(result.text).toBe("A text and B text");
+    expect(result.append).toEqual(["A's append", "B's append"]);
+  });
+
+  it("should handle empty inline with only blocks", () => {
+    const registry = createRegistry([["ref", "<append>\nReference material\n</append>"]]);
+
+    const result = expandHashtags("Use #ref here", registry);
+
+    expect(result.text).toBe("Use here");
+    expect(result.append).toEqual(["Reference material"]);
+  });
+
+  it("should assemble full message correctly", () => {
+    const registry = createRegistry([
+      ["jira", "Jira MCP server\n<append>\n## Jira Usage\n- Field mappings here\n</append>"],
+    ]);
+
+    const result = expandHashtags("Create a bug ticket in #jira about the memory leak", registry);
+    const assembled = assembleMessage(result);
+
+    expect(assembled).toBe(
+      "Create a bug ticket in Jira MCP server about the memory leak\n\n## Jira Usage\n- Field mappings here",
+    );
+  });
+
+  it("should collect multiple append blocks from single snippet", () => {
+    const registry = createRegistry([
+      ["multi", "Inline\n<append>\nFirst append\n</append>\n<append>\nSecond append\n</append>"],
+    ]);
+
+    const result = expandHashtags("#multi", registry);
+
+    expect(result.text).toBe("Inline");
+    expect(result.append).toEqual(["First append", "Second append"]);
+  });
+
+  it("should collect multiple prepend blocks from single snippet", () => {
+    const registry = createRegistry([
+      [
+        "multi",
+        "<prepend>\nFirst prepend\n</prepend>\n<prepend>\nSecond prepend\n</prepend>\nInline",
+      ],
+    ]);
+
+    const result = expandHashtags("#multi", registry);
+
+    expect(result.text).toBe("Inline");
+    expect(result.prepend).toEqual(["First prepend", "Second prepend"]);
+  });
+
+  it("should assemble multiple prepends and appends in correct order", () => {
+    const registry = createRegistry([
+      ["a", "<prepend>\nA prepend\n</prepend>\nA inline\n<append>\nA append\n</append>"],
+      ["b", "<prepend>\nB prepend\n</prepend>\nB inline\n<append>\nB append\n</append>"],
+    ]);
+
+    const result = expandHashtags("#a then #b", registry);
+    const assembled = assembleMessage(result);
+
+    // Prepends first (in order), then inline, then appends (in order)
+    expect(assembled).toBe(
+      "A prepend\n\nB prepend\n\nA inline then B inline\n\nA append\n\nB append",
+    );
+  });
+
+  it("should handle mix of snippets with and without blocks", () => {
+    const registry = createRegistry([
+      ["plain", "Plain content"],
+      ["withblocks", "Block inline\n<append>\nBlock append\n</append>"],
+    ]);
+
+    const result = expandHashtags("#plain and #withblocks", registry);
+    const assembled = assembleMessage(result);
+
+    expect(assembled).toBe("Plain content and Block inline\n\nBlock append");
+  });
+});
package/src/expander.ts
CHANGED
@@ -1,25 +1,125 @@
 import { PATTERNS } from "./constants.js";
 import { logger } from "./logger.js";
-import type { SnippetRegistry } from "./types.js";
+import type { ExpansionResult, ParsedSnippetContent, SnippetRegistry } from "./types.js";
 
 /**
  * Maximum number of times a snippet can be expanded to prevent infinite loops
  */
 const MAX_EXPANSION_COUNT = 15;
 
+/**
+ * Tag types for parsing
+ */
+type BlockType = "prepend" | "append";
+
+/**
+ * Parses snippet content to extract inline text and prepend/append blocks
+ *
+ * Uses a lenient stack-based parser:
+ * - Unclosed tags → treat rest of content as block
+ * - Nesting → log error, return null (skip expansion)
+ * - Multiple blocks → collected in document order
+ *
+ * @param content - The raw snippet content to parse
+ * @returns Parsed content with inline, prepend, and append parts, or null on error
+ */
+export function parseSnippetBlocks(content: string): ParsedSnippetContent | null {
+  const prepend: string[] = [];
+  const append: string[] = [];
+  let inline = "";
+
+  // Regex to find opening and closing tags
+  const tagPattern = /<(\/?)(?<tagName>prepend|append)>/gi;
+  let lastIndex = 0;
+  let currentBlock: { type: BlockType; startIndex: number; contentStart: number } | null = null;
+
+  let match = tagPattern.exec(content);
+  while (match !== null) {
+    const isClosing = match[1] === "/";
+    const tagName = match.groups?.tagName?.toLowerCase() as BlockType;
+    const tagStart = match.index;
+    const tagEnd = tagStart + match[0].length;
+
+    if (isClosing) {
+      // Closing tag
+      if (currentBlock === null) {
+        // Closing tag without opening - ignore it, treat as inline content
+        continue;
+      }
+      if (currentBlock.type !== tagName) {
+        // Mismatched closing tag - this is a nesting error
+        logger.warn(
+          `Mismatched closing tag: expected </${currentBlock.type}>, found </${tagName}>`,
+        );
+        return null;
+      }
+      // Extract block content
+      const blockContent = content.slice(currentBlock.contentStart, tagStart).trim();
+      if (blockContent) {
+        if (currentBlock.type === "prepend") {
+          prepend.push(blockContent);
+        } else {
+          append.push(blockContent);
+        }
+      }
+      lastIndex = tagEnd;
+      currentBlock = null;
+    } else {
+      // Opening tag
+      if (currentBlock !== null) {
+        // Nested opening tag - error
+        logger.warn(`Nested tags not allowed: found <${tagName}> inside <${currentBlock.type}>`);
+        return null;
+      }
+      // Add any inline content before this tag
+      const inlinePart = content.slice(lastIndex, tagStart);
+      inline += inlinePart;
+      currentBlock = { type: tagName, startIndex: tagStart, contentStart: tagEnd };
+    }
+    match = tagPattern.exec(content);
+  }
+
+  // Handle unclosed tag (lenient: treat rest as block content)
+  if (currentBlock !== null) {
+    const blockContent = content.slice(currentBlock.contentStart).trim();
+    if (blockContent) {
+      if (currentBlock.type === "prepend") {
+        prepend.push(blockContent);
+      } else {
+        append.push(blockContent);
+      }
+    }
+  } else {
+    // Add any remaining inline content
+    inline += content.slice(lastIndex);
+  }
+
+  return {
+    inline: inline.trim(),
+    prepend,
+    append,
+  };
+}
+
 /**
  * Expands hashtags in text recursively with loop detection
  *
+ * Returns an ExpansionResult containing the inline-expanded text plus
+ * collected prepend/append blocks from all expanded snippets.
+ *
  * @param text - The text containing hashtags to expand
  * @param registry - The snippet registry to look up hashtags
  * @param expansionCounts - Map tracking how many times each snippet has been expanded
- * @returns
+ * @returns ExpansionResult with text and collected blocks
  */
 export function expandHashtags(
   text: string,
   registry: SnippetRegistry,
   expansionCounts = new Map<string, number>(),
-):
+): ExpansionResult {
+  const collectedPrepend: string[] = [];
+  const collectedAppend: string[] = [];
+
   let expanded = text;
   let hasChanges = true;
 
@@ -31,6 +131,10 @@ export function expandHashtags(
     // Reset regex state (global flag requires this)
     PATTERNS.HASHTAG.lastIndex = 0;
 
+    // We need to collect blocks during replacement, so we track them here
+    const roundPrepend: string[] = [];
+    const roundAppend: string[] = [];
+
     expanded = expanded.replace(PATTERNS.HASHTAG, (match, name) => {
       const key = name.toLowerCase();
 
@@ -53,15 +157,69 @@ export function expandHashtags(
 
       expansionCounts.set(key, count);
 
-      //
-      const
+      // Parse the snippet content for blocks
+      const parsed = parseSnippetBlocks(snippet.content);
+      if (parsed === null) {
+        // Parse error - leave hashtag unchanged
+        logger.warn(`Failed to parse snippet '${key}', leaving hashtag unchanged`);
+        return match;
+      }
+
+      // Collect prepend/append blocks
+      roundPrepend.push(...parsed.prepend);
+      roundAppend.push(...parsed.append);
 
-
+      // Recursively expand any hashtags in the inline content
+      const nestedResult = expandHashtags(parsed.inline, registry, expansionCounts);
+
+      // Collect blocks from nested expansion
+      roundPrepend.push(...nestedResult.prepend);
+      roundAppend.push(...nestedResult.append);
+
+      return nestedResult.text;
     });
 
+    // Add this round's blocks to collected blocks
+    collectedPrepend.push(...roundPrepend);
+    collectedAppend.push(...roundAppend);
+
     // Only continue if the text actually changed AND no loop was detected
     hasChanges = expanded !== previous && !loopDetected;
   }
 
-  return
+  return {
+    text: expanded,
+    prepend: collectedPrepend,
+    append: collectedAppend,
+  };
+}
+
+/**
+ * Assembles the final message from an expansion result
+ *
+ * Joins: prepend blocks + inline text + append blocks
+ * with double newlines between non-empty sections.
+ *
+ * @param result - The expansion result to assemble
+ * @returns The final assembled message
+ */
+export function assembleMessage(result: ExpansionResult): string {
+  const parts: string[] = [];
+
+  // Add prepend blocks
+  if (result.prepend.length > 0) {
+    parts.push(result.prepend.join("\n\n"));
+  }
+
+  // Add main text
+  if (result.text.trim()) {
+    parts.push(result.text);
+  }
+
+  // Add append blocks
+  if (result.append.length > 0) {
+    parts.push(result.append.join("\n\n"));
+  }
+
+  return parts.join("\n\n");
 }
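The lenient parsing rules in `parseSnippetBlocks` are easiest to see by example; the expected values below are taken from the tests above, and the relative import path again assumes the in-package layout:

```ts
import { parseSnippetBlocks } from "./src/expander.js";

// Lenient case: an unclosed tag turns the rest of the content into the block.
parseSnippetBlocks("Inline\n<append>\nUnclosed append content");
// => { inline: "Inline", prepend: [], append: ["Unclosed append content"] }

// Error case: nested tags are rejected (null), so expandHashtags leaves the hashtag unchanged.
parseSnippetBlocks("<append>\n<prepend>\nnested\n</prepend>\n</append>");
// => null
```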
package/src/types.ts
CHANGED
@@ -36,3 +36,27 @@ export interface SnippetFrontmatter {
   /** Optional description of what this snippet does */
   description?: string;
 }
+
+/**
+ * Parsed snippet content with inline text and prepend/append blocks
+ */
+export interface ParsedSnippetContent {
+  /** Content outside blocks (replaces hashtag inline) */
+  inline: string;
+  /** <prepend> block contents in document order */
+  prepend: string[];
+  /** <append> block contents in document order */
+  append: string[];
+}
+
+/**
+ * Result of expanding hashtags, including collected prepend/append blocks
+ */
+export interface ExpansionResult {
+  /** The inline-expanded text */
+  text: string;
+  /** Collected prepend blocks from all expanded snippets */
+  prepend: string[];
+  /** Collected append blocks from all expanded snippets */
+  append: string[];
+}
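Taken together, the new types describe the data that flows from `expandHashtags` into `assembleMessage`. A small sketch (values borrowed from the ordering test in `expander.test.ts`; import paths assume the in-package layout):

```ts
import { assembleMessage } from "./src/expander.js";
import type { ExpansionResult } from "./src/types.js";

// Collected blocks keep their order of appearance across all expanded snippets.
const result: ExpansionResult = {
  text: "A inline then B inline",
  prepend: ["A prepend", "B prepend"],
  append: ["A append", "B append"],
};

// Prepends first (in order), then the inline text, then appends (in order),
// separated by blank lines.
assembleMessage(result);
// => "A prepend\n\nB prepend\n\nA inline then B inline\n\nA append\n\nB append"
```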