@savvy-web/changesets 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/234.js ADDED
@@ -0,0 +1,337 @@
1
+ import { lintRule } from "unified-lint-rule";
2
+ import { SKIP, visit } from "unist-util-visit";
3
+ import { external_mdast_util_to_string_toString } from "./689.js";
4
+ import { isValidHeading, fromHeading, allHeadings } from "./60.js";
5
+ const ContentStructureRule = lintRule("remark-lint:changeset-content-structure", (tree, file)=>{
6
+ visit(tree, "heading", (node, index, parent)=>{
7
+ if (2 !== node.depth || null == parent || null == index) return;
8
+ const next = parent.children[index + 1];
9
+ if (!next || "heading" === next.type && 2 === next.depth) file.message("Empty section: heading has no content before the next section or end of file", node);
10
+ });
11
+ visit(tree, "code", (node)=>{
12
+ if (!node.lang) file.message("Code block is missing a language identifier", node);
13
+ });
14
+ visit(tree, "listItem", (node)=>{
15
+ const text = external_mdast_util_to_string_toString(node).trim();
16
+ if (!text) file.message("Empty list item", node);
17
+ });
18
+ });
19
+ const HeadingHierarchyRule = lintRule("remark-lint:changeset-heading-hierarchy", (tree, file)=>{
20
+ let prevDepth = 0;
21
+ visit(tree, "heading", (node)=>{
22
+ if (1 === node.depth) return void file.message("h1 headings are not allowed in changeset files", node);
23
+ if (prevDepth > 0 && node.depth > prevDepth + 1) file.message(`Heading level skipped: expected h${prevDepth + 1} or lower, found h${node.depth}`, node);
24
+ prevDepth = node.depth;
25
+ });
26
+ });
27
+ const RequiredSectionsRule = lintRule("remark-lint:changeset-required-sections", (tree, file)=>{
28
+ visit(tree, "heading", (node)=>{
29
+ if (2 !== node.depth) return;
30
+ const text = external_mdast_util_to_string_toString(node);
31
+ if (!isValidHeading(text)) file.message(`Unknown section "${text}". Valid sections: ${allHeadings().join(", ")}`, node);
32
+ });
33
+ });
34
+ function getVersionBlocks(tree) {
35
+ const blocks = [];
36
+ for(let i = 0; i < tree.children.length; i++){
37
+ const node = tree.children[i];
38
+ if ("heading" === node.type && 2 === node.depth) blocks.push({
39
+ headingIndex: i,
40
+ startIndex: i + 1,
41
+ endIndex: tree.children.length
42
+ });
43
+ }
44
+ for(let i = 0; i < blocks.length - 1; i++)blocks[i].endIndex = blocks[i + 1].headingIndex;
45
+ return blocks;
46
+ }
47
+ function getBlockSections(tree, block) {
48
+ const sections = [];
49
+ for(let i = block.startIndex; i < block.endIndex; i++){
50
+ const node = tree.children[i];
51
+ if ("heading" === node.type && 3 === node.depth) sections.push({
52
+ heading: node,
53
+ headingIndex: i,
54
+ contentNodes: []
55
+ });
56
+ else if (sections.length > 0) sections[sections.length - 1].contentNodes.push(node);
57
+ }
58
+ return sections;
59
+ }
60
+ function getHeadingText(heading) {
61
+ return external_mdast_util_to_string_toString(heading);
62
+ }
63
+ const ATTRIBUTION_PLAIN_RE = /\s*Thanks @(\w[\w-]*)!$/;
64
+ function extractLinkedAttribution(children) {
65
+ if (children.length < 3) return;
66
+ const last = children[children.length - 1];
67
+ const secondLast = children[children.length - 2];
68
+ const thirdLast = children[children.length - 3];
69
+ if ("text" !== last.type || "!" !== last.value) return;
70
+ if ("link" !== secondLast.type) return;
71
+ const linkNode = secondLast;
72
+ if (1 !== linkNode.children.length || "text" !== linkNode.children[0].type) return;
73
+ const linkText = linkNode.children[0].value;
74
+ if (!linkText.startsWith("@")) return;
75
+ const username = linkText.slice(1);
76
+ if ("text" !== thirdLast.type) return;
77
+ const textNode = thirdLast;
78
+ if (!/\s*Thanks $/.test(textNode.value)) return;
79
+ return {
80
+ contributor: {
81
+ username,
82
+ url: linkNode.url
83
+ },
84
+ removeFrom: children.length - 3
85
+ };
86
+ }
87
+ const ContributorFootnotesPlugin = ()=>(tree)=>{
88
+ const blocks = getVersionBlocks(tree);
89
+ for(let b = blocks.length - 1; b >= 0; b--){
90
+ const block = blocks[b];
91
+ const contributors = new Map();
92
+ for(let i = block.startIndex; i < block.endIndex; i++){
93
+ const node = tree.children[i];
94
+ if ("list" === node.type) visit(node, "paragraph", (para)=>{
95
+ const linked = extractLinkedAttribution(para.children);
96
+ if (linked) {
97
+ const key = linked.contributor.username.toLowerCase();
98
+ if (!contributors.has(key)) contributors.set(key, linked.contributor);
99
+ const textNode = para.children[linked.removeFrom];
100
+ textNode.value = textNode.value.replace(/\s*Thanks $/, "");
101
+ para.children.splice(linked.removeFrom + 1, 2);
102
+ if ("" === textNode.value) para.children.splice(linked.removeFrom, 1);
103
+ return;
104
+ }
105
+ const last = para.children[para.children.length - 1];
106
+ if (last?.type === "text") {
107
+ const textNode = last;
108
+ const match = textNode.value.match(ATTRIBUTION_PLAIN_RE);
109
+ if (match) {
110
+ const username = match[1];
111
+ const key = username.toLowerCase();
112
+ if (!contributors.has(key)) contributors.set(key, {
113
+ username,
114
+ url: void 0
115
+ });
116
+ textNode.value = textNode.value.replace(ATTRIBUTION_PLAIN_RE, "");
117
+ }
118
+ }
119
+ });
120
+ }
121
+ if (0 === contributors.size) continue;
122
+ const sorted = [
123
+ ...contributors.values()
124
+ ].sort((a, b)=>a.username.toLowerCase().localeCompare(b.username.toLowerCase()));
125
+ const phrasingChildren = [];
126
+ phrasingChildren.push({
127
+ type: "text",
128
+ value: "Thanks to "
129
+ });
130
+ for(let i = 0; i < sorted.length; i++){
131
+ const contrib = sorted[i];
132
+ if (i > 0 && sorted.length > 2) phrasingChildren.push({
133
+ type: "text",
134
+ value: ", "
135
+ });
136
+ if (i > 0 && i === sorted.length - 1) phrasingChildren.push({
137
+ type: "text",
138
+ value: 2 === sorted.length ? " and " : "and "
139
+ });
140
+ if (contrib.url) phrasingChildren.push({
141
+ type: "link",
142
+ url: contrib.url,
143
+ children: [
144
+ {
145
+ type: "text",
146
+ value: `@${contrib.username}`
147
+ }
148
+ ]
149
+ });
150
+ else phrasingChildren.push({
151
+ type: "text",
152
+ value: `@${contrib.username}`
153
+ });
154
+ }
155
+ phrasingChildren.push({
156
+ type: "text",
157
+ value: " for their contributions!"
158
+ });
159
+ const paragraph = {
160
+ type: "paragraph",
161
+ children: phrasingChildren
162
+ };
163
+ tree.children.splice(block.endIndex, 0, paragraph);
164
+ }
165
+ };
166
+ const DeduplicateItemsPlugin = ()=>(tree)=>{
167
+ const blocks = getVersionBlocks(tree);
168
+ for (const block of blocks){
169
+ const sections = getBlockSections(tree, block);
170
+ for (const section of sections)for (const node of section.contentNodes){
171
+ if ("list" !== node.type) continue;
172
+ const list = node;
173
+ const seen = new Set();
174
+ list.children = list.children.filter((item)=>{
175
+ const text = external_mdast_util_to_string_toString(item);
176
+ if (seen.has(text)) return false;
177
+ seen.add(text);
178
+ return true;
179
+ });
180
+ }
181
+ }
182
+ tree.children = tree.children.filter((node)=>{
183
+ if ("list" === node.type) return node.children.length > 0;
184
+ return true;
185
+ });
186
+ };
187
+ const ISSUE_RE = /^#\d+$/;
188
+ const IssueLinkRefsPlugin = ()=>(tree)=>{
189
+ const blocks = getVersionBlocks(tree);
190
+ for(let b = blocks.length - 1; b >= 0; b--){
191
+ const block = blocks[b];
192
+ const definitions = new Map();
193
+ for(let i = block.startIndex; i < block.endIndex; i++){
194
+ const node = tree.children[i];
195
+ visit(node, "link", (linkNode, index, parent)=>{
196
+ if (!parent || void 0 === index) return;
197
+ if (1 !== linkNode.children.length || "text" !== linkNode.children[0].type) return;
198
+ const text = linkNode.children[0].value;
199
+ if (!ISSUE_RE.test(text)) return;
200
+ const label = text;
201
+ if (!definitions.has(label)) definitions.set(label, {
202
+ label,
203
+ url: linkNode.url
204
+ });
205
+ const ref = {
206
+ type: "linkReference",
207
+ identifier: label,
208
+ label,
209
+ referenceType: "full",
210
+ children: [
211
+ {
212
+ type: "text",
213
+ value: label
214
+ }
215
+ ]
216
+ };
217
+ parent.children[index] = ref;
218
+ return SKIP;
219
+ });
220
+ }
221
+ if (0 === definitions.size) continue;
222
+ const sorted = [
223
+ ...definitions.values()
224
+ ].sort((a, b)=>{
225
+ const aNum = Number.parseInt(a.label.slice(1), 10);
226
+ const bNum = Number.parseInt(b.label.slice(1), 10);
227
+ return aNum - bNum;
228
+ });
229
+ const defNodes = sorted.map((def)=>({
230
+ type: "definition",
231
+ identifier: def.label,
232
+ label: def.label,
233
+ url: def.url
234
+ }));
235
+ tree.children.splice(block.endIndex, 0, ...defNodes);
236
+ }
237
+ };
238
+ const MergeSectionsPlugin = ()=>(tree)=>{
239
+ const blocks = getVersionBlocks(tree);
240
+ for(let b = blocks.length - 1; b >= 0; b--){
241
+ const sections = getBlockSections(tree, blocks[b]);
242
+ if (sections.length <= 1) continue;
243
+ const groups = new Map();
244
+ for(let s = 0; s < sections.length; s++){
245
+ const text = getHeadingText(sections[s].heading);
246
+ const category = fromHeading(text);
247
+ const key = category ? category.heading.toLowerCase() : text.toLowerCase();
248
+ const existing = groups.get(key);
249
+ if (existing) existing.push(s);
250
+ else groups.set(key, [
251
+ s
252
+ ]);
253
+ }
254
+ const indicesToRemove = [];
255
+ for (const indices of groups.values()){
256
+ if (indices.length <= 1) continue;
257
+ const target = sections[indices[0]];
258
+ const targetEndIndex = target.headingIndex + 1 + target.contentNodes.length;
259
+ const mergedContent = [];
260
+ for(let d = 1; d < indices.length; d++){
261
+ const dup = sections[indices[d]];
262
+ mergedContent.push(...dup.contentNodes);
263
+ indicesToRemove.push(dup.headingIndex);
264
+ for(let c = 0; c < dup.contentNodes.length; c++)indicesToRemove.push(dup.headingIndex + 1 + c);
265
+ }
266
+ tree.children.splice(targetEndIndex, 0, ...mergedContent);
267
+ const shift = mergedContent.length;
268
+ for(let r = 0; r < indicesToRemove.length; r++)if (indicesToRemove[r] >= targetEndIndex) indicesToRemove[r] += shift;
269
+ }
270
+ indicesToRemove.sort((a, b)=>b - a);
271
+ for (const idx of indicesToRemove)tree.children.splice(idx, 1);
272
+ }
273
+ };
274
+ function isHeadingAtDepth(node, depths) {
275
+ return "heading" === node.type && depths.includes(node.depth);
276
+ }
277
+ const NormalizeFormatPlugin = ()=>(tree)=>{
278
+ const blocks = getVersionBlocks(tree);
279
+ const indicesToRemove = new Set();
280
+ for (const block of blocks)for(let i = block.startIndex; i < block.endIndex; i++){
281
+ const node = tree.children[i];
282
+ if ("list" === node.type && 0 === node.children.length) {
283
+ indicesToRemove.add(i);
284
+ continue;
285
+ }
286
+ if (!isHeadingAtDepth(node, [
287
+ 3
288
+ ])) continue;
289
+ let hasContent = false;
290
+ for(let j = i + 1; j < block.endIndex; j++){
291
+ const next = tree.children[j];
292
+ if ("paragraph" === next.type && "" === external_mdast_util_to_string_toString(next).trim()) {
293
+ indicesToRemove.add(j);
294
+ continue;
295
+ }
296
+ if (isHeadingAtDepth(next, [
297
+ 2,
298
+ 3
299
+ ])) break;
300
+ hasContent = true;
301
+ break;
302
+ }
303
+ if (!hasContent) indicesToRemove.add(i);
304
+ }
305
+ const sorted = [
306
+ ...indicesToRemove
307
+ ].sort((a, b)=>b - a);
308
+ for (const idx of sorted)tree.children.splice(idx, 1);
309
+ };
310
+ const UNKNOWN_PRIORITY = 999;
311
+ const ReorderSectionsPlugin = ()=>(tree)=>{
312
+ const blocks = getVersionBlocks(tree);
313
+ for(let b = blocks.length - 1; b >= 0; b--){
314
+ const block = blocks[b];
315
+ const sections = getBlockSections(tree, block);
316
+ if (sections.length <= 1) continue;
317
+ const preamble = [];
318
+ for(let i = block.startIndex; i < block.endIndex; i++)if (i < sections[0].headingIndex) preamble.push(tree.children[i]);
319
+ else break;
320
+ const sorted = [
321
+ ...sections
322
+ ].sort((a, b)=>{
323
+ const aPriority = fromHeading(getHeadingText(a.heading))?.priority ?? UNKNOWN_PRIORITY;
324
+ const bPriority = fromHeading(getHeadingText(b.heading))?.priority ?? UNKNOWN_PRIORITY;
325
+ return aPriority - bPriority;
326
+ });
327
+ const alreadySorted = sorted.every((s, i)=>s.headingIndex === sections[i].headingIndex);
328
+ if (alreadySorted) continue;
329
+ const newChildren = [
330
+ ...preamble
331
+ ];
332
+ for (const section of sorted)newChildren.push(section.heading, ...section.contentNodes);
333
+ const blockLength = block.endIndex - block.startIndex;
334
+ tree.children.splice(block.startIndex, blockLength, ...newChildren);
335
+ }
336
+ };
337
+ export { ContentStructureRule, ContributorFootnotesPlugin, DeduplicateItemsPlugin, HeadingHierarchyRule, IssueLinkRefsPlugin, MergeSectionsPlugin, NormalizeFormatPlugin, ReorderSectionsPlugin, RequiredSectionsRule };
package/245.js ADDED
@@ -0,0 +1,5 @@
1
+ export { Context, Data, Effect, Layer, Schema } from "effect";
2
+ export { default as remark_gfm } from "remark-gfm";
3
+ export { default as remark_parse } from "remark-parse";
4
+ export { default as remark_stringify } from "remark-stringify";
5
+ export { unified } from "unified";
package/273.js ADDED
@@ -0,0 +1,41 @@
1
+ import { existsSync, mkdirSync, readFileSync, readdirSync, writeFileSync } from "node:fs";
2
+ import { join, resolve } from "node:path";
3
+ import { unified, remark_gfm, remark_stringify, remark_parse } from "./245.js";
4
+ import { MergeSectionsPlugin, DeduplicateItemsPlugin, IssueLinkRefsPlugin, ContentStructureRule, HeadingHierarchyRule, NormalizeFormatPlugin, ReorderSectionsPlugin, ContributorFootnotesPlugin, RequiredSectionsRule } from "./234.js";
5
+ function stripFrontmatter(content) {
6
+ return content.replace(/^---\n[\s\S]*?\n---\n?/, "");
7
+ }
8
+ class ChangesetLinter {
9
+ static validateFile(filePath) {
10
+ const raw = readFileSync(filePath, "utf-8");
11
+ return ChangesetLinter.validateContent(raw, filePath);
12
+ }
13
+ static validateContent(content, filePath = "<input>") {
14
+ const body = stripFrontmatter(content);
15
+ const processor = unified().use(remark_parse).use(remark_stringify).use(HeadingHierarchyRule).use(RequiredSectionsRule).use(ContentStructureRule);
16
+ const file = processor.processSync(body);
17
+ return file.messages.map((msg)=>({
18
+ file: filePath,
19
+ rule: msg.ruleId ?? msg.source ?? "unknown",
20
+ line: msg.line ?? 1,
21
+ column: msg.column ?? 1,
22
+ message: msg.message
23
+ }));
24
+ }
25
+ static validate(dir) {
26
+ return readdirSync(dir).filter((f)=>f.endsWith(".md") && "README.md" !== f).flatMap((filename)=>ChangesetLinter.validateFile(join(dir, filename)));
27
+ }
28
+ }
29
+ class ChangelogTransformer {
30
+ static transformContent(content) {
31
+ const processor = unified().use(remark_parse).use(remark_gfm).use(MergeSectionsPlugin).use(ReorderSectionsPlugin).use(DeduplicateItemsPlugin).use(ContributorFootnotesPlugin).use(IssueLinkRefsPlugin).use(NormalizeFormatPlugin).use(remark_stringify);
32
+ const file = processor.processSync(content);
33
+ return String(file);
34
+ }
35
+ static transformFile(filePath) {
36
+ const content = readFileSync(filePath, "utf-8");
37
+ const result = ChangelogTransformer.transformContent(content);
38
+ writeFileSync(filePath, result, "utf-8");
39
+ }
40
+ }
41
+ export { ChangelogTransformer, ChangesetLinter, existsSync, join, mkdirSync, readFileSync, resolve, writeFileSync };
package/60.js ADDED
@@ -0,0 +1,137 @@
1
+ const BREAKING_CHANGES = {
2
+ heading: "Breaking Changes",
3
+ priority: 1,
4
+ commitTypes: [],
5
+ description: "Backward-incompatible changes"
6
+ };
7
+ const FEATURES = {
8
+ heading: "Features",
9
+ priority: 2,
10
+ commitTypes: [
11
+ "feat"
12
+ ],
13
+ description: "New functionality"
14
+ };
15
+ const BUG_FIXES = {
16
+ heading: "Bug Fixes",
17
+ priority: 3,
18
+ commitTypes: [
19
+ "fix"
20
+ ],
21
+ description: "Bug corrections"
22
+ };
23
+ const PERFORMANCE = {
24
+ heading: "Performance",
25
+ priority: 4,
26
+ commitTypes: [
27
+ "perf"
28
+ ],
29
+ description: "Performance improvements"
30
+ };
31
+ const DOCUMENTATION = {
32
+ heading: "Documentation",
33
+ priority: 5,
34
+ commitTypes: [
35
+ "docs"
36
+ ],
37
+ description: "Documentation changes"
38
+ };
39
+ const REFACTORING = {
40
+ heading: "Refactoring",
41
+ priority: 6,
42
+ commitTypes: [
43
+ "refactor"
44
+ ],
45
+ description: "Code restructuring"
46
+ };
47
+ const TESTS = {
48
+ heading: "Tests",
49
+ priority: 7,
50
+ commitTypes: [
51
+ "test"
52
+ ],
53
+ description: "Test additions or modifications"
54
+ };
55
+ const BUILD_SYSTEM = {
56
+ heading: "Build System",
57
+ priority: 8,
58
+ commitTypes: [
59
+ "build"
60
+ ],
61
+ description: "Build configuration changes"
62
+ };
63
+ const CI = {
64
+ heading: "CI",
65
+ priority: 9,
66
+ commitTypes: [
67
+ "ci"
68
+ ],
69
+ description: "Continuous integration changes"
70
+ };
71
+ const DEPENDENCIES = {
72
+ heading: "Dependencies",
73
+ priority: 10,
74
+ commitTypes: [
75
+ "deps"
76
+ ],
77
+ description: "Dependency updates"
78
+ };
79
+ const MAINTENANCE = {
80
+ heading: "Maintenance",
81
+ priority: 11,
82
+ commitTypes: [
83
+ "chore",
84
+ "style"
85
+ ],
86
+ description: "General maintenance"
87
+ };
88
+ const REVERTS = {
89
+ heading: "Reverts",
90
+ priority: 12,
91
+ commitTypes: [
92
+ "revert"
93
+ ],
94
+ description: "Reverted changes"
95
+ };
96
+ const OTHER = {
97
+ heading: "Other",
98
+ priority: 13,
99
+ commitTypes: [],
100
+ description: "Uncategorized changes"
101
+ };
102
+ const CATEGORIES = [
103
+ BREAKING_CHANGES,
104
+ FEATURES,
105
+ BUG_FIXES,
106
+ PERFORMANCE,
107
+ DOCUMENTATION,
108
+ REFACTORING,
109
+ TESTS,
110
+ BUILD_SYSTEM,
111
+ CI,
112
+ DEPENDENCIES,
113
+ MAINTENANCE,
114
+ REVERTS,
115
+ OTHER
116
+ ];
117
+ const headingToCategory = new Map(CATEGORIES.map((cat)=>[
118
+ cat.heading.toLowerCase(),
119
+ cat
120
+ ]));
121
+ const commitTypeToCategory = new Map();
122
+ for (const cat of CATEGORIES)for (const commitType of cat.commitTypes)commitTypeToCategory.set(commitType, cat);
123
+ function resolveCommitType(type, scope, breaking) {
124
+ if (breaking) return BREAKING_CHANGES;
125
+ if ("chore" === type && "deps" === scope) return DEPENDENCIES;
126
+ return commitTypeToCategory.get(type) ?? OTHER;
127
+ }
128
+ function fromHeading(heading) {
129
+ return headingToCategory.get(heading.toLowerCase());
130
+ }
131
+ function isValidHeading(heading) {
132
+ return headingToCategory.has(heading.toLowerCase());
133
+ }
134
+ function allHeadings() {
135
+ return CATEGORIES.map((cat)=>cat.heading);
136
+ }
137
+ export { BREAKING_CHANGES, BUG_FIXES, BUILD_SYSTEM, CATEGORIES, CI, DEPENDENCIES, DOCUMENTATION, FEATURES, MAINTENANCE, OTHER, PERFORMANCE, REFACTORING, REVERTS, TESTS, allHeadings, fromHeading, isValidHeading, resolveCommitType };
package/689.js ADDED
@@ -0,0 +1 @@
1
+ export { toString as external_mdast_util_to_string_toString } from "mdast-util-to-string";
package/LICENSE ADDED
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2026 Savvy Web Strategy, LLC
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
package/README.md ADDED
@@ -0,0 +1,124 @@
1
+ # @savvy-web/changesets
2
+
3
+ [![npm version][npm-badge]][npm-url]
4
+ [![License: MIT][license-badge]][license-url]
5
+
6
+ A custom changelog formatter and markdown processing pipeline
7
+ for the Silk Suite. It replaces the default
8
+ `@changesets/cli/changelog` formatter with a three-layer
9
+ architecture that validates changeset files, formats
10
+ structured changelog entries, and post-processes the
11
+ generated CHANGELOG.md.
12
+
13
+ ## Features
14
+
15
+ - **Section-aware changesets** -- Use h2 headings in
16
+ changeset files to categorize changes (Features,
17
+ Bug Fixes, Breaking Changes, etc.)
18
+ - **Three-layer pipeline** -- Pre-validation
19
+ (remark-lint), changelog formatting (Changesets API),
20
+ and post-processing (remark-transform)
21
+ - **13 section categories** -- Consistent categorization
22
+ with priority-based ordering across all layers
23
+ - **CLI tooling** -- `savvy-changesets` binary with init,
24
+ lint, transform, check, and version subcommands for CI
25
+ and local use
26
+ - **GitHub integration** -- Automatic PR links, commit
27
+ references, and contributor attribution
28
+ - **Remark plugins** -- Lint rules and transform plugins
29
+ via `@savvy-web/changesets/remark` (see the sketch below)
30
+ - **markdownlint rules** -- Custom rules compatible with
31
+ markdownlint-cli2 and the VS Code extension via
32
+ `@savvy-web/changesets/markdownlint`
33
+
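+ The lint rules and transform plugins listed above compose as
+ ordinary unified plugins. A minimal sketch of the post-processing
+ pipeline, assuming the `remark` subpath re-exports the plugin
+ names used by the bundled transformer:
+
+ ```js
+ import { readFileSync } from "node:fs";
+ import { unified } from "unified";
+ import remarkParse from "remark-parse";
+ import remarkGfm from "remark-gfm";
+ import remarkStringify from "remark-stringify";
+ // Export names are assumed to match the internal plugin names
+ // visible in the published bundle.
+ import {
+   MergeSectionsPlugin,
+   ReorderSectionsPlugin,
+   DeduplicateItemsPlugin,
+   ContributorFootnotesPlugin,
+   IssueLinkRefsPlugin,
+   NormalizeFormatPlugin,
+ } from "@savvy-web/changesets/remark";
+
+ // Same plugin order the package uses when rewriting CHANGELOG.md.
+ const processor = unified()
+   .use(remarkParse)
+   .use(remarkGfm)
+   .use(MergeSectionsPlugin)
+   .use(ReorderSectionsPlugin)
+   .use(DeduplicateItemsPlugin)
+   .use(ContributorFootnotesPlugin)
+   .use(IssueLinkRefsPlugin)
+   .use(NormalizeFormatPlugin)
+   .use(remarkStringify);
+
+ const changelog = readFileSync("CHANGELOG.md", "utf-8");
+ console.log(String(processor.processSync(changelog)));
+ ```
+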
34
+ ## Installation
35
+
36
+ ```bash
37
+ pnpm add @savvy-web/changesets
38
+ ```
39
+
40
+ ## Quick Start
41
+
42
+ Bootstrap your repository:
43
+
44
+ ```bash
45
+ savvy-changesets init
46
+ ```
47
+
48
+ This creates `.changeset/config.json` with auto-detected
49
+ GitHub repo settings. Or configure manually:
50
+
51
+ ```json
52
+ {
53
+ "changelog": [
54
+ "@savvy-web/changesets/changelog",
55
+ { "repo": "owner/repo" }
56
+ ]
57
+ }
58
+ ```
59
+
60
+ Write section-aware changeset files:
61
+
62
+ ```markdown
63
+ ---
64
+ "@my/package": minor
65
+ ---
66
+
67
+ ## Features
68
+
69
+ Added a new authentication system with OAuth2 support.
70
+
71
+ ## Tests
72
+
73
+ - Added unit tests for OAuth2 flow
74
+ - Updated integration test fixtures
75
+ ```
76
+
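+ For CI scripts, the linter and transformer can also be driven
+ programmatically. A minimal sketch, assuming the package's root
+ entry exposes the `ChangesetLinter` and `ChangelogTransformer`
+ classes found in the published modules:
+
+ ```js
+ // The import path is an assumption; the classes and their static
+ // methods appear in the published bundle.
+ import { ChangesetLinter, ChangelogTransformer } from "@savvy-web/changesets";
+
+ // Lint every non-README .md file in the changeset directory.
+ const issues = ChangesetLinter.validate(".changeset");
+ for (const issue of issues) {
+   console.error(
+     `${issue.file}:${issue.line}:${issue.column} ${issue.rule} ${issue.message}`
+   );
+ }
+ if (issues.length > 0) process.exit(1);
+
+ // Re-run the section-aware post-processing over a generated changelog.
+ ChangelogTransformer.transformFile("CHANGELOG.md");
+ ```
+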
77
+ ## markdownlint Integration
78
+
79
+ Register the custom rules in your base config
80
+ (e.g., `lib/configs/.markdownlint-cli2.jsonc`):
81
+
82
+ ```jsonc
83
+ {
84
+ "customRules": [
85
+ "@savvy-web/changesets/markdownlint"
86
+ ],
87
+ "config": {
88
+ "changeset-heading-hierarchy": false,
89
+ "changeset-required-sections": false,
90
+ "changeset-content-structure": false
91
+ }
92
+ }
93
+ ```
94
+
95
+ Then enable the rules only for changeset files by
96
+ creating `.changeset/.markdownlint.json`:
97
+
98
+ ```json
99
+ {
100
+ "extends": "../lib/configs/.markdownlint-cli2.jsonc",
101
+ "default": false,
102
+ "changeset-heading-hierarchy": true,
103
+ "changeset-required-sections": true,
104
+ "changeset-content-structure": true,
105
+ "MD041": false
106
+ }
107
+ ```
108
+
109
+ ## Documentation
110
+
111
+ - [Section-Aware Pipeline](./docs/section-aware-pipeline.md) --
112
+ End-to-end walkthrough of how section-aware changesets
113
+ flow through the three-layer pipeline
114
+ - [Full documentation](./docs/) -- CLI usage, API
115
+ reference, configuration, and architecture
116
+
117
+ ## License
118
+
119
+ MIT
120
+
121
+ [npm-badge]: https://img.shields.io/npm/v/@savvy-web/changesets
122
+ [npm-url]: https://www.npmjs.com/package/@savvy-web/changesets
123
+ [license-badge]: https://img.shields.io/badge/License-MIT-yellow.svg
124
+ [license-url]: https://opensource.org/licenses/MIT