@regardio/dev 1.13.8 → 1.14.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (113)
  1. package/README.md +3 -3
  2. package/dist/bin/exec/clean.d.ts +3 -0
  3. package/dist/bin/exec/clean.d.ts.map +1 -0
  4. package/dist/bin/exec/clean.js +25 -0
  5. package/dist/bin/exec/clean.test.d.ts +2 -0
  6. package/dist/bin/exec/clean.test.d.ts.map +1 -0
  7. package/dist/bin/exec/clean.test.js +45 -0
  8. package/dist/bin/exec/husky.d.ts +3 -0
  9. package/dist/bin/exec/husky.d.ts.map +1 -0
  10. package/dist/bin/exec/p.d.ts +3 -0
  11. package/dist/bin/exec/p.d.ts.map +1 -0
  12. package/dist/bin/exec/s.d.ts +3 -0
  13. package/dist/bin/exec/s.d.ts.map +1 -0
  14. package/dist/bin/exec/ts.d.ts +3 -0
  15. package/dist/bin/exec/ts.d.ts.map +1 -0
  16. package/dist/bin/exec/ts.js +36 -0
  17. package/dist/bin/exec/ts.test.d.ts +2 -0
  18. package/dist/bin/exec/ts.test.d.ts.map +1 -0
  19. package/dist/bin/exec/ts.test.js +39 -0
  20. package/dist/bin/exec/tsc.d.ts +3 -0
  21. package/dist/bin/exec/tsc.d.ts.map +1 -0
  22. package/dist/bin/flow/hotfix.d.ts +3 -0
  23. package/dist/bin/flow/hotfix.d.ts.map +1 -0
  24. package/dist/bin/flow/hotfix.js +116 -0
  25. package/dist/bin/flow/release.d.ts +3 -0
  26. package/dist/bin/flow/release.d.ts.map +1 -0
  27. package/dist/bin/flow/release.js +68 -0
  28. package/dist/bin/flow/ship.d.ts +3 -0
  29. package/dist/bin/flow/ship.d.ts.map +1 -0
  30. package/dist/bin/flow/ship.js +104 -0
  31. package/dist/bin/flow/utils.d.ts +9 -0
  32. package/dist/bin/flow/utils.d.ts.map +1 -0
  33. package/dist/bin/flow/utils.js +63 -0
  34. package/dist/bin/flow/utils.test.d.ts +2 -0
  35. package/dist/bin/flow/utils.test.d.ts.map +1 -0
  36. package/dist/bin/flow/utils.test.js +127 -0
  37. package/dist/bin/lint/biome.d.ts +3 -0
  38. package/dist/bin/lint/biome.d.ts.map +1 -0
  39. package/dist/bin/lint/commit.d.ts +3 -0
  40. package/dist/bin/lint/commit.d.ts.map +1 -0
  41. package/dist/bin/lint/md.d.ts +3 -0
  42. package/dist/bin/lint/md.d.ts.map +1 -0
  43. package/dist/bin/lint/package.d.ts +4 -0
  44. package/dist/bin/lint/package.d.ts.map +1 -0
  45. package/dist/bin/lint/package.js +81 -0
  46. package/dist/bin/lint/package.test.d.ts +2 -0
  47. package/dist/bin/lint/package.test.d.ts.map +1 -0
  48. package/dist/bin/lint/package.test.js +65 -0
  49. package/package.json +21 -22
  50. package/src/bin/exec/clean.test.ts +63 -0
  51. package/src/bin/exec/clean.ts +36 -0
  52. package/src/bin/exec/ts.test.ts +54 -0
  53. package/src/bin/exec/ts.ts +52 -0
  54. package/src/bin/flow/hotfix.ts +210 -0
  55. package/src/bin/flow/release.ts +130 -0
  56. package/src/bin/flow/ship.ts +215 -0
  57. package/src/bin/flow/utils.test.ts +178 -0
  58. package/src/bin/flow/utils.ts +109 -0
  59. package/src/bin/lint/package.test.ts +83 -0
  60. package/src/bin/lint/package.ts +108 -0
  61. package/src/templates/release.yml +23 -17
  62. package/dist/bin/exec-clean.d.ts +0 -3
  63. package/dist/bin/exec-clean.d.ts.map +0 -1
  64. package/dist/bin/exec-clean.js +0 -18
  65. package/dist/bin/exec-husky.d.ts +0 -3
  66. package/dist/bin/exec-husky.d.ts.map +0 -1
  67. package/dist/bin/exec-p.d.ts +0 -3
  68. package/dist/bin/exec-p.d.ts.map +0 -1
  69. package/dist/bin/exec-s.d.ts +0 -3
  70. package/dist/bin/exec-s.d.ts.map +0 -1
  71. package/dist/bin/exec-ts.d.ts +0 -3
  72. package/dist/bin/exec-ts.d.ts.map +0 -1
  73. package/dist/bin/exec-ts.js +0 -28
  74. package/dist/bin/exec-tsc.d.ts +0 -3
  75. package/dist/bin/exec-tsc.d.ts.map +0 -1
  76. package/dist/bin/flow-changeset.d.ts +0 -3
  77. package/dist/bin/flow-changeset.d.ts.map +0 -1
  78. package/dist/bin/flow-changeset.js +0 -18
  79. package/dist/bin/flow-release.d.ts +0 -3
  80. package/dist/bin/flow-release.d.ts.map +0 -1
  81. package/dist/bin/flow-release.js +0 -115
  82. package/dist/bin/lint-biome.d.ts +0 -3
  83. package/dist/bin/lint-biome.d.ts.map +0 -1
  84. package/dist/bin/lint-commit.d.ts +0 -3
  85. package/dist/bin/lint-commit.d.ts.map +0 -1
  86. package/dist/bin/lint-md.d.ts +0 -3
  87. package/dist/bin/lint-md.d.ts.map +0 -1
  88. package/dist/bin/lint-package.d.ts +0 -3
  89. package/dist/bin/lint-package.d.ts.map +0 -1
  90. package/dist/bin/lint-package.js +0 -86
  91. package/dist/bin/lint-package.test.d.ts +0 -2
  92. package/dist/bin/lint-package.test.d.ts.map +0 -1
  93. package/dist/bin/lint-package.test.js +0 -111
  94. package/src/bin/exec-clean.ts +0 -24
  95. package/src/bin/exec-ts.ts +0 -39
  96. package/src/bin/flow-changeset.ts +0 -23
  97. package/src/bin/flow-release.ts +0 -185
  98. package/src/bin/lint-package.test.ts +0 -140
  99. package/src/bin/lint-package.ts +0 -114
  100. /package/dist/bin/{exec-husky.js → exec/husky.js} +0 -0
  101. /package/dist/bin/{exec-p.js → exec/p.js} +0 -0
  102. /package/dist/bin/{exec-s.js → exec/s.js} +0 -0
  103. /package/dist/bin/{exec-tsc.js → exec/tsc.js} +0 -0
  104. /package/dist/bin/{lint-biome.js → lint/biome.js} +0 -0
  105. /package/dist/bin/{lint-commit.js → lint/commit.js} +0 -0
  106. /package/dist/bin/{lint-md.js → lint/md.js} +0 -0
  107. /package/src/bin/{exec-husky.ts → exec/husky.ts} +0 -0
  108. /package/src/bin/{exec-p.ts → exec/p.ts} +0 -0
  109. /package/src/bin/{exec-s.ts → exec/s.ts} +0 -0
  110. /package/src/bin/{exec-tsc.ts → exec/tsc.ts} +0 -0
  111. /package/src/bin/{lint-biome.ts → lint/biome.ts} +0 -0
  112. /package/src/bin/{lint-commit.ts → lint/commit.ts} +0 -0
  113. /package/src/bin/{lint-md.ts → lint/md.ts} +0 -0
@@ -0,0 +1,178 @@
1
+ import { existsSync, mkdirSync, readFileSync, rmSync, writeFileSync } from 'node:fs';
2
+ import { tmpdir } from 'node:os';
3
+ import { join } from 'node:path';
4
+ import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
5
+
6
+ import { bumpVersion, confirm, insertChangelog } from './utils.js';
7
+
8
+ // ---------------------------------------------------------------------------
9
+ // bumpVersion
10
+ // ---------------------------------------------------------------------------
11
+
12
+ describe('bumpVersion', () => {
13
+ it('bumps patch', () => {
14
+ expect(bumpVersion('1.2.3', 'patch')).toBe('1.2.4');
15
+ });
16
+
17
+ it('bumps minor and resets patch', () => {
18
+ expect(bumpVersion('1.2.3', 'minor')).toBe('1.3.0');
19
+ });
20
+
21
+ it('bumps major and resets minor + patch', () => {
22
+ expect(bumpVersion('1.2.3', 'major')).toBe('2.0.0');
23
+ });
24
+
25
+ it('handles zero components', () => {
26
+ expect(bumpVersion('0.0.0', 'patch')).toBe('0.0.1');
27
+ expect(bumpVersion('0.0.0', 'minor')).toBe('0.1.0');
28
+ expect(bumpVersion('0.0.0', 'major')).toBe('1.0.0');
29
+ });
30
+
31
+ it('defaults to patch for unknown bump type', () => {
32
+ expect(bumpVersion('1.2.3', 'unknown')).toBe('1.2.4');
33
+ });
34
+
35
+ it('throws for invalid semver', () => {
36
+ expect(() => bumpVersion('not-semver', 'patch')).toThrow('Invalid semver');
37
+ expect(() => bumpVersion('1.2', 'patch')).toThrow('Invalid semver');
38
+ expect(() => bumpVersion('1.2.x', 'patch')).toThrow('Invalid semver');
39
+ });
40
+ });
41
+
42
+ // ---------------------------------------------------------------------------
43
+ // insertChangelog
44
+ // ---------------------------------------------------------------------------
45
+
46
+ describe('insertChangelog', () => {
47
+ let tmpDir: string;
48
+ let changelogPath: string;
49
+
50
+ beforeEach(() => {
51
+ tmpDir = join(tmpdir(), `flow-utils-test-${Date.now()}-${Math.random().toString(36).slice(2)}`);
52
+ mkdirSync(tmpDir, { recursive: true });
53
+ changelogPath = join(tmpDir, 'CHANGELOG.md');
54
+ });
55
+
56
+ afterEach(() => {
57
+ rmSync(tmpDir, { force: true, recursive: true });
58
+ });
59
+
60
+ it('creates a new file when none exists', () => {
61
+ insertChangelog(changelogPath, '## [1.0.0] - 2025-01-01\n\n- initial release\n');
62
+
63
+ expect(existsSync(changelogPath)).toBe(true);
64
+ const content = readFileSync(changelogPath, 'utf-8');
65
+ expect(content).toContain('# Changelog');
66
+ expect(content).toContain('## [1.0.0] - 2025-01-01');
67
+ });
68
+
69
+ it('inserts before the first existing ## section', () => {
70
+ writeFileSync(changelogPath, '# Changelog\n\n## [1.0.0] - 2025-01-01\n\n- old entry\n');
71
+
72
+ insertChangelog(changelogPath, '## [1.1.0] - 2025-02-01\n\n- new entry\n');
73
+
74
+ const content = readFileSync(changelogPath, 'utf-8');
75
+ const newIdx = content.indexOf('## [1.1.0]');
76
+ const oldIdx = content.indexOf('## [1.0.0]');
77
+ expect(newIdx).toBeLessThan(oldIdx);
78
+ });
79
+
80
+ it('appends when no ## section exists yet', () => {
81
+ writeFileSync(changelogPath, '# Changelog\n');
82
+
83
+ insertChangelog(changelogPath, '## [1.0.0] - 2025-01-01\n\n- initial\n');
84
+
85
+ const content = readFileSync(changelogPath, 'utf-8');
86
+ expect(content).toContain('## [1.0.0]');
87
+ });
88
+
89
+ it('preserves existing entries when inserting', () => {
90
+ writeFileSync(changelogPath, '# Changelog\n\n## [1.0.0] - 2025-01-01\n\n- old entry\n');
91
+
92
+ insertChangelog(changelogPath, '## [1.1.0] - 2025-02-01\n\n- new entry\n');
93
+
94
+ const content = readFileSync(changelogPath, 'utf-8');
95
+ expect(content).toContain('## [1.0.0]');
96
+ expect(content).toContain('- old entry');
97
+ expect(content).toContain('## [1.1.0]');
98
+ expect(content).toContain('- new entry');
99
+ });
100
+
101
+ it('appends when file exists but has no title line', () => {
102
+ writeFileSync(changelogPath, '## [1.0.0] - 2025-01-01\n\n- old entry\n');
103
+
104
+ insertChangelog(changelogPath, '## [1.1.0] - 2025-02-01\n\n- new entry\n');
105
+
106
+ const content = readFileSync(changelogPath, 'utf-8');
107
+ expect(content).toContain('## [1.0.0]');
108
+ expect(content).toContain('## [1.1.0]');
109
+ });
110
+
111
+ it('handles multiple existing entries in the correct order', () => {
112
+ writeFileSync(
113
+ changelogPath,
114
+ '# Changelog\n\n## [1.1.0] - 2025-02-01\n\n- second\n\n## [1.0.0] - 2025-01-01\n\n- first\n',
115
+ );
116
+
117
+ insertChangelog(changelogPath, '## [1.2.0] - 2025-03-01\n\n- third\n');
118
+
119
+ const content = readFileSync(changelogPath, 'utf-8');
120
+ const idx120 = content.indexOf('## [1.2.0]');
121
+ const idx110 = content.indexOf('## [1.1.0]');
122
+ const idx100 = content.indexOf('## [1.0.0]');
123
+ expect(idx120).toBeLessThan(idx110);
124
+ expect(idx110).toBeLessThan(idx100);
125
+ });
126
+ });
127
+
128
+ // ---------------------------------------------------------------------------
129
+ // confirm
130
+ // ---------------------------------------------------------------------------
131
+
132
+ describe('confirm', () => {
133
+ let tmpDir: string;
134
+ let inputFile: string;
135
+
136
+ beforeEach(() => {
137
+ tmpDir = join(tmpdir(), `confirm-test-${Date.now()}-${Math.random().toString(36).slice(2)}`);
138
+ mkdirSync(tmpDir, { recursive: true });
139
+ inputFile = join(tmpDir, 'input');
140
+ vi.spyOn(process.stdout, 'write').mockImplementation(() => true);
141
+ });
142
+
143
+ afterEach(() => {
144
+ rmSync(tmpDir, { force: true, recursive: true });
145
+ vi.restoreAllMocks();
146
+ });
147
+
148
+ it('returns true for "y"', () => {
149
+ writeFileSync(inputFile, 'y\n');
150
+ expect(confirm('Continue?', inputFile)).toBe(true);
151
+ });
152
+
153
+ it('returns true for "Y"', () => {
154
+ writeFileSync(inputFile, 'Y\n');
155
+ expect(confirm('Continue?', inputFile)).toBe(true);
156
+ });
157
+
158
+ it('returns false for "n"', () => {
159
+ writeFileSync(inputFile, 'n\n');
160
+ expect(confirm('Continue?', inputFile)).toBe(false);
161
+ });
162
+
163
+ it('returns false for empty input', () => {
164
+ writeFileSync(inputFile, '\n');
165
+ expect(confirm('Continue?', inputFile)).toBe(false);
166
+ });
167
+
168
+ it('returns false for any other input', () => {
169
+ writeFileSync(inputFile, 'yes\n');
170
+ expect(confirm('Continue?', inputFile)).toBe(false);
171
+ });
172
+
173
+ it('writes the prompt to stdout', () => {
174
+ writeFileSync(inputFile, 'y\n');
175
+ confirm('Ship it?', inputFile);
176
+ expect(process.stdout.write).toHaveBeenCalledWith('Ship it? (y/N) ');
177
+ });
178
+ });
@@ -0,0 +1,109 @@
1
+ /**
2
+ * Shared utilities for flow-release, flow-ship, and flow-hotfix.
3
+ *
4
+ * Git commands use execFileSync (not a shell string) so user-provided
5
+ * strings such as commit messages are never interpolated by the shell.
6
+ * pnpm script invocations use execSync via shell since script names are
7
+ * developer-controlled and pnpm itself is resolved through PATH.
8
+ */
9
+ import { execFileSync, execSync } from 'node:child_process';
10
+ import { closeSync, existsSync, openSync, readFileSync, readSync, writeFileSync } from 'node:fs';
11
+
12
+ /**
13
+ * Run a git command with the given arguments.
14
+ * Output is inherited (visible to the user).
15
+ */
16
+ export const git = (...args: string[]): void => {
17
+ console.log(`$ git ${args.join(' ')}`);
18
+ execFileSync('git', args, { stdio: 'inherit' });
19
+ };
20
+
21
+ /**
22
+ * Run a git command and return trimmed stdout.
23
+ * Stderr is suppressed; throws on non-zero exit.
24
+ */
25
+ export const gitRead = (...args: string[]): string =>
26
+ execFileSync('git', args, { encoding: 'utf-8' }).trim();
27
+
28
+ /**
29
+ * Run a pnpm script via the shell.
30
+ * Only pass developer-controlled script names — never user input.
31
+ */
32
+ export const runScript = (script: string): void => {
33
+ console.log(`$ pnpm ${script}`);
34
+ execSync(`pnpm ${script}`, { stdio: 'inherit' });
35
+ };
36
+
37
+ /**
38
+ * Bump a semver string by the given increment type.
39
+ */
40
+ export const bumpVersion = (current: string, bump: string): string => {
41
+ const parts = current.split('.').map(Number);
42
+ if (parts.length !== 3 || parts.some(Number.isNaN)) {
43
+ throw new Error(`Invalid semver: ${current}`);
44
+ }
45
+ const [major, minor, patch] = parts as [number, number, number];
46
+ if (bump === 'major') return `${major + 1}.0.0`;
47
+ if (bump === 'minor') return `${major}.${minor + 1}.0`;
48
+ return `${major}.${minor}.${patch + 1}`;
49
+ };
50
+
51
+ /**
52
+ * Insert a new changelog entry after the title line of CHANGELOG.md.
53
+ * Creates the file if it does not exist.
54
+ */
55
+ export const insertChangelog = (changelogPath: string, entry: string): void => {
56
+ if (existsSync(changelogPath)) {
57
+ const existing = readFileSync(changelogPath, 'utf-8');
58
+ const insertAt = existing.indexOf('\n## ');
59
+ if (insertAt === -1) {
60
+ writeFileSync(changelogPath, `${existing.trimEnd()}\n\n${entry}`);
61
+ } else {
62
+ writeFileSync(
63
+ changelogPath,
64
+ `${existing.slice(0, insertAt)}\n\n${entry}${existing.slice(insertAt + 1)}`,
65
+ );
66
+ }
67
+ } else {
68
+ writeFileSync(changelogPath, `# Changelog\n\n${entry}`);
69
+ }
70
+ };
71
+
72
+ /**
73
+ * Run quality checks. Throws if any step fails.
74
+ */
75
+ export const runQualityChecks = (): void => {
76
+ runScript('build');
77
+ runScript('typecheck');
78
+ runScript('report');
79
+ };
80
+
81
+ /**
82
+ * Check whether a branch exists locally or on origin.
83
+ * Requires a prior `git fetch` to have up-to-date remote refs.
84
+ */
85
+ export const branchExists = (name: string): boolean =>
86
+ gitRead('branch', '--list', name) !== ''
87
+ || gitRead('branch', '--list', '--remotes', `origin/${name}`) !== '';
88
+
89
+ /**
90
+ * Prompt the user for confirmation and return true only if they type "y" or "Y".
91
+ * Defaults to "no" on empty input, so pressing Enter alone aborts.
92
+ * The ttyPath parameter is injectable for testing.
93
+ */
94
+ export const confirm = (prompt: string, ttyPath = '/dev/tty'): boolean => {
95
+ process.stdout.write(`${prompt} (y/N) `);
96
+ const buf = Buffer.alloc(1024);
97
+ let fd: number;
98
+ let shouldClose = false;
99
+ try {
100
+ fd = openSync(ttyPath, 'r');
101
+ shouldClose = true;
102
+ } catch {
103
+ fd = process.stdin.fd;
104
+ }
105
+ const bytesRead = readSync(fd, buf, 0, buf.length, null);
106
+ if (shouldClose) closeSync(fd);
107
+ const answer = buf.slice(0, bytesRead).toString().trim();
108
+ return answer === 'y' || answer === 'Y';
109
+ };
@@ -0,0 +1,83 @@
1
+ import { describe, expect, it } from 'vitest';
2
+
3
+ import { reorderConditions } from './package.js';
4
+
5
+ describe('lint-package', () => {
6
+ describe('reorderConditions', () => {
7
+ it('reorders types before default when default comes first', () => {
8
+ const input = {
9
+ './foo': {
10
+ default: './dist/foo.js',
11
+ types: './dist/foo.d.ts',
12
+ },
13
+ };
14
+
15
+ const result = reorderConditions(input);
16
+
17
+ expect(Object.keys(result['./foo'] as Record<string, unknown>)).toEqual(['types', 'default']);
18
+ });
19
+
20
+ it('does not modify when types already comes before default', () => {
21
+ const input = {
22
+ './foo': {
23
+ default: './dist/foo.js',
24
+ types: './dist/foo.d.ts',
25
+ },
26
+ };
27
+
28
+ const result = reorderConditions(input);
29
+
30
+ expect(Object.keys(result['./foo'] as Record<string, unknown>)).toEqual(['types', 'default']);
31
+ });
32
+
33
+ it('handles multiple exports with mixed order', () => {
34
+ const input = {
35
+ './a': { default: './dist/a.js', types: './dist/a.d.ts' },
36
+ './b': { default: './dist/b.js', types: './dist/b.d.ts' },
37
+ };
38
+
39
+ const result = reorderConditions(input);
40
+
41
+ expect(Object.keys(result['./a'] as Record<string, unknown>)[0]).toBe('types');
42
+ expect(Object.keys(result['./b'] as Record<string, unknown>)[0]).toBe('types');
43
+ });
44
+
45
+ it('preserves other keys after types', () => {
46
+ const input = {
47
+ './foo': {
48
+ default: './dist/foo.js',
49
+ import: './dist/foo.mjs',
50
+ require: './dist/foo.cjs',
51
+ types: './dist/foo.d.ts',
52
+ },
53
+ };
54
+
55
+ const result = reorderConditions(input);
56
+ const keys = Object.keys(result['./foo'] as Record<string, unknown>);
57
+
58
+ expect(keys[0]).toBe('types');
59
+ expect(keys.slice(1)).toEqual(['default', 'import', 'require']);
60
+ });
61
+
62
+ it('handles exports without types or default', () => {
63
+ const input = { './styles.css': './dist/styles.css' };
64
+
65
+ expect(reorderConditions(input)).toEqual(input);
66
+ });
67
+
68
+ it('handles deeply nested condition objects', () => {
69
+ const input = {
70
+ './foo': {
71
+ browser: { default: './dist/foo.browser.js', types: './dist/foo.browser.d.ts' },
72
+ node: { default: './dist/foo.node.js', types: './dist/foo.node.d.ts' },
73
+ },
74
+ };
75
+
76
+ const result = reorderConditions(input);
77
+ const foo = result['./foo'] as Record<string, Record<string, unknown>>;
78
+
79
+ expect(Object.keys(foo.node as object)[0]).toBe('types');
80
+ expect(Object.keys(foo.browser as object)[0]).toBe('types');
81
+ });
82
+ });
83
+ });
@@ -0,0 +1,108 @@
1
+ #!/usr/bin/env node
2
+ /**
3
+ * Sorts package.json files using sort-package-json and fixes
4
+ * exports condition order (types must come before default for TypeScript).
5
+ */
6
+ import { execSync } from 'node:child_process';
7
+ import { existsSync, readFileSync, writeFileSync } from 'node:fs';
8
+ import { dirname, join, resolve } from 'node:path';
9
+ import { fileURLToPath } from 'node:url';
10
+
11
+ /**
12
+ * Recursively reorder exports condition objects so that `types` always
13
+ * appears before `default`. Returns a new object; does not mutate input.
14
+ */
15
+ export function reorderConditions(obj: Record<string, unknown>): Record<string, unknown> {
16
+ if (typeof obj !== 'object' || obj === null) return obj;
17
+
18
+ const processed: Record<string, unknown> = {};
19
+ for (const [key, value] of Object.entries(obj)) {
20
+ processed[key] =
21
+ typeof value === 'object' && value !== null && !Array.isArray(value)
22
+ ? reorderConditions(value as Record<string, unknown>)
23
+ : value;
24
+ }
25
+
26
+ if ('types' in processed && 'default' in processed) {
27
+ const keys = Object.keys(processed);
28
+ if (keys.indexOf('default') < keys.indexOf('types')) {
29
+ const reordered: Record<string, unknown> = { types: processed.types };
30
+ for (const key of keys) {
31
+ if (key !== 'types') reordered[key] = processed[key];
32
+ }
33
+ return reordered;
34
+ }
35
+ }
36
+
37
+ return processed;
38
+ }
39
+
40
+ /**
41
+ * Fix exports condition order in a package.json file.
42
+ * Returns true if the file needs changes (or was changed when fix=true).
43
+ */
44
+ export function fixExportsOrder(filePath: string, fix: boolean): boolean {
45
+ const fullPath = resolve(process.cwd(), filePath);
46
+ if (!existsSync(fullPath)) return false;
47
+
48
+ const content = readFileSync(fullPath, 'utf-8');
49
+ const pkg = JSON.parse(content) as Record<string, unknown>;
50
+
51
+ if (!pkg.exports || typeof pkg.exports !== 'object') return false;
52
+
53
+ const fixed = reorderConditions(pkg.exports as Record<string, unknown>);
54
+ const changed = JSON.stringify(fixed) !== JSON.stringify(pkg.exports);
55
+
56
+ if (changed && fix) {
57
+ writeFileSync(fullPath, `${JSON.stringify({ ...pkg, exports: fixed }, null, 2)}\n`);
58
+ }
59
+
60
+ return changed;
61
+ }
62
+
63
// ---------------------------------------------------------------------------
// CLI entry point — only runs when executed directly
// ---------------------------------------------------------------------------
// Direct-execution guard: compare this module's resolved file path against
// the script path Node was launched with, so importing the exported helpers
// (e.g. from tests) does not trigger the CLI side effects below.
if (fileURLToPath(import.meta.url) === resolve(process.argv[1] ?? '')) {
  const __dirname = dirname(fileURLToPath(import.meta.url));
  // Three directory levels up from this file — presumably the package root
  // containing node_modules (dist/bin/lint -> root); confirm if the build
  // layout ever changes.
  const devRoot = resolve(__dirname, '../../..');

  const sortPkgBin = join(devRoot, 'node_modules/.bin/sort-package-json');
  const sortPkgBinAlt = join(devRoot, 'node_modules/sort-package-json/cli.js');

  // Resolve the sort-package-json executable: prefer the local .bin shim,
  // then the package's cli.js run through node, and finally fall back to
  // npx (which may hit the network on first use).
  let bin = '';
  if (existsSync(sortPkgBin)) {
    bin = sortPkgBin;
  } else if (existsSync(sortPkgBinAlt)) {
    bin = `node ${sortPkgBinAlt}`;
  } else {
    bin = 'npx sort-package-json';
  }

  // CLI arguments: an optional --fix flag plus target files
  // (defaults to ./package.json when no files are given).
  const args = process.argv.slice(2);
  const fixMode = args.includes('--fix');
  const files = args.filter((arg) => arg !== '--fix');
  const targets = files.length > 0 ? files : ['package.json'];

  try {
    // Without --fix, run sort-package-json in --check mode (report only).
    const checkFlag = fixMode ? '' : '--check';
    // NOTE(review): bin and targets are interpolated into a shell command;
    // paths containing spaces or shell metacharacters would break here.
    // Acceptable while targets are developer-controlled.
    execSync(`${bin} ${checkFlag} ${targets.join(' ')}`.trim(), { stdio: 'inherit' });
  } catch {
    // Non-zero exit from sort-package-json: unsorted files or tool failure.
    process.exit(1);
  }

  // Second pass: our own exports-condition-order check (or fix) per target.
  let hasExportsIssues = false;
  for (const file of targets) {
    const needsFix = fixExportsOrder(file, fixMode);
    if (needsFix && !fixMode) {
      console.error(
        `${file}: exports condition order is incorrect (types must come before default)`,
      );
      hasExportsIssues = true;
    }
  }

  if (hasExportsIssues) {
    process.exit(1);
  }
}
@@ -2,21 +2,23 @@
2
2
  # Copy this file to .github/workflows/release.yml in your package
3
3
  #
4
4
  # Required setup:
5
- # 1. Create .changeset/config.json (see changeset-config.json template)
6
- # 2. Add "release": "flow-release" to package.json scripts
7
- # 3. First publish must be done locally (see docs/toolchain/changesets.md)
5
+ # 1. Add "flow:release": "flow-release", "flow:ship": "flow-ship" to package.json scripts
6
+ # 2. Create staging and production branches:
7
+ # git checkout -b staging && git push -u origin staging
8
+ # git checkout -b production && git push -u origin production
9
+ # 3. First npm publish must be done locally:
10
+ # pnpm build && npm publish --access public
8
11
  #
9
12
  # Usage:
10
- # - Run `pnpm release patch|minor|major "message"` locally
11
- # - Push to main branch
12
- # - This workflow publishes to npm and creates a GitHub release
13
+ # - Run `pnpm flow:release "message"` to deploy changes to staging
14
+ # - Run `pnpm flow:ship patch|minor|major` to version, promote to production, and trigger this workflow
13
15
 
14
16
  name: Release
15
17
 
16
18
  on:
17
19
  push:
18
20
  branches:
19
- - main
21
+ - production
20
22
 
21
23
  concurrency: ${{ github.workflow }}-${{ github.ref }}
22
24
 
@@ -26,7 +28,6 @@ jobs:
26
28
  runs-on: ubuntu-latest
27
29
  permissions:
28
30
  contents: write
29
- pull-requests: write
30
31
  id-token: write
31
32
  steps:
32
33
  - name: Checkout
@@ -52,14 +53,10 @@ jobs:
52
53
  - name: Build
53
54
  run: pnpm build
54
55
 
55
- - name: Type check
56
- run: pnpm typecheck
57
-
58
- - name: Test with coverage
59
- run: pnpm report
60
-
61
56
  - name: Publish to npm
62
57
  id: publish
58
+ env:
59
+ NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
63
60
  run: |
64
61
  PACKAGE_NAME=$(node -p "require('./package.json').name")
65
62
  CURRENT_VERSION=$(node -p "require('./package.json').version")
@@ -71,7 +68,7 @@ jobs:
71
68
  echo "::error::Run: pnpm build && npm publish --access public"
72
69
  exit 1
73
70
  elif [ "$PUBLISHED_VERSION" != "$CURRENT_VERSION" ]; then
74
- echo "Publishing $PACKAGE_NAME@$CURRENT_VERSION (npm has v$PUBLISHED_VERSION)"
71
+ echo "Publishing $PACKAGE_NAME@$CURRENT_VERSION (npm has $PUBLISHED_VERSION)"
75
72
  npm publish --access public --provenance
76
73
  echo "published=true" >> $GITHUB_OUTPUT
77
74
  else
@@ -79,13 +76,22 @@ jobs:
79
76
  echo "published=false" >> $GITHUB_OUTPUT
80
77
  fi
81
78
 
79
+ - name: Extract changelog entry
80
+ if: steps.publish.outputs.published == 'true'
81
+ id: changelog
82
+ run: |
83
+ # Extract the top section of CHANGELOG.md (from first ## heading to the next one)
84
+ ENTRY=$(awk '/^## /{found++} found==1{print} found==2{exit}' CHANGELOG.md)
85
+ echo "entry<<EOF" >> $GITHUB_OUTPUT
86
+ echo "$ENTRY" >> $GITHUB_OUTPUT
87
+ echo "EOF" >> $GITHUB_OUTPUT
88
+
82
89
  - name: Create GitHub Release
83
90
  if: steps.publish.outputs.published == 'true'
84
91
  uses: softprops/action-gh-release@v2
85
92
  with:
86
93
  tag_name: v${{ steps.publish.outputs.current_version }}
87
94
  name: v${{ steps.publish.outputs.current_version }}
88
- body_path: CHANGELOG.md
89
- generate_release_notes: true
95
+ body: ${{ steps.changelog.outputs.entry }}
90
96
  env:
91
97
  GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
@@ -1,3 +0,0 @@
1
- #!/usr/bin/env node
2
- export {};
3
- //# sourceMappingURL=exec-clean.d.ts.map
@@ -1 +0,0 @@
1
- {"version":3,"file":"exec-clean.d.ts","sourceRoot":"","sources":["../../src/bin/exec-clean.ts"],"names":[],"mappings":""}
@@ -1,18 +0,0 @@
1
- #!/usr/bin/env node
2
- import { spawn } from 'node:child_process';
3
- import { createRequire } from 'node:module';
4
- import path from 'node:path';
5
- const require = createRequire(import.meta.url);
6
- const pkgPath = require.resolve('rimraf/package.json');
7
- const pkg = require(pkgPath);
8
- let binRel = typeof pkg.bin === 'string' ? pkg.bin : pkg.bin?.rimraf;
9
- if (!binRel) {
10
- console.error('Unable to locate rimraf binary from package.json bin field');
11
- process.exit(1);
12
- }
13
- if (binRel.startsWith('./'))
14
- binRel = binRel.slice(2);
15
- const bin = path.join(path.dirname(pkgPath), binRel);
16
- const args = process.argv.slice(2);
17
- const child = spawn(process.execPath, [bin, ...args], { stdio: 'inherit' });
18
- child.on('exit', (code) => process.exit(code ?? 0));
@@ -1,3 +0,0 @@
1
- #!/usr/bin/env node
2
- export {};
3
- //# sourceMappingURL=exec-husky.d.ts.map
@@ -1 +0,0 @@
1
- {"version":3,"file":"exec-husky.d.ts","sourceRoot":"","sources":["../../src/bin/exec-husky.ts"],"names":[],"mappings":""}
@@ -1,3 +0,0 @@
1
- #!/usr/bin/env node
2
- export {};
3
- //# sourceMappingURL=exec-p.d.ts.map
@@ -1 +0,0 @@
1
- {"version":3,"file":"exec-p.d.ts","sourceRoot":"","sources":["../../src/bin/exec-p.ts"],"names":[],"mappings":""}
@@ -1,3 +0,0 @@
1
- #!/usr/bin/env node
2
- export {};
3
- //# sourceMappingURL=exec-s.d.ts.map
@@ -1 +0,0 @@
1
- {"version":3,"file":"exec-s.d.ts","sourceRoot":"","sources":["../../src/bin/exec-s.ts"],"names":[],"mappings":""}
@@ -1,3 +0,0 @@
1
- #!/usr/bin/env node
2
- export {};
3
- //# sourceMappingURL=exec-ts.d.ts.map
@@ -1 +0,0 @@
1
- {"version":3,"file":"exec-ts.d.ts","sourceRoot":"","sources":["../../src/bin/exec-ts.ts"],"names":[],"mappings":""}
@@ -1,28 +0,0 @@
1
- #!/usr/bin/env node
2
- import { spawn } from 'node:child_process';
3
- import { createRequire } from 'node:module';
4
- import path from 'node:path';
5
- const args = process.argv.slice(2);
6
- if (args.length === 0) {
7
- console.error('Usage: exec-ts <script.ts> [args...]');
8
- process.exit(1);
9
- }
10
- const [scriptArg, ...rest] = args;
11
- const script = scriptArg ?? '';
12
- const require = createRequire(import.meta.url);
13
- const pkgPath = require.resolve('tsx/package.json');
14
- const pkg = require(pkgPath);
15
- const binRel = pkg.bin;
16
- const binPath = typeof binRel === 'string'
17
- ? binRel
18
- : typeof binRel === 'object' && binRel !== null && 'tsx' in binRel
19
- ? binRel.tsx
20
- : undefined;
21
- if (!binPath) {
22
- console.error('Unable to locate tsx binary from package.json bin field');
23
- process.exit(1);
24
- }
25
- const bin = path.join(path.dirname(pkgPath), binPath);
26
- const spawnOptions = { stdio: 'inherit' };
27
- const child = spawn(process.execPath, [bin, script, ...rest], spawnOptions);
28
- child.on('exit', (code) => process.exit(code ?? 0));
@@ -1,3 +0,0 @@
1
- #!/usr/bin/env node
2
- export {};
3
- //# sourceMappingURL=exec-tsc.d.ts.map
@@ -1 +0,0 @@
1
- {"version":3,"file":"exec-tsc.d.ts","sourceRoot":"","sources":["../../src/bin/exec-tsc.ts"],"names":[],"mappings":""}
@@ -1,3 +0,0 @@
1
- #!/usr/bin/env node
2
- export {};
3
- //# sourceMappingURL=flow-changeset.d.ts.map
@@ -1 +0,0 @@
1
- {"version":3,"file":"flow-changeset.d.ts","sourceRoot":"","sources":["../../src/bin/flow-changeset.ts"],"names":[],"mappings":""}