happy-stacks 0.4.0 → 0.5.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +64 -33
- package/bin/happys.mjs +44 -1
- package/docs/codex-mcp-resume.md +130 -0
- package/docs/commit-audits/happy/leeroy-wip.commit-analysis.md +17640 -0
- package/docs/commit-audits/happy/leeroy-wip.commit-export.fuller-stat.md +3845 -0
- package/docs/commit-audits/happy/leeroy-wip.commit-inventory.md +102 -0
- package/docs/commit-audits/happy/leeroy-wip.commit-manual-review.md +1452 -0
- package/docs/commit-audits/happy/leeroy-wip.manual-review-queue.md +116 -0
- package/docs/happy-development.md +1 -2
- package/docs/monorepo-migration.md +286 -0
- package/docs/server-flavors.md +19 -3
- package/docs/stacks.md +35 -0
- package/package.json +1 -1
- package/scripts/auth.mjs +21 -3
- package/scripts/build.mjs +1 -1
- package/scripts/dev.mjs +20 -7
- package/scripts/doctor.mjs +0 -4
- package/scripts/edison.mjs +2 -2
- package/scripts/env.mjs +150 -0
- package/scripts/env_cmd.test.mjs +128 -0
- package/scripts/init.mjs +5 -2
- package/scripts/install.mjs +99 -57
- package/scripts/migrate.mjs +3 -12
- package/scripts/monorepo.mjs +1096 -0
- package/scripts/monorepo_port.test.mjs +1470 -0
- package/scripts/review.mjs +715 -24
- package/scripts/review_pr.mjs +5 -20
- package/scripts/run.mjs +21 -15
- package/scripts/setup.mjs +147 -25
- package/scripts/setup_pr.mjs +19 -28
- package/scripts/stack.mjs +493 -157
- package/scripts/stack_archive_cmd.test.mjs +91 -0
- package/scripts/stack_editor_workspace_monorepo_root.test.mjs +65 -0
- package/scripts/stack_env_cmd.test.mjs +87 -0
- package/scripts/stack_happy_cmd.test.mjs +126 -0
- package/scripts/stack_interactive_monorepo_group.test.mjs +71 -0
- package/scripts/stack_monorepo_defaults.test.mjs +62 -0
- package/scripts/stack_monorepo_server_light_from_happy_spec.test.mjs +66 -0
- package/scripts/stack_server_flavors_defaults.test.mjs +55 -0
- package/scripts/stack_shorthand_cmd.test.mjs +55 -0
- package/scripts/stack_wt_list.test.mjs +128 -0
- package/scripts/tui.mjs +88 -2
- package/scripts/utils/cli/cli_registry.mjs +20 -5
- package/scripts/utils/cli/cwd_scope.mjs +56 -2
- package/scripts/utils/cli/cwd_scope.test.mjs +40 -7
- package/scripts/utils/cli/prereqs.mjs +8 -5
- package/scripts/utils/cli/prereqs.test.mjs +34 -0
- package/scripts/utils/cli/wizard.mjs +17 -9
- package/scripts/utils/cli/wizard_prompt_worktree_source_lazy.test.mjs +60 -0
- package/scripts/utils/dev/daemon.mjs +14 -1
- package/scripts/utils/dev/expo_dev.mjs +188 -4
- package/scripts/utils/dev/server.mjs +21 -17
- package/scripts/utils/edison/git_roots.mjs +29 -0
- package/scripts/utils/edison/git_roots.test.mjs +36 -0
- package/scripts/utils/env/env.mjs +7 -3
- package/scripts/utils/env/env_file.mjs +4 -2
- package/scripts/utils/env/env_file.test.mjs +44 -0
- package/scripts/utils/git/worktrees.mjs +63 -12
- package/scripts/utils/git/worktrees_monorepo.test.mjs +54 -0
- package/scripts/utils/net/tcp_forward.mjs +162 -0
- package/scripts/utils/paths/paths.mjs +118 -3
- package/scripts/utils/paths/paths_monorepo.test.mjs +58 -0
- package/scripts/utils/paths/paths_server_flavors.test.mjs +45 -0
- package/scripts/utils/proc/commands.mjs +2 -3
- package/scripts/utils/proc/pm.mjs +113 -16
- package/scripts/utils/proc/pm_spawn.test.mjs +76 -0
- package/scripts/utils/proc/pm_stack_cache_env.test.mjs +142 -0
- package/scripts/utils/proc/proc.mjs +68 -10
- package/scripts/utils/proc/proc.test.mjs +77 -0
- package/scripts/utils/review/chunks.mjs +55 -0
- package/scripts/utils/review/chunks.test.mjs +51 -0
- package/scripts/utils/review/findings.mjs +165 -0
- package/scripts/utils/review/findings.test.mjs +85 -0
- package/scripts/utils/review/head_slice.mjs +153 -0
- package/scripts/utils/review/head_slice.test.mjs +91 -0
- package/scripts/utils/review/instructions/deep.md +20 -0
- package/scripts/utils/review/runners/coderabbit.mjs +56 -14
- package/scripts/utils/review/runners/coderabbit.test.mjs +59 -0
- package/scripts/utils/review/runners/codex.mjs +32 -22
- package/scripts/utils/review/runners/codex.test.mjs +35 -0
- package/scripts/utils/review/slices.mjs +140 -0
- package/scripts/utils/review/slices.test.mjs +32 -0
- package/scripts/utils/server/flavor_scripts.mjs +98 -0
- package/scripts/utils/server/flavor_scripts.test.mjs +146 -0
- package/scripts/utils/server/prisma_import.mjs +37 -0
- package/scripts/utils/server/prisma_import.test.mjs +70 -0
- package/scripts/utils/server/ui_env.mjs +14 -0
- package/scripts/utils/server/ui_env.test.mjs +46 -0
- package/scripts/utils/server/validate.mjs +53 -16
- package/scripts/utils/server/validate.test.mjs +89 -0
- package/scripts/utils/stack/editor_workspace.mjs +4 -4
- package/scripts/utils/stack/interactive_stack_config.mjs +185 -0
- package/scripts/utils/stack/startup.mjs +113 -13
- package/scripts/utils/stack/startup_server_light_dirs.test.mjs +64 -0
- package/scripts/utils/stack/startup_server_light_generate.test.mjs +70 -0
- package/scripts/utils/stack/startup_server_light_legacy.test.mjs +88 -0
- package/scripts/utils/tailscale/ip.mjs +116 -0
- package/scripts/utils/ui/ansi.mjs +39 -0
- package/scripts/where.mjs +2 -2
- package/scripts/worktrees.mjs +627 -137
- package/scripts/worktrees_archive_cmd.test.mjs +245 -0
- package/scripts/worktrees_cursor_monorepo_root.test.mjs +63 -0
- package/scripts/worktrees_list_specs_no_recurse.test.mjs +33 -0
- package/scripts/worktrees_monorepo_use_group.test.mjs +67 -0
|
@@ -1,19 +1,61 @@
|
|
|
1
1
|
import { runCaptureResult } from '../../proc/proc.mjs';
|
|
2
|
+
import { join } from 'node:path';
|
|
2
3
|
|
|
3
|
-
|
|
4
|
-
const
|
|
5
|
-
|
|
6
|
-
|
|
7
|
-
|
|
8
|
-
|
|
9
|
-
|
|
10
|
-
|
|
11
|
-
|
|
12
|
-
|
|
13
|
-
|
|
14
|
-
|
|
4
|
+
// The CodeRabbit CLI accepts exactly these review types.
const KNOWN_REVIEW_TYPES = new Set(['all', 'committed', 'uncommitted']);

/**
 * Normalize the CodeRabbit `--type` value.
 * Nullish/blank input falls back to 'committed'; any other value outside the
 * known set is rejected with a descriptive error.
 */
function normalizeType(raw) {
  const value = String(raw ?? '').trim().toLowerCase();
  if (value === '') return 'committed';
  if (KNOWN_REVIEW_TYPES.has(value)) return value;
  throw new Error(`[review] invalid coderabbit type: ${raw} (expected: all|committed|uncommitted)`);
}

/**
 * Build the argv for a `coderabbit review` invocation.
 *
 * @param {object} opts
 * @param {string} opts.repoDir - repository to review (passed as --cwd)
 * @param {string} [opts.baseRef] - base ref for the diff; mutually exclusive with baseCommit
 * @param {string} [opts.baseCommit] - base commit for the diff; mutually exclusive with baseRef
 * @param {string} [opts.type] - review type (all|committed|uncommitted), defaults to committed
 * @param {string[]} [opts.configFiles] - extra instruction files passed via --config
 * @returns {string[]} argv (excluding the `coderabbit` binary itself)
 * @throws when both baseRef and baseCommit are set, or the type is invalid
 */
export function buildCodeRabbitReviewArgs({ repoDir, baseRef, baseCommit, type, configFiles }) {
  const args = ['review', '--plain', '--no-color', '--type', normalizeType(type), '--cwd', repoDir];

  const base = String(baseRef ?? '').trim();
  const commit = String(baseCommit ?? '').trim();
  if (base && commit) {
    throw new Error('[review] coderabbit: baseRef and baseCommit are mutually exclusive');
  }
  if (base) args.push('--base', base);
  if (commit) args.push('--base-commit', commit);

  const configs = Array.isArray(configFiles) ? configFiles.filter(Boolean) : [];
  if (configs.length > 0) args.push('--config', ...configs);
  return args;
}
|
|
24
|
+
|
|
25
|
+
/**
 * Build the child-process environment for the CodeRabbit CLI.
 *
 * When `homeDir` is non-blank, HOME/USERPROFILE and the XDG base directories
 * are redirected under it so the CLI's credentials/cache stay sandboxed.
 * The input `env` object is never mutated.
 *
 * @param {object} opts
 * @param {object} [opts.env] - base environment to copy
 * @param {string} [opts.homeDir] - sandbox home directory (optional)
 * @returns {object} merged environment
 */
export function buildCodeRabbitEnv({ env, homeDir }) {
  const result = { ...(env ?? {}) };
  const home = String(homeDir ?? '').trim();
  if (home === '') return result;

  return Object.assign(result, {
    HOME: home,
    USERPROFILE: home,
    CODERABBIT_HOME: join(home, '.coderabbit'),
    XDG_CONFIG_HOME: join(home, '.config'),
    XDG_CACHE_HOME: join(home, '.cache'),
    XDG_STATE_HOME: join(home, '.local', 'state'),
    XDG_DATA_HOME: join(home, '.local', 'share'),
  });
}
|
|
19
39
|
|
|
40
|
+
/**
 * Run a CodeRabbit review against `repoDir` and capture its output.
 *
 * Honors HAPPY_STACKS_CODERABBIT_HOME_DIR (or the legacy
 * HAPPY_LOCAL_CODERABBIT_HOME_DIR) from `env` to sandbox the CLI's home.
 * Returns the raw capture result plus `stdout`/`stderr` aliases for
 * `out`/`err`.
 */
export async function runCodeRabbitReview({
  repoDir,
  baseRef,
  baseCommit,
  env,
  type = 'committed',
  configFiles = [],
  streamLabel,
  teeFile,
  teeLabel,
}) {
  // Prefer the new env var name; fall back to the legacy one.
  const sandboxHome = (env?.HAPPY_STACKS_CODERABBIT_HOME_DIR ?? env?.HAPPY_LOCAL_CODERABBIT_HOME_DIR ?? '')
    .toString()
    .trim();

  const cliArgs = buildCodeRabbitReviewArgs({ repoDir, baseRef, baseCommit, type, configFiles });
  const result = await runCaptureResult('coderabbit', cliArgs, {
    cwd: repoDir,
    env: buildCodeRabbitEnv({ env, homeDir: sandboxHome }),
    streamLabel,
    teeFile,
    teeLabel,
  });

  // Callers expect stdout/stderr names alongside the capture result's out/err.
  return { ...result, stdout: result.out, stderr: result.err };
}
|
|
@@ -0,0 +1,59 @@
|
|
|
1
|
+
import test from 'node:test';
import assert from 'node:assert/strict';
import { join } from 'node:path';

import { buildCodeRabbitEnv, buildCodeRabbitReviewArgs } from './coderabbit.mjs';

// Every arg list starts with this fixed preamble (type + cwd vary per call).
const preamble = (type, cwd) => ['review', '--plain', '--no-color', '--type', type, '--cwd', cwd];

test('buildCodeRabbitReviewArgs builds committed review args by default', () => {
  const actual = buildCodeRabbitReviewArgs({ repoDir: '/tmp/repo', baseRef: 'upstream/main', type: undefined, configFiles: [] });
  assert.deepEqual(actual, [...preamble('committed', '/tmp/repo'), '--base', 'upstream/main']);
});

test('buildCodeRabbitReviewArgs uses --base-commit when provided', () => {
  const actual = buildCodeRabbitReviewArgs({ repoDir: '/tmp/repo', baseCommit: 'abc123', type: 'committed', configFiles: [] });
  assert.deepEqual(actual, [...preamble('committed', '/tmp/repo'), '--base-commit', 'abc123']);
});

test('buildCodeRabbitReviewArgs rejects providing both baseRef and baseCommit', () => {
  const build = () =>
    buildCodeRabbitReviewArgs({ repoDir: '/tmp/repo', baseRef: 'upstream/main', baseCommit: 'abc123', type: 'committed', configFiles: [] });
  assert.throws(build, /mutually exclusive/);
});

test('buildCodeRabbitReviewArgs includes --config when files are provided', () => {
  const actual = buildCodeRabbitReviewArgs({
    repoDir: '/tmp/repo',
    baseRef: 'upstream/main',
    type: 'committed',
    configFiles: ['/tmp/a.md', '/tmp/b.md'],
  });
  assert.deepEqual(actual, [
    ...preamble('committed', '/tmp/repo'),
    '--base',
    'upstream/main',
    '--config',
    '/tmp/a.md',
    '/tmp/b.md',
  ]);
});

test('buildCodeRabbitEnv overrides HOME/XDG paths when a homeDir is provided', () => {
  const home = '/tmp/cr-home';
  const env = buildCodeRabbitEnv({ env: { PATH: '/bin' }, homeDir: home });
  assert.equal(env.PATH, '/bin');
  assert.equal(env.HOME, home);
  assert.equal(env.CODERABBIT_HOME, join(home, '.coderabbit'));
  assert.equal(env.XDG_CONFIG_HOME, join(home, '.config'));
  assert.equal(env.XDG_CACHE_HOME, join(home, '.cache'));
  assert.equal(env.XDG_STATE_HOME, join(home, '.local', 'state'));
  assert.equal(env.XDG_DATA_HOME, join(home, '.local', 'share'));
});
|
|
@@ -15,37 +15,47 @@ export function extractCodexReviewFromJsonl(jsonlText) {
|
|
|
15
15
|
} catch {
|
|
16
16
|
continue;
|
|
17
17
|
}
|
|
18
|
-
const
|
|
19
|
-
|
|
20
|
-
|
|
21
|
-
|
|
22
|
-
|
|
23
|
-
|
|
24
|
-
|
|
25
|
-
|
|
26
|
-
|
|
27
|
-
|
|
28
|
-
|
|
29
|
-
if (reviewOutput) return reviewOutput;
|
|
18
|
+
const candidates = [obj, obj?.msg, obj?.payload, obj?.event, obj?.data, obj?.result].filter(Boolean);
|
|
19
|
+
for (const c of candidates) {
|
|
20
|
+
const exited =
|
|
21
|
+
c?.ExitedReviewMode ??
|
|
22
|
+
(c?.type === 'ExitedReviewMode' ? c : null) ??
|
|
23
|
+
(c?.event?.type === 'ExitedReviewMode' ? c.event : null) ??
|
|
24
|
+
(c?.payload?.type === 'ExitedReviewMode' ? c.payload : null);
|
|
25
|
+
|
|
26
|
+
const reviewOutput = exited?.review_output ?? exited?.reviewOutput ?? null;
|
|
27
|
+
if (reviewOutput) return reviewOutput;
|
|
28
|
+
}
|
|
30
29
|
}
|
|
31
30
|
return null;
|
|
32
31
|
}
|
|
33
32
|
|
|
34
|
-
export
|
|
35
|
-
const args = ['
|
|
33
|
+
/**
 * Build the argv for a `codex exec review` invocation.
 *
 * Codex review targets are mutually exclusive:
 * - --base / --commit / --uncommitted are distinct "targets";
 * - a PROMPT argument switches to the "custom instructions" target and cannot
 *   be combined with the above, so a prompt is only appended when no baseRef
 *   target was chosen.
 */
export function buildCodexReviewArgs({ baseRef, jsonMode, prompt }) {
  const args = ['exec', 'review', '--dangerously-bypass-approvals-and-sandbox'];

  if (baseRef) args.push('--base', baseRef);
  if (jsonMode) args.push('--json');

  const trimmedPrompt = String(prompt ?? '').trim();
  if (!baseRef) {
    // No explicit target: either custom instructions or the uncommitted diff.
    args.push(trimmedPrompt !== '' ? trimmedPrompt : '--uncommitted');
  }
  return args;
}
|
|
51
51
|
|
|
52
|
+
/**
 * Run `codex exec review` in `repoDir` and capture its output.
 *
 * CODEX_HOME is overridden when HAPPY_STACKS_CODEX_HOME_DIR (or the legacy
 * HAPPY_LOCAL_CODEX_HOME_DIR, or an existing CODEX_HOME) is present in `env`.
 * Returns the capture result with `stdout`/`stderr` aliases for `out`/`err`.
 */
export async function runCodexReview({ repoDir, baseRef, env, jsonMode, streamLabel, teeFile, teeLabel, prompt }) {
  const childEnv = { ...(env ?? {}) };

  // New name wins, then the legacy name, then whatever CODEX_HOME already was.
  const codexHome = (childEnv.HAPPY_STACKS_CODEX_HOME_DIR ?? childEnv.HAPPY_LOCAL_CODEX_HOME_DIR ?? childEnv.CODEX_HOME ?? '')
    .toString()
    .trim();
  if (codexHome) childEnv.CODEX_HOME = codexHome;

  const cliArgs = buildCodexReviewArgs({ baseRef, jsonMode, prompt });
  const result = await runCaptureResult('codex', cliArgs, { cwd: repoDir, env: childEnv, streamLabel, teeFile, teeLabel });
  return { ...result, stdout: result.out, stderr: result.err };
}
|
|
@@ -0,0 +1,35 @@
|
|
|
1
|
+
import test from 'node:test';
import assert from 'node:assert/strict';

import { buildCodexReviewArgs, extractCodexReviewFromJsonl } from './codex.mjs';

test('buildCodexReviewArgs uses --base and avoids --cd', () => {
  const args = buildCodexReviewArgs({ baseRef: 'upstream/main', jsonMode: false });
  assert.equal(args.includes('--cd'), false);
  assert.deepEqual(args, ['exec', 'review', '--dangerously-bypass-approvals-and-sandbox', '--base', 'upstream/main']);
});

// Title fixed: the builder emits `--json`, not the old `--experimental-json`
// flag the previous title referred to (the assertion always checked --json).
test('buildCodexReviewArgs uses --json when jsonMode is true', () => {
  const args = buildCodexReviewArgs({ baseRef: 'upstream/main', jsonMode: true });
  assert.deepEqual(args, ['exec', 'review', '--dangerously-bypass-approvals-and-sandbox', '--base', 'upstream/main', '--json']);
});

test('buildCodexReviewArgs appends a prompt when provided', () => {
  const args = buildCodexReviewArgs({ baseRef: null, jsonMode: false, prompt: 'be thorough' });
  assert.deepEqual(args, ['exec', 'review', '--dangerously-bypass-approvals-and-sandbox', 'be thorough']);
});

test('extractCodexReviewFromJsonl finds review_output in multiple event shapes', () => {
  // Shape 1: nested under msg.ExitedReviewMode.
  const out1 = extractCodexReviewFromJsonl(
    JSON.stringify({ msg: { ExitedReviewMode: { review_output: { a: 1 } } } }) + '\n'
  );
  assert.deepEqual(out1, { a: 1 });

  // Shape 2: top-level type tag with snake_case payload.
  const out2 = extractCodexReviewFromJsonl(JSON.stringify({ type: 'ExitedReviewMode', review_output: { b: 2 } }) + '\n');
  assert.deepEqual(out2, { b: 2 });

  // Shape 3: wrapped in `event` with camelCase payload.
  const out3 = extractCodexReviewFromJsonl(
    JSON.stringify({ event: { type: 'ExitedReviewMode', reviewOutput: { c: 3 } } }) + '\n'
  );
  assert.deepEqual(out3, { c: 3 });
});
|
|
@@ -0,0 +1,140 @@
|
|
|
1
|
+
/**
 * Path-slicing helpers for "HEAD-sliced" reviews: split a large changed-path
 * set into reviewer-sized slices grouped by directory prefix.
 */

// Normalize to forward slashes with no leading slash; nullish becomes ''.
function normalizePath(p) {
  return String(p ?? '').replace(/\\/g, '/').replace(/^\/+/, '');
}

// Longest run of leading segments shared by every entry in partsList.
// (Fixed: the original clamped with a redundant `Math.min(n, parts.length, n)`.)
function commonPrefixParts(partsList) {
  if (!partsList.length) return [];
  const first = partsList[0];
  let n = first.length;
  for (const parts of partsList.slice(1)) {
    n = Math.min(n, parts.length);
    for (let i = 0; i < n; i += 1) {
      if (parts[i] !== first[i]) {
        n = i;
        break;
      }
    }
  }
  return first.slice(0, n);
}

// Human-readable label built from at most `maxDepth` leading segments.
function pathPrefixLabel(parts, { maxDepth = 4 } = {}) {
  const depth = Math.min(parts.length, Math.max(1, maxDepth));
  return parts.slice(0, depth).join('/');
}

// Bucket paths by their first `depth` segments (always at least one segment).
function groupByPrefix(paths, depth) {
  const groups = new Map();
  for (const p of paths) {
    const parts = normalizePath(p).split('/').filter(Boolean);
    const key = parts.slice(0, Math.max(1, Math.min(depth, parts.length))).join('/');
    if (!groups.has(key)) groups.set(key, []);
    groups.get(key).push(p);
  }
  return groups;
}

/**
 * Plan review slices that:
 * - cover every changed path exactly once
 * - keep each slice at <= maxFiles where possible
 * - prefer directory-prefix grouping (better reviewer context) over raw batching
 *
 * The output is intended for "HEAD-sliced" review: the reviewer gets a focused diff
 * while still having access to the full repo code at HEAD.
 *
 * @param {object} [opts]
 * @param {string[]} [opts.changedPaths] - changed file paths (any slash style)
 * @param {number} [opts.maxFiles=300] - soft cap on files per slice
 * @param {number} [opts.maxPrefixDepth=6] - deepest prefix used for refinement
 * @returns {{label: string, paths: string[]}[]} slices in stable label order
 */
export function planPathSlices({ changedPaths, maxFiles = 300, maxPrefixDepth = 6 } = {}) {
  const unique = Array.from(new Set((Array.isArray(changedPaths) ? changedPaths : []).map(normalizePath))).filter(Boolean);
  unique.sort();
  if (!unique.length) return [];

  const limit = Number.isFinite(maxFiles) && maxFiles > 0 ? Math.floor(maxFiles) : 300;

  // Fits in one slice: label it by the common prefix (or 'repo/' if none).
  if (unique.length <= limit) {
    const parts = unique.map((p) => p.split('/').filter(Boolean));
    const prefix = commonPrefixParts(parts);
    return [
      {
        label: prefix.length ? `${pathPrefixLabel(prefix, { maxDepth: 3 })}/` : 'repo/',
        paths: unique,
      },
    ];
  }

  // First pass: top-level directories (plus root files).
  const topGroups = groupByPrefix(unique, 1);

  const slices = [];
  const pushSlice = (label, paths) => {
    const normalized = Array.from(new Set(paths.map(normalizePath))).filter(Boolean).sort();
    if (!normalized.length) return;
    slices.push({ label, paths: normalized });
  };

  for (const [top, paths] of topGroups.entries()) {
    if (paths.length <= limit) {
      // Depth-1 keys are single segments, so the label is always `<top>/`.
      // (Fixed: the original's `top.includes('/')` branch was unreachable.)
      pushSlice(`${top}/`, paths);
      continue;
    }

    // Iteratively refine prefix depth within this group until all chunks are <= limit.
    let pending = [{ label: top, paths }];
    for (let depth = 2; depth <= maxPrefixDepth && pending.some((x) => x.paths.length > limit); depth += 1) {
      const next = [];
      for (const item of pending) {
        if (item.paths.length <= limit) {
          next.push(item);
          continue;
        }
        const groups = groupByPrefix(item.paths, depth);
        if (groups.size <= 1) {
          // Refining further would not split this group; keep it as-is.
          next.push(item);
          continue;
        }
        for (const [k, v] of groups.entries()) {
          next.push({ label: k, paths: v });
        }
      }
      pending = next;
    }

    // Final pass: pack refined groups into <=limit windows (greedy, stable order).
    pending.sort((a, b) => a.label.localeCompare(b.label));
    let bucket = [];
    let bucketCount = 0;
    let bucketLabelParts = [];
    const flush = () => {
      if (!bucket.length) return;
      const parts = commonPrefixParts(bucketLabelParts);
      const label = parts.length ? `${pathPrefixLabel(parts, { maxDepth: 4 })}/` : `${top}/`;
      pushSlice(label, bucket);
      bucket = [];
      bucketCount = 0;
      bucketLabelParts = [];
    };

    for (const g of pending) {
      const n = g.paths.length;
      if (n > limit) {
        // Fall back to raw batching for truly massive groups (rare).
        flush();
        for (let i = 0; i < g.paths.length; i += limit) {
          const batch = g.paths.slice(i, i + limit);
          pushSlice(`${g.label}/`, batch);
        }
        continue;
      }
      if (bucketCount + n > limit) {
        flush();
      }
      bucket.push(...g.paths);
      bucketCount += n;
      bucketLabelParts.push(g.label.split('/').filter(Boolean));
    }
    flush();
  }

  // Stable ordering helps humans follow progress.
  slices.sort((a, b) => a.label.localeCompare(b.label));
  return slices;
}
|
|
140
|
+
|
|
@@ -0,0 +1,32 @@
|
|
|
1
|
+
import test from 'node:test';
import assert from 'node:assert/strict';
import { planPathSlices } from './slices.mjs';

test('planPathSlices returns empty for no paths', () => {
  assert.deepEqual(planPathSlices({ changedPaths: [], maxFiles: 3 }), []);
});

test('planPathSlices creates a single slice when under maxFiles', () => {
  const result = planPathSlices({
    changedPaths: ['expo-app/a.txt', 'cli/b.txt', 'server/c.txt'],
    maxFiles: 10,
  });
  assert.equal(result.length, 1);
  assert.deepEqual(result[0].paths, ['cli/b.txt', 'expo-app/a.txt', 'server/c.txt']);
});

test('planPathSlices splits large groups by prefix depth and respects maxFiles', () => {
  const makeFiles = (dir, stem, count) => Array.from({ length: count }, (_, i) => `${dir}/${stem}${i}.ts`);
  const changedPaths = [
    ...makeFiles('expo-app/sources', 'a', 6),
    ...makeFiles('expo-app/sources', 'b', 6),
    ...makeFiles('cli/src', 'x', 2),
  ];
  const slices = planPathSlices({ changedPaths, maxFiles: 5, maxPrefixDepth: 4 });
  assert.ok(slices.length > 1);
  for (const s of slices) {
    assert.ok(s.paths.length <= 5, `slice ${s.label} exceeded maxFiles`);
  }
  const covered = slices.flatMap((s) => s.paths).sort();
  assert.deepEqual(covered, Array.from(new Set(changedPaths)).sort());
});
|
|
32
|
+
|
|
@@ -0,0 +1,98 @@
|
|
|
1
|
+
import { existsSync, readFileSync } from 'node:fs';
import { join } from 'node:path';
import { pathToFileURL } from 'node:url';

// Schema locations that mark a "unified" happy-server-light checkout,
// checked in priority order (current layout first, then legacy).
const UNIFIED_SCHEMA_LOCATIONS = [
  ['prisma', 'sqlite', 'schema.prisma'],
  ['prisma', 'schema.sqlite.prisma'],
];

// Read package.json scripts from serverDir; any failure yields {}.
function readScripts(serverDir) {
  try {
    const raw = readFileSync(join(serverDir, 'package.json'), 'utf-8');
    const pkg = JSON.parse(raw);
    return pkg?.scripts && typeof pkg.scripts === 'object' ? pkg.scripts : {};
  } catch {
    return {};
  }
}

// True when `name` maps to a non-blank script string.
function hasScript(scripts, name) {
  const value = scripts?.[name];
  return typeof value === 'string' && value.trim() !== '';
}

/** Detect the unified happy-server-light flavor via its sqlite schema file. */
export function isUnifiedHappyServerLight({ serverDir }) {
  return UNIFIED_SCHEMA_LOCATIONS.some((parts) => existsSync(join(serverDir, ...parts)));
}

/** `--schema <relpath>` args for the sqlite schema, or [] when not unified. */
export function resolveServerLightPrismaSchemaArgs({ serverDir }) {
  for (const parts of UNIFIED_SCHEMA_LOCATIONS) {
    if (existsSync(join(serverDir, ...parts))) {
      return ['--schema', parts.join('/')];
    }
  }
  return [];
}

/** Full `prisma migrate deploy` args, including `--schema` when unified. */
export function resolveServerLightPrismaMigrateDeployArgs({ serverDir }) {
  return ['migrate', 'deploy', ...resolveServerLightPrismaSchemaArgs({ serverDir })];
}

/**
 * Import specifier for the server-light Prisma client: a file: URL to the
 * generated sqlite client when unified, otherwise plain '@prisma/client'.
 */
export function resolveServerLightPrismaClientImport({ serverDir }) {
  if (!isUnifiedHappyServerLight({ serverDir })) {
    return '@prisma/client';
  }
  return pathToFileURL(join(serverDir, 'generated', 'sqlite-client', 'index.js')).href;
}

/**
 * Prisma client import specifier for a server component.
 * Accepts `serverComponent` as a back-compat alias for `serverComponentName`.
 */
export function resolvePrismaClientImportForServerComponent({ serverComponentName, serverComponent, serverDir }) {
  const component = serverComponentName ?? serverComponent;
  return component === 'happy-server-light'
    ? resolveServerLightPrismaClientImport({ serverDir })
    : '@prisma/client';
}

/**
 * Pick the npm script to use for dev mode.
 * happy-server always uses `start`; unified server-light prefers `dev:light`
 * (which runs migrate deploy first), legacy server-light keeps the old
 * dev/start preference driven by `prismaPush`.
 */
export function resolveServerDevScript({ serverComponentName, serverDir, prismaPush }) {
  const scripts = readScripts(serverDir);

  if (serverComponentName === 'happy-server') {
    return 'start';
  }

  if (serverComponentName === 'happy-server-light') {
    if (isUnifiedHappyServerLight({ serverDir })) {
      // Server-light now relies on deterministic migrations (not db push);
      // prefer the dedicated dev script that runs migrate deploy first.
      if (hasScript(scripts, 'dev:light')) {
        return 'dev:light';
      }
      // Fallback: no dev script, run the light start script.
      return hasScript(scripts, 'start:light') ? 'start:light' : 'start';
    }

    // Legacy behavior: prefer `dev` for older happy-server-light checkouts.
    if (prismaPush) {
      return hasScript(scripts, 'dev') ? 'dev' : 'start';
    }
    return hasScript(scripts, 'start') ? 'start' : 'dev';
  }

  // Unknown component: be conservative.
  return 'start';
}

/** Pick the npm script for production-style start (light-aware). */
export function resolveServerStartScript({ serverComponentName, serverDir }) {
  const scripts = readScripts(serverDir);

  const lightReady =
    serverComponentName === 'happy-server-light' &&
    isUnifiedHappyServerLight({ serverDir }) &&
    hasScript(scripts, 'start:light');

  return lightReady ? 'start:light' : 'start';
}
|
|
@@ -0,0 +1,146 @@
|
|
|
1
|
+
import test from 'node:test';
import assert from 'node:assert/strict';
import { mkdtemp, mkdir, rm, writeFile } from 'node:fs/promises';
import { tmpdir } from 'node:os';
import { join } from 'node:path';

import {
  resolvePrismaClientImportForServerComponent,
  resolveServerDevScript,
  resolveServerLightPrismaClientImport,
  resolveServerLightPrismaMigrateDeployArgs,
  resolveServerStartScript,
} from './flavor_scripts.mjs';

const LIGHT = 'happy-server-light';
const SQLITE_SCHEMA = 'datasource db { provider = "sqlite" }\n';

// Run `fn` against a throwaway server dir, always cleaning up afterwards.
async function withServerDir(fn) {
  const dir = await mkdtemp(join(tmpdir(), 'hs-flavor-scripts-'));
  try {
    await fn(dir);
  } finally {
    await rm(dir, { recursive: true, force: true });
  }
}

// Write package.json containing only the given scripts map.
async function writeScripts(dir, scripts) {
  await writeFile(join(dir, 'package.json'), JSON.stringify({ scripts }, null, 2) + '\n', 'utf-8');
}

// Lay down the unified-light marker at prisma/sqlite/schema.prisma.
async function writeUnifiedSchema(dir) {
  await mkdir(join(dir, 'prisma', 'sqlite'), { recursive: true });
  await writeFile(join(dir, 'prisma', 'sqlite', 'schema.prisma'), SQLITE_SCHEMA, 'utf-8');
}

// Shared assertions for a generated-sqlite-client import specifier.
function assertSqliteClientSpec(spec) {
  assert.equal(typeof spec, 'string');
  assert.ok(spec.startsWith('file:'), `expected file: URL import spec, got: ${spec}`);
  assert.ok(spec.endsWith('/generated/sqlite-client/index.js'), `unexpected import spec: ${spec}`);
}

test('resolveServer*Script uses light scripts when unified light flavor is detected', async () => {
  await withServerDir(async (dir) => {
    await writeUnifiedSchema(dir);
    await writeScripts(dir, { 'start:light': 'node x', 'dev:light': 'node y' });

    assert.equal(resolveServerDevScript({ serverComponentName: LIGHT, serverDir: dir, prismaPush: true }), 'dev:light');
    assert.equal(resolveServerDevScript({ serverComponentName: LIGHT, serverDir: dir, prismaPush: false }), 'dev:light');
    assert.equal(resolveServerStartScript({ serverComponentName: LIGHT, serverDir: dir }), 'start:light');
  });
});

test('resolveServer*Script falls back to legacy scripts for non-unified happy-server-light', async () => {
  await withServerDir(async (dir) => {
    await writeScripts(dir, { start: 'node start', dev: 'node dev' });

    assert.equal(resolveServerDevScript({ serverComponentName: LIGHT, serverDir: dir, prismaPush: true }), 'dev');
    assert.equal(resolveServerDevScript({ serverComponentName: LIGHT, serverDir: dir, prismaPush: false }), 'start');
    assert.equal(resolveServerStartScript({ serverComponentName: LIGHT, serverDir: dir }), 'start');
  });
});

test('resolveServer*Script returns start for happy-server', async () => {
  await withServerDir(async (dir) => {
    await writeScripts(dir, { start: 'node start', dev: 'node dev' });

    assert.equal(resolveServerDevScript({ serverComponentName: 'happy-server', serverDir: dir, prismaPush: true }), 'start');
    assert.equal(resolveServerDevScript({ serverComponentName: 'happy-server', serverDir: dir, prismaPush: false }), 'start');
    assert.equal(resolveServerStartScript({ serverComponentName: 'happy-server', serverDir: dir }), 'start');
  });
});

test('resolveServerLightPrismaMigrateDeployArgs adds --schema when unified light flavor is detected', async () => {
  await withServerDir(async (dir) => {
    await writeUnifiedSchema(dir);

    assert.deepEqual(resolveServerLightPrismaMigrateDeployArgs({ serverDir: dir }), ['migrate', 'deploy', '--schema', 'prisma/sqlite/schema.prisma']);
  });
});

test('resolveServerLightPrismaMigrateDeployArgs supports legacy schema.sqlite.prisma', async () => {
  await withServerDir(async (dir) => {
    await mkdir(join(dir, 'prisma'), { recursive: true });
    await writeFile(join(dir, 'prisma', 'schema.sqlite.prisma'), SQLITE_SCHEMA, 'utf-8');

    assert.deepEqual(resolveServerLightPrismaMigrateDeployArgs({ serverDir: dir }), ['migrate', 'deploy', '--schema', 'prisma/schema.sqlite.prisma']);
  });
});

test('resolveServerLightPrismaClientImport returns file URL when unified light flavor is detected', async () => {
  await withServerDir(async (dir) => {
    await writeUnifiedSchema(dir);

    assertSqliteClientSpec(resolveServerLightPrismaClientImport({ serverDir: dir }));
  });
});

test('resolveServerLightPrismaClientImport returns @prisma/client for legacy happy-server-light', async () => {
  await withServerDir(async (dir) => {
    assert.equal(resolveServerLightPrismaClientImport({ serverDir: dir }), '@prisma/client');
    assert.deepEqual(resolveServerLightPrismaMigrateDeployArgs({ serverDir: dir }), ['migrate', 'deploy']);
  });
});

test('resolvePrismaClientImportForServerComponent returns sqlite client file URL for unified server-light', async () => {
  await withServerDir(async (dir) => {
    await writeUnifiedSchema(dir);

    assertSqliteClientSpec(resolvePrismaClientImportForServerComponent({ serverComponentName: LIGHT, serverDir: dir }));
  });
});

test('resolvePrismaClientImportForServerComponent accepts serverComponent alias (back-compat)', async () => {
  await withServerDir(async (dir) => {
    await writeUnifiedSchema(dir);

    assertSqliteClientSpec(resolvePrismaClientImportForServerComponent({ serverComponent: LIGHT, serverDir: dir }));
  });
});

test('resolvePrismaClientImportForServerComponent returns @prisma/client for happy-server', async () => {
  await withServerDir(async (dir) => {
    assert.equal(resolvePrismaClientImportForServerComponent({ serverComponentName: 'happy-server', serverDir: dir }), '@prisma/client');
  });
});
|