@happier-dev/stack 0.1.0-preview.74.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +501 -0
- package/bin/hstack.mjs +348 -0
- package/docs/codex-mcp-resume.md +129 -0
- package/docs/edison.md +74 -0
- package/docs/forking-and-branding.md +189 -0
- package/docs/happy-development.md +22 -0
- package/docs/isolated-linux-vm.md +243 -0
- package/docs/menubar.md +244 -0
- package/docs/mobile-ios.md +322 -0
- package/docs/monorepo-migration.md +20 -0
- package/docs/paths-and-env.md +154 -0
- package/docs/remote-access.md +43 -0
- package/docs/server-flavors.md +147 -0
- package/docs/stacks.md +330 -0
- package/docs/tauri.md +60 -0
- package/docs/worktrees-and-forks.md +133 -0
- package/extras/swiftbar/auth-login.sh +29 -0
- package/extras/swiftbar/git-cache-refresh.sh +122 -0
- package/extras/swiftbar/hstack-term.sh +133 -0
- package/extras/swiftbar/hstack.5s.sh +296 -0
- package/extras/swiftbar/hstack.sh +35 -0
- package/extras/swiftbar/icons/happy-green.png +0 -0
- package/extras/swiftbar/icons/happy-orange.png +0 -0
- package/extras/swiftbar/icons/happy-red.png +0 -0
- package/extras/swiftbar/icons/logo-white.png +0 -0
- package/extras/swiftbar/install.sh +265 -0
- package/extras/swiftbar/lib/git.sh +629 -0
- package/extras/swiftbar/lib/icons.sh +92 -0
- package/extras/swiftbar/lib/render.sh +999 -0
- package/extras/swiftbar/lib/system.sh +244 -0
- package/extras/swiftbar/lib/utils.sh +717 -0
- package/extras/swiftbar/set-interval.sh +65 -0
- package/extras/swiftbar/set-server-flavor.sh +61 -0
- package/extras/swiftbar/wt-pr.sh +140 -0
- package/node_modules/@happier-dev/cli-common/README.md +6 -0
- package/node_modules/@happier-dev/cli-common/dist/index.d.ts +4 -0
- package/node_modules/@happier-dev/cli-common/dist/index.d.ts.map +1 -0
- package/node_modules/@happier-dev/cli-common/dist/index.js +4 -0
- package/node_modules/@happier-dev/cli-common/dist/index.js.map +1 -0
- package/node_modules/@happier-dev/cli-common/dist/links/index.d.ts +18 -0
- package/node_modules/@happier-dev/cli-common/dist/links/index.d.ts.map +1 -0
- package/node_modules/@happier-dev/cli-common/dist/links/index.js +25 -0
- package/node_modules/@happier-dev/cli-common/dist/links/index.js.map +1 -0
- package/node_modules/@happier-dev/cli-common/dist/links.d.ts +2 -0
- package/node_modules/@happier-dev/cli-common/dist/links.d.ts.map +1 -0
- package/node_modules/@happier-dev/cli-common/dist/links.js +2 -0
- package/node_modules/@happier-dev/cli-common/dist/links.js.map +1 -0
- package/node_modules/@happier-dev/cli-common/dist/update/index.d.ts +67 -0
- package/node_modules/@happier-dev/cli-common/dist/update/index.d.ts.map +1 -0
- package/node_modules/@happier-dev/cli-common/dist/update/index.js +259 -0
- package/node_modules/@happier-dev/cli-common/dist/update/index.js.map +1 -0
- package/node_modules/@happier-dev/cli-common/dist/workspaces/index.d.ts +17 -0
- package/node_modules/@happier-dev/cli-common/dist/workspaces/index.d.ts.map +1 -0
- package/node_modules/@happier-dev/cli-common/dist/workspaces/index.js +80 -0
- package/node_modules/@happier-dev/cli-common/dist/workspaces/index.js.map +1 -0
- package/node_modules/@happier-dev/cli-common/package.json +26 -0
- package/package.json +77 -0
- package/scripts/auth.mjs +1829 -0
- package/scripts/auth_copy_from_pglite_lock_in_use.integration.test.mjs +90 -0
- package/scripts/auth_copy_from_runCapture.integration.test.mjs +447 -0
- package/scripts/auth_help_cmd.test.mjs +28 -0
- package/scripts/auth_login_flow_in_tty.test.mjs +100 -0
- package/scripts/auth_login_force_default.test.mjs +66 -0
- package/scripts/auth_login_guided_server_no_expo.test.mjs +126 -0
- package/scripts/auth_login_method_override.test.mjs +67 -0
- package/scripts/auth_login_print_includes_configure_links.test.mjs +99 -0
- package/scripts/auth_status_server_validation.integration.test.mjs +140 -0
- package/scripts/build.mjs +266 -0
- package/scripts/bundleWorkspaceDeps.mjs +38 -0
- package/scripts/bundleWorkspaceDeps.test.mjs +77 -0
- package/scripts/ci.mjs +135 -0
- package/scripts/ci.test.mjs +50 -0
- package/scripts/cli-link.mjs +57 -0
- package/scripts/completion.mjs +395 -0
- package/scripts/contrib.mjs +333 -0
- package/scripts/daemon.mjs +1160 -0
- package/scripts/daemon.status_scope.test.mjs +51 -0
- package/scripts/daemon_cmd.mjs +26 -0
- package/scripts/daemon_dist_guard.test.mjs +171 -0
- package/scripts/daemon_invalid_auth_reseed_stack_name.integration.test.mjs +608 -0
- package/scripts/daemon_server_scoped_state.test.mjs +49 -0
- package/scripts/daemon_start_verification.integration.test.mjs +296 -0
- package/scripts/dev.mjs +545 -0
- package/scripts/doctor.mjs +340 -0
- package/scripts/doctor_cmd.test.mjs +22 -0
- package/scripts/doctor_ui_index_missing.test.mjs +37 -0
- package/scripts/eas.mjs +367 -0
- package/scripts/eas_platform_parsing.test.mjs +63 -0
- package/scripts/edison.mjs +1848 -0
- package/scripts/env.mjs +149 -0
- package/scripts/env_cmd.test.mjs +118 -0
- package/scripts/exit_cleanup_kills_detached_children_on_crash.integration.test.mjs +80 -0
- package/scripts/happier.mjs +82 -0
- package/scripts/import.mjs +1327 -0
- package/scripts/init.mjs +464 -0
- package/scripts/install.mjs +550 -0
- package/scripts/lint.mjs +177 -0
- package/scripts/menubar.mjs +202 -0
- package/scripts/migrate.mjs +318 -0
- package/scripts/mobile.mjs +353 -0
- package/scripts/mobile_dev_client.mjs +87 -0
- package/scripts/monorepo.mjs +2234 -0
- package/scripts/monorepo_port.apply.integration.test.mjs +680 -0
- package/scripts/monorepo_port.conflicts.integration.test.mjs +454 -0
- package/scripts/monorepo_port.validation.integration.test.mjs +486 -0
- package/scripts/orchestrated_stack_auth_flow.test.mjs +134 -0
- package/scripts/orchestrated_stack_auth_flow_resolve_port.test.mjs +98 -0
- package/scripts/orchestrated_stack_auth_flow_webapp_url.test.mjs +119 -0
- package/scripts/pack.mjs +257 -0
- package/scripts/pack.test.mjs +68 -0
- package/scripts/pglite_lock.integration.test.mjs +152 -0
- package/scripts/provision/linux-ubuntu-e2e.sh +132 -0
- package/scripts/provision/linux-ubuntu-review-pr.sh +66 -0
- package/scripts/provision/macos-lima-happy-vm.sh +192 -0
- package/scripts/provision/macos-lima-hstack-e2e.sh +100 -0
- package/scripts/release.mjs +53 -0
- package/scripts/release_binary_smoke.integration.test.mjs +138 -0
- package/scripts/review.mjs +1752 -0
- package/scripts/review_pr.mjs +435 -0
- package/scripts/run.mjs +561 -0
- package/scripts/run_script_with_stack_env.restart_port_reuse.test.mjs +30 -0
- package/scripts/self.mjs +465 -0
- package/scripts/self_host.mjs +9 -0
- package/scripts/self_host_binary_smoke.integration.test.mjs +74 -0
- package/scripts/self_host_runtime.mjs +883 -0
- package/scripts/self_host_runtime.test.mjs +82 -0
- package/scripts/self_host_systemd.real.integration.test.mjs +367 -0
- package/scripts/server_flavor.mjs +148 -0
- package/scripts/service.mjs +868 -0
- package/scripts/service_mode_help.test.mjs +27 -0
- package/scripts/setup.mjs +1324 -0
- package/scripts/setup_non_interactive_flag.test.mjs +60 -0
- package/scripts/setup_pr.mjs +605 -0
- package/scripts/setup_pr_orchestrated_auth_flow_util_import.test.mjs +117 -0
- package/scripts/stack/command_arguments.mjs +91 -0
- package/scripts/stack/copy_auth_from_stack.mjs +111 -0
- package/scripts/stack/delegated_script_commands.mjs +92 -0
- package/scripts/stack/help_text.mjs +110 -0
- package/scripts/stack/port_reservation.mjs +74 -0
- package/scripts/stack/repo_checkout_resolution.mjs +31 -0
- package/scripts/stack/run_script_with_stack_env.mjs +634 -0
- package/scripts/stack/stack_daemon_command.mjs +219 -0
- package/scripts/stack/stack_delegated_help.mjs +81 -0
- package/scripts/stack/stack_environment.mjs +151 -0
- package/scripts/stack/stack_environment.sanitization.test.mjs +75 -0
- package/scripts/stack/stack_happier_passthrough_command.mjs +63 -0
- package/scripts/stack/stack_info_snapshot.mjs +167 -0
- package/scripts/stack/stack_mobile_install_command.mjs +61 -0
- package/scripts/stack/stack_resume_command.mjs +76 -0
- package/scripts/stack/stack_stop_command.mjs +34 -0
- package/scripts/stack/stack_workspace_command.mjs +83 -0
- package/scripts/stack/transient_repo_overrides.mjs +29 -0
- package/scripts/stack.mjs +2388 -0
- package/scripts/stack_archive_cmd.integration.test.mjs +31 -0
- package/scripts/stack_audit_fix_light_env.test.mjs +129 -0
- package/scripts/stack_background_pinned_stack_json.test.mjs +81 -0
- package/scripts/stack_copy_auth_server_scoped.test.mjs +243 -0
- package/scripts/stack_daemon_cmd.integration.test.mjs +484 -0
- package/scripts/stack_eas_help.test.mjs +72 -0
- package/scripts/stack_editor_workspace_monorepo_root.test.mjs +102 -0
- package/scripts/stack_env_cmd.test.mjs +107 -0
- package/scripts/stack_guided_login_bundle_error_parse.test.mjs +20 -0
- package/scripts/stack_guided_login_inner_invocation.test.mjs +46 -0
- package/scripts/stack_happy_cmd.integration.test.mjs +263 -0
- package/scripts/stack_info_snapshot_running_status.test.mjs +186 -0
- package/scripts/stack_interactive_monorepo_group.test.mjs +128 -0
- package/scripts/stack_monorepo_defaults.test.mjs +31 -0
- package/scripts/stack_monorepo_repo_dev_token.test.mjs +32 -0
- package/scripts/stack_monorepo_server_light_from_happy_spec.test.mjs +37 -0
- package/scripts/stack_new_name_normalize_cmd.test.mjs +38 -0
- package/scripts/stack_pr_name_normalize_cmd.test.mjs +84 -0
- package/scripts/stack_resume_cmd.integration.test.mjs +134 -0
- package/scripts/stack_server_flavors_defaults.test.mjs +64 -0
- package/scripts/stack_shorthand_cmd.integration.test.mjs +74 -0
- package/scripts/stack_stop_sweeps_legacy_infra_without_kind.integration.test.mjs +44 -0
- package/scripts/stack_stop_sweeps_when_runtime_missing.integration.test.mjs +42 -0
- package/scripts/stack_stop_sweeps_when_runtime_stale.integration.test.mjs +50 -0
- package/scripts/stack_wt_list.test.mjs +117 -0
- package/scripts/start_ui_required_default.test.mjs +63 -0
- package/scripts/stop.mjs +190 -0
- package/scripts/stopStackWithEnv_no_autosweep_when_runtime_missing.integration.test.mjs +95 -0
- package/scripts/swiftbar_git_monorepo_cmd.test.mjs +75 -0
- package/scripts/swiftbar_render_monorepo_wt_actions.integration.test.mjs +116 -0
- package/scripts/swiftbar_utils_cmd.test.mjs +92 -0
- package/scripts/swiftbar_wt_pr_backcompat.test.mjs +162 -0
- package/scripts/systemd_unit_info.test.mjs +24 -0
- package/scripts/tailscale.mjs +490 -0
- package/scripts/test_ci.mjs +36 -0
- package/scripts/test_cmd.mjs +274 -0
- package/scripts/test_cmd.test.mjs +133 -0
- package/scripts/test_integration.mjs +33 -0
- package/scripts/testkit/auth_testkit.mjs +121 -0
- package/scripts/testkit/doctor_testkit.mjs +68 -0
- package/scripts/testkit/monorepo_port_testkit.mjs +157 -0
- package/scripts/testkit/stack_archive_command_testkit.mjs +55 -0
- package/scripts/testkit/stack_new_monorepo_testkit.mjs +83 -0
- package/scripts/testkit/stack_script_command_testkit.mjs +27 -0
- package/scripts/testkit/stack_stop_sweeps_testkit.mjs +172 -0
- package/scripts/testkit/worktrees_monorepo_testkit.mjs +53 -0
- package/scripts/tools.mjs +70 -0
- package/scripts/tui.mjs +914 -0
- package/scripts/tui_stopStackForTuiExit_no_autosweep.integration.test.mjs +95 -0
- package/scripts/typecheck.mjs +178 -0
- package/scripts/ui_gateway.mjs +247 -0
- package/scripts/uninstall.mjs +179 -0
- package/scripts/utils/auth/credentials_paths.mjs +181 -0
- package/scripts/utils/auth/credentials_paths.test.mjs +187 -0
- package/scripts/utils/auth/daemon_gate.mjs +66 -0
- package/scripts/utils/auth/daemon_gate.test.mjs +116 -0
- package/scripts/utils/auth/decode_jwt_payload_unsafe.mjs +16 -0
- package/scripts/utils/auth/dev_key.mjs +163 -0
- package/scripts/utils/auth/files.mjs +56 -0
- package/scripts/utils/auth/guided_pr_auth.mjs +86 -0
- package/scripts/utils/auth/guided_stack_web_login.mjs +56 -0
- package/scripts/utils/auth/handy_master_secret.mjs +42 -0
- package/scripts/utils/auth/interactive_stack_auth.mjs +70 -0
- package/scripts/utils/auth/login_ux.mjs +105 -0
- package/scripts/utils/auth/orchestrated_stack_auth_flow.mjs +291 -0
- package/scripts/utils/auth/sources.mjs +28 -0
- package/scripts/utils/auth/stable_scope_id.mjs +91 -0
- package/scripts/utils/auth/stable_scope_id.test.mjs +51 -0
- package/scripts/utils/auth/stack_guided_login.mjs +438 -0
- package/scripts/utils/cli/arg_values.mjs +23 -0
- package/scripts/utils/cli/arg_values.test.mjs +43 -0
- package/scripts/utils/cli/args.mjs +17 -0
- package/scripts/utils/cli/cli.mjs +24 -0
- package/scripts/utils/cli/cli_registry.mjs +440 -0
- package/scripts/utils/cli/cwd_scope.mjs +158 -0
- package/scripts/utils/cli/cwd_scope.test.mjs +154 -0
- package/scripts/utils/cli/flags.mjs +17 -0
- package/scripts/utils/cli/log_forwarder.mjs +157 -0
- package/scripts/utils/cli/normalize.mjs +16 -0
- package/scripts/utils/cli/prereqs.mjs +103 -0
- package/scripts/utils/cli/prereqs.test.mjs +33 -0
- package/scripts/utils/cli/progress.mjs +141 -0
- package/scripts/utils/cli/smoke_help.mjs +44 -0
- package/scripts/utils/cli/verbosity.mjs +11 -0
- package/scripts/utils/cli/wizard.mjs +139 -0
- package/scripts/utils/cli/wizard_promptSelect.test.mjs +44 -0
- package/scripts/utils/cli/wizard_prompt_worktree_source_lazy.test.mjs +132 -0
- package/scripts/utils/cli/wizard_worktree_slug.test.mjs +33 -0
- package/scripts/utils/crypto/tokens.mjs +14 -0
- package/scripts/utils/dev/daemon.mjs +232 -0
- package/scripts/utils/dev/daemon_watch_resilience.test.mjs +224 -0
- package/scripts/utils/dev/expo_dev.buildEnv.test.mjs +35 -0
- package/scripts/utils/dev/expo_dev.mjs +478 -0
- package/scripts/utils/dev/expo_dev.test.mjs +89 -0
- package/scripts/utils/dev/expo_dev_restart_port_reservation.test.mjs +120 -0
- package/scripts/utils/dev/expo_dev_verbose_logs.test.mjs +60 -0
- package/scripts/utils/dev/server.mjs +180 -0
- package/scripts/utils/dev_auth_key.mjs +7 -0
- package/scripts/utils/edison/git_roots.mjs +30 -0
- package/scripts/utils/edison/git_roots.test.mjs +49 -0
- package/scripts/utils/env/config.mjs +52 -0
- package/scripts/utils/env/dotenv.mjs +32 -0
- package/scripts/utils/env/dotenv.test.mjs +32 -0
- package/scripts/utils/env/env.mjs +130 -0
- package/scripts/utils/env/env_file.mjs +98 -0
- package/scripts/utils/env/env_file.test.mjs +49 -0
- package/scripts/utils/env/env_local.mjs +25 -0
- package/scripts/utils/env/load_env_file.mjs +34 -0
- package/scripts/utils/env/read.mjs +30 -0
- package/scripts/utils/env/sandbox.mjs +13 -0
- package/scripts/utils/env/scrub_env.mjs +69 -0
- package/scripts/utils/env/scrub_env.test.mjs +102 -0
- package/scripts/utils/env/values.mjs +13 -0
- package/scripts/utils/expo/command.mjs +65 -0
- package/scripts/utils/expo/expo.mjs +139 -0
- package/scripts/utils/expo/expo_state_running.test.mjs +48 -0
- package/scripts/utils/expo/metro_ports.mjs +101 -0
- package/scripts/utils/expo/metro_ports.test.mjs +35 -0
- package/scripts/utils/fs/atomic_dir_swap.mjs +55 -0
- package/scripts/utils/fs/atomic_dir_swap.test.mjs +54 -0
- package/scripts/utils/fs/file_has_content.mjs +10 -0
- package/scripts/utils/fs/fs.mjs +11 -0
- package/scripts/utils/fs/json.mjs +25 -0
- package/scripts/utils/fs/ops.mjs +29 -0
- package/scripts/utils/fs/package_json.mjs +8 -0
- package/scripts/utils/fs/tail.mjs +12 -0
- package/scripts/utils/git/dev_checkout.mjs +127 -0
- package/scripts/utils/git/dev_checkout.test.mjs +115 -0
- package/scripts/utils/git/git.mjs +67 -0
- package/scripts/utils/git/parse_name_status_z.mjs +21 -0
- package/scripts/utils/git/refs.mjs +26 -0
- package/scripts/utils/git/worktrees.mjs +323 -0
- package/scripts/utils/git/worktrees_monorepo.test.mjs +60 -0
- package/scripts/utils/git/worktrees_pathstyle.test.mjs +53 -0
- package/scripts/utils/llm/assist.mjs +260 -0
- package/scripts/utils/llm/codex_exec.mjs +61 -0
- package/scripts/utils/llm/codex_exec.test.mjs +46 -0
- package/scripts/utils/llm/hstack_runner.mjs +59 -0
- package/scripts/utils/llm/tools.mjs +56 -0
- package/scripts/utils/llm/tools.test.mjs +67 -0
- package/scripts/utils/menubar/swiftbar.mjs +121 -0
- package/scripts/utils/menubar/swiftbar.test.mjs +85 -0
- package/scripts/utils/mobile/config.mjs +35 -0
- package/scripts/utils/mobile/dev_client_links.mjs +59 -0
- package/scripts/utils/mobile/identifiers.mjs +46 -0
- package/scripts/utils/mobile/identifiers.test.mjs +41 -0
- package/scripts/utils/mobile/ios_xcodeproj_patch.mjs +128 -0
- package/scripts/utils/mobile/ios_xcodeproj_patch.test.mjs +131 -0
- package/scripts/utils/net/bind_mode.mjs +39 -0
- package/scripts/utils/net/dns.mjs +10 -0
- package/scripts/utils/net/lan_ip.mjs +24 -0
- package/scripts/utils/net/ports.mjs +110 -0
- package/scripts/utils/net/tcp_forward.mjs +162 -0
- package/scripts/utils/net/url.mjs +30 -0
- package/scripts/utils/net/url.test.mjs +29 -0
- package/scripts/utils/paths/canonical_home.mjs +15 -0
- package/scripts/utils/paths/canonical_home.test.mjs +28 -0
- package/scripts/utils/paths/localhost_host.mjs +112 -0
- package/scripts/utils/paths/localhost_host.test.mjs +58 -0
- package/scripts/utils/paths/paths.mjs +302 -0
- package/scripts/utils/paths/paths_env_win32.test.mjs +36 -0
- package/scripts/utils/paths/paths_monorepo.test.mjs +58 -0
- package/scripts/utils/paths/paths_server_flavors.test.mjs +50 -0
- package/scripts/utils/paths/runtime.mjs +41 -0
- package/scripts/utils/pglite_lock.mjs +107 -0
- package/scripts/utils/proc/commands.mjs +33 -0
- package/scripts/utils/proc/exit_cleanup.mjs +57 -0
- package/scripts/utils/proc/happy_monorepo_deps.mjs +37 -0
- package/scripts/utils/proc/happy_monorepo_deps.test.mjs +89 -0
- package/scripts/utils/proc/ownership.mjs +217 -0
- package/scripts/utils/proc/ownership_killProcessGroupOwnedByStack.test.mjs +216 -0
- package/scripts/utils/proc/ownership_listPidsWithEnvNeedles.test.mjs +88 -0
- package/scripts/utils/proc/package_scripts.mjs +38 -0
- package/scripts/utils/proc/package_scripts.test.mjs +58 -0
- package/scripts/utils/proc/parallel.mjs +25 -0
- package/scripts/utils/proc/pids.mjs +11 -0
- package/scripts/utils/proc/pm.mjs +478 -0
- package/scripts/utils/proc/pm_spawn.integration.test.mjs +131 -0
- package/scripts/utils/proc/pm_stack_cache_env.test.mjs +313 -0
- package/scripts/utils/proc/proc.mjs +331 -0
- package/scripts/utils/proc/proc.test.mjs +85 -0
- package/scripts/utils/proc/terminate.mjs +69 -0
- package/scripts/utils/proc/terminate.test.mjs +54 -0
- package/scripts/utils/proc/watch.mjs +63 -0
- package/scripts/utils/review/augment_runner_integration.test.mjs +105 -0
- package/scripts/utils/review/base_ref.mjs +82 -0
- package/scripts/utils/review/base_ref.test.mjs +89 -0
- package/scripts/utils/review/chunks.mjs +55 -0
- package/scripts/utils/review/chunks.test.mjs +107 -0
- package/scripts/utils/review/detached_worktree.mjs +61 -0
- package/scripts/utils/review/detached_worktree.test.mjs +61 -0
- package/scripts/utils/review/findings.mjs +278 -0
- package/scripts/utils/review/findings.test.mjs +203 -0
- package/scripts/utils/review/head_slice.mjs +132 -0
- package/scripts/utils/review/head_slice.test.mjs +117 -0
- package/scripts/utils/review/instructions/deep.md +20 -0
- package/scripts/utils/review/prompts.mjs +279 -0
- package/scripts/utils/review/prompts.test.mjs +77 -0
- package/scripts/utils/review/run_reviewers_safe.mjs +12 -0
- package/scripts/utils/review/run_reviewers_safe.test.mjs +45 -0
- package/scripts/utils/review/runners/augment.mjs +91 -0
- package/scripts/utils/review/runners/augment.test.mjs +64 -0
- package/scripts/utils/review/runners/claude.mjs +92 -0
- package/scripts/utils/review/runners/claude.test.mjs +47 -0
- package/scripts/utils/review/runners/coderabbit.mjs +105 -0
- package/scripts/utils/review/runners/coderabbit.test.mjs +32 -0
- package/scripts/utils/review/runners/codex.mjs +129 -0
- package/scripts/utils/review/runners/codex.test.mjs +115 -0
- package/scripts/utils/review/slice_mode.mjs +20 -0
- package/scripts/utils/review/slice_mode.test.mjs +69 -0
- package/scripts/utils/review/sliced_runner.mjs +39 -0
- package/scripts/utils/review/sliced_runner.test.mjs +57 -0
- package/scripts/utils/review/slices.mjs +140 -0
- package/scripts/utils/review/slices.test.mjs +41 -0
- package/scripts/utils/review/targets.mjs +23 -0
- package/scripts/utils/review/targets.test.mjs +31 -0
- package/scripts/utils/review/tool_home_seed.mjs +106 -0
- package/scripts/utils/review/tool_home_seed.test.mjs +124 -0
- package/scripts/utils/review/uncommitted_ops.mjs +77 -0
- package/scripts/utils/review/uncommitted_ops.test.mjs +117 -0
- package/scripts/utils/sandbox/review_pr_sandbox.mjs +105 -0
- package/scripts/utils/server/apply_server_light_env_defaults.mjs +14 -0
- package/scripts/utils/server/flavor_scripts.mjs +138 -0
- package/scripts/utils/server/flavor_scripts.test.mjs +115 -0
- package/scripts/utils/server/infra/happy_server_infra.mjs +444 -0
- package/scripts/utils/server/mobile_api_url.mjs +60 -0
- package/scripts/utils/server/mobile_api_url.test.mjs +58 -0
- package/scripts/utils/server/port.mjs +55 -0
- package/scripts/utils/server/prisma_import.mjs +36 -0
- package/scripts/utils/server/prisma_import.test.mjs +78 -0
- package/scripts/utils/server/server.mjs +109 -0
- package/scripts/utils/server/ui_build_check.mjs +37 -0
- package/scripts/utils/server/ui_build_check.test.mjs +70 -0
- package/scripts/utils/server/ui_env.mjs +13 -0
- package/scripts/utils/server/ui_env.test.mjs +57 -0
- package/scripts/utils/server/urls.mjs +100 -0
- package/scripts/utils/server/validate.mjs +60 -0
- package/scripts/utils/server/validate.test.mjs +76 -0
- package/scripts/utils/service/autostart_darwin.mjs +198 -0
- package/scripts/utils/service/autostart_darwin.test.mjs +49 -0
- package/scripts/utils/service/autostart_darwin_keepalive.test.mjs +19 -0
- package/scripts/utils/stack/cli_identities.mjs +29 -0
- package/scripts/utils/stack/context.mjs +19 -0
- package/scripts/utils/stack/dirs.mjs +26 -0
- package/scripts/utils/stack/editor_workspace.mjs +126 -0
- package/scripts/utils/stack/interactive_stack_config.mjs +266 -0
- package/scripts/utils/stack/interactive_stack_config.port_validation.test.mjs +93 -0
- package/scripts/utils/stack/interactive_stack_config.remote_validation.test.mjs +122 -0
- package/scripts/utils/stack/interactive_stack_config.stack_name_validation.test.mjs +76 -0
- package/scripts/utils/stack/interactive_stack_config_testkit.mjs +18 -0
- package/scripts/utils/stack/names.mjs +27 -0
- package/scripts/utils/stack/names.test.mjs +26 -0
- package/scripts/utils/stack/pr_stack_name.mjs +16 -0
- package/scripts/utils/stack/runtime_state.mjs +88 -0
- package/scripts/utils/stack/stacks.mjs +40 -0
- package/scripts/utils/stack/startup.mjs +370 -0
- package/scripts/utils/stack/startup_server_light_dirs.test.mjs +119 -0
- package/scripts/utils/stack/startup_server_light_generate.test.mjs +20 -0
- package/scripts/utils/stack/startup_server_light_legacy.test.mjs +79 -0
- package/scripts/utils/stack/startup_server_light_testkit.mjs +106 -0
- package/scripts/utils/stack/stop.mjs +284 -0
- package/scripts/utils/stack_context.mjs +1 -0
- package/scripts/utils/stack_runtime_state.mjs +1 -0
- package/scripts/utils/stacks.mjs +1 -0
- package/scripts/utils/tailscale/ip.mjs +116 -0
- package/scripts/utils/tauri/stack_overrides.mjs +22 -0
- package/scripts/utils/test/collect_test_files.mjs +29 -0
- package/scripts/utils/time/get_today_ymd.mjs +7 -0
- package/scripts/utils/tui/cleanup.mjs +38 -0
- package/scripts/utils/ui/ansi.mjs +47 -0
- package/scripts/utils/ui/browser.mjs +31 -0
- package/scripts/utils/ui/browser.test.mjs +56 -0
- package/scripts/utils/ui/clipboard.mjs +38 -0
- package/scripts/utils/ui/layout.mjs +44 -0
- package/scripts/utils/ui/qr.mjs +17 -0
- package/scripts/utils/ui/terminal_launcher.mjs +129 -0
- package/scripts/utils/ui/text.mjs +16 -0
- package/scripts/utils/update/auto_update_notice.mjs +93 -0
- package/scripts/utils/validate.mjs +5 -0
- package/scripts/where.mjs +138 -0
- package/scripts/worktrees.mjs +2174 -0
- package/scripts/worktrees_archive_cmd.integration.test.mjs +228 -0
- package/scripts/worktrees_cursor_monorepo_root.test.mjs +23 -0
- package/scripts/worktrees_list_specs_no_recurse.test.mjs +32 -0
- package/scripts/worktrees_monorepo_testkit.test.mjs +29 -0
- package/scripts/worktrees_monorepo_use_group.test.mjs +41 -0
package/scripts/review.mjs
@@ -0,0 +1,1752 @@
import './utils/env/env.mjs';
import { parseArgs } from './utils/cli/args.mjs';
import { printResult, wantsHelp, wantsJson } from './utils/cli/cli.mjs';
import { coerceHappyMonorepoRootFromPath, getComponentDir, getRootDir } from './utils/paths/paths.mjs';
import { getInvokedCwd, inferComponentFromCwd } from './utils/cli/cwd_scope.mjs';
import { assertCliPrereqs } from './utils/cli/prereqs.mjs';
import { resolveBaseRef } from './utils/review/base_ref.mjs';
import { isStackMode, resolveDefaultStackReviewComponents } from './utils/review/targets.mjs';
import { planCommitChunks } from './utils/review/chunks.mjs';
import { planPathSlices } from './utils/review/slices.mjs';
import { createHeadSliceCommits, getChangedOps } from './utils/review/head_slice.mjs';
import { assertSafeRelativeRepoPath, getUncommittedOps } from './utils/review/uncommitted_ops.mjs';
import { runWithConcurrencyLimit } from './utils/proc/parallel.mjs';
import { runCodeRabbitReview } from './utils/review/runners/coderabbit.mjs';
import { extractCodexReviewFromJsonl, runCodexReview } from './utils/review/runners/codex.mjs';
import { detectAugmentAuthError, runAugmentReview } from './utils/review/runners/augment.mjs';
import { detectClaudeAuthError, runClaudeReview } from './utils/review/runners/claude.mjs';
import { formatTriageMarkdown, parseCodeRabbitPlainOutput, parseCodexReviewText } from './utils/review/findings.mjs';
import {
  buildCodexDeepPrompt,
  buildCodexNormalPrompt,
  buildCodexMonorepoDeepPrompt,
  buildCodexMonorepoNormalPrompt,
  buildCodexAuditPrompt,
  buildCodexMonorepoAuditPrompt,
  buildCodexMonorepoSlicePrompt,
  buildUncommittedSlicePrompt,
} from './utils/review/prompts.mjs';
import { runSlicedJobs } from './utils/review/sliced_runner.mjs';
import { seedAugmentHomeFromRealHome, seedCodeRabbitHomeFromRealHome, seedCodexHomeFromRealHome } from './utils/review/tool_home_seed.mjs';
import { shouldUseUncommittedPathSlices } from './utils/review/slice_mode.mjs';
import { runReviewersSafe } from './utils/review/run_reviewers_safe.mjs';
import { dirname, join } from 'node:path';
import { ensureDir } from './utils/fs/ops.mjs';
import { copyFile, readFile, rm, writeFile } from 'node:fs/promises';
import { existsSync } from 'node:fs';
import { runCapture } from './utils/proc/proc.mjs';
import { withDetachedWorktree } from './utils/review/detached_worktree.mjs';

const VALID_TARGETS = ['ui', 'cli', 'server'];
const DEFAULT_TARGETS = VALID_TARGETS;
const VALID_COMPONENTS = ['happier-ui', 'happier-cli', 'happier-server'];
const VALID_REVIEWERS = ['coderabbit', 'codex', 'augment', 'claude'];
const VALID_DEPTHS = ['deep', 'normal'];
const VALID_CHANGE_TYPES = ['committed', 'uncommitted', 'all'];
const VALID_REVIEW_MODES = ['diff', 'audit'];
const DEFAULT_REVIEW_MAX_FILES = 50;

function parseCsv(raw) {
  return String(raw ?? '')
    .split(',')
    .map((s) => s.trim())
    .filter(Boolean);
}

function normalizeReviewers(list) {
  const raw = Array.isArray(list) ? list : [];
  const lower = raw.map((r) => String(r).trim().toLowerCase()).filter(Boolean);
  const uniq = Array.from(new Set(lower));
  return uniq.length ? uniq : ['coderabbit'];
}

function normalizeChangeType(raw) {
  const t = String(raw ?? '').trim().toLowerCase();
  if (!t) return 'committed';
  if (VALID_CHANGE_TYPES.includes(t)) return t;
  throw new Error(`[review] invalid --type=${raw} (expected: ${VALID_CHANGE_TYPES.join(' | ')})`);
}

function subsetSet(set, allowed) {
  const out = new Set();
  for (const v of set) {
    if (allowed.has(v)) out.add(v);
  }
  return out;
}

function normalizeCodexModelAlias(raw) {
  const m = String(raw ?? '').trim();
  // Back-compat: early experiments used "codex-5.3" as a shorthand, but the Codex CLI expects
  // the actual model ID ("gpt-5.3-codex").
  if (m === 'codex-5.3') return 'gpt-5.3-codex';
  return m;
}
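
// Materialize one slice of uncommitted changes into an ephemeral review worktree:
// delete the slice's removed paths, then copy its added/modified files over from the source repo.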
async function applyUncommittedSlice({ srcRepoDir, worktreeDir, checkoutPaths, removePaths }) {
  for (const rel of removePaths) {
    const safeRel = assertSafeRelativeRepoPath(rel);
    // Best-effort: remove file/dir if it exists.
    // This is an ephemeral review worktree; being defensive is fine.
    // eslint-disable-next-line no-await-in-loop
    await rm(join(worktreeDir, safeRel), { recursive: true, force: true });
  }

  for (const rel of checkoutPaths) {
    const safeRel = assertSafeRelativeRepoPath(rel);
    const dest = join(worktreeDir, safeRel);
    const src = join(srcRepoDir, safeRel);
    if (!existsSync(src)) {
      // A file can disappear between planning and application if the worktree changes mid-run.
      // Treat missing sources as a deletion in the ephemeral review worktree.
      // eslint-disable-next-line no-await-in-loop
      await rm(dest, { recursive: true, force: true });
      continue;
    }
    // eslint-disable-next-line no-await-in-loop
    await ensureDir(dirname(dest));
    try {
      // eslint-disable-next-line no-await-in-loop
      await copyFile(src, dest);
    } catch (e) {
      if (e && typeof e === 'object' && 'code' in e && e.code === 'ENOENT') {
        // A file can disappear between planning and application if the worktree changes mid-run.
        // Treat missing sources as a deletion in the ephemeral review worktree.
        // eslint-disable-next-line no-await-in-loop
        await rm(dest, { recursive: true, force: true });
        continue;
      }
      throw e;
    }
  }
}

function usage() {
  return [
    '[review] usage:',
    ' hstack tools review [ui|cli|server|all] [--reviewers=coderabbit,codex,augment,claude] [--review-mode=diff|audit] [--review-paths=pathA,pathB] [--type=committed|uncommitted|all] [--base-remote=<remote>] [--base-branch=<branch>] [--base-ref=<ref>] [--concurrency=N] [--depth=deep|normal] [--chunks|--no-chunks] [--chunking=auto|head-slice|commit-window] [--chunk-max-files=N] [--review-prompt=<text>] [--review-prompt-file=<path>] [--coderabbit-type=committed|uncommitted|all] [--coderabbit-max-files=N] [--coderabbit-chunks|--no-coderabbit-chunks] [--codex-chunks|--no-codex-chunks] [--codex-model=<id>] [--claude-model=<id>] [--augment-chunks|--no-augment-chunks] [--augment-model=<id>] [--augment-max-turns=N] [--run-label=<label>] [--no-stream] [--json]',
    '',
    'targets:',
    ` ${[...VALID_TARGETS, 'all'].join(' | ')}`,
    '',
    'reviewers:',
    ` ${VALID_REVIEWERS.join(' | ')}`,
    '',
    'depth:',
    ` ${VALID_DEPTHS.join(' | ')}`,
    '',
    'notes:',
    '- If run from inside a repo checkout/worktree and no targets are provided, defaults to the inferred app (ui/cli/server).',
    '- In stack mode, if no targets are provided, defaults to reviewing only when the stack is pinned to a non-default repo/worktree.',
    '',
    'examples:',
    ' hstack tools review',
    ' hstack tools review cli --reviewers=coderabbit,codex',
    ' hstack tools review ui --base-remote=upstream --base-branch=main',
  ].join('\n');
}

function resolveComponentFromCwdOrNull({ rootDir, invokedCwd }) {
  return inferComponentFromCwd({ rootDir, invokedCwd, components: VALID_COMPONENTS });
}

function stackRemoteFallbackFromEnv(env) {
  return String(env.HAPPIER_STACK_STACK_REMOTE ?? '').trim();
}

function targetFromLegacyComponent(component) {
  const c = String(component ?? '').trim();
  if (c === 'happier-ui') return 'ui';
  if (c === 'happier-cli') return 'cli';
  if (c === 'happier-server' || c === 'happier-server-light') return 'server';
  return null;
}

function legacyComponentFromTarget(target) {
  const t = String(target ?? '').trim();
  if (t === 'ui') return 'happier-ui';
  if (t === 'cli') return 'happier-cli';
  if (t === 'server') return 'happier-server';
  return null;
}

function normalizeTargets(rawTargets) {
  const requested = Array.isArray(rawTargets) ? rawTargets.map((t) => String(t ?? '').trim()).filter(Boolean) : [];
  if (!requested.length) return ['all'];
  const mapped = requested
    .map((t) => {
      const lower = t.toLowerCase();
      if (lower === 'all') return 'all';
      if (VALID_TARGETS.includes(lower)) return lower;
      const legacy = targetFromLegacyComponent(lower);
      return legacy ?? null;
    })
    .filter(Boolean);
  return mapped.length ? mapped : ['all'];
}

function sanitizeLabel(raw) {
  return String(raw ?? '')
    .trim()
    .toLowerCase()
    .replace(/[^a-z0-9._-]+/g, '-')
    .replace(/^-+|-+$/g, '');
}

function tailLines(text, n) {
  const lines = String(text ?? '')
    .split('\n')
    .slice(-n)
    .join('\n')
    .trimEnd();
  return lines;
}

function formatInternalError(e) {
  if (e && typeof e === 'object') {
    const stack = 'stack' in e ? e.stack : null;
    if (stack) return String(stack);
  }
  return String(e ?? 'unknown error');
}

function detectCodeRabbitAuthError({ stdout, stderr }) {
  const combined = `${stdout ?? ''}\n${stderr ?? ''}`;
  return combined.includes('Authentication required') && combined.includes("coderabbit auth login");
}

function detectCodeRabbitNoFilesToReview({ stdout, stderr }) {
  const combined = `${stdout ?? ''}\n${stderr ?? ''}`.toLowerCase();
  return combined.includes('no files to review');
}

function detectCodexUsageLimit({ stdout, stderr }) {
  const combined = `${stdout ?? ''}\n${stderr ?? ''}`.toLowerCase();
  return combined.includes('usage limit') || combined.includes('http 429') || combined.includes('status code: 429');
}

function printReviewOperatorGuidance() {
  // Guidance for the human/LLM running the review (not the reviewer model itself).
  // eslint-disable-next-line no-console
  console.log(
    [
      '[review] operator guidance:',
      '- Treat reviewer output as suggestions; verify against best practices + this codebase before applying.',
      '- Triage every single finding (no skipping): apply / adjust / defer-with-rationale.',
      '- Do not apply changes blindly; when uncertain, record in the report for discussion.',
      '- When a suggestion references external standards, verify via official docs (or note what you checked).',
      '- Prefer unified fixes; avoid duplication; avoid brittle tests (no exact wording assertions).',
      '- This command writes a triage checklist file; work through it item-by-item and record decisions + commits.',
      '',
    ].join('\n')
  );
}

async function gitLines({ cwd, args, env }) {
  const out = await runCapture('git', args, { cwd, env });
  return String(out ?? '')
    .split('\n')
    .map((l) => l.trimEnd())
    .filter(Boolean);
}

async function countChangedFiles({ cwd, base, env }) {
  const lines = await gitLines({ cwd, env, args: ['diff', '--name-only', `${base}...HEAD`] });
  return lines.length;
}

async function countChangedFilesBetween({ cwd, base, head, env }) {
  const lines = await gitLines({ cwd, env, args: ['diff', '--name-only', `${base}...${head}`] });
  return lines.length;
}

async function mergeBase({ cwd, a, b, env }) {
  const out = await runCapture('git', ['merge-base', a, b], { cwd, env });
  const mb = String(out ?? '').trim();
  if (!mb) throw new Error('[review] failed to compute merge-base');
  return mb;
}

async function listCommitsBetween({ cwd, base, head, env }) {
  return await gitLines({ cwd, env, args: ['rev-list', '--reverse', `${base}..${head}`] });
}
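
// Binary-search the baseRef..HEAD commit list for the earliest base commit whose diff against HEAD
// still fits within maxFiles, so a single CodeRabbit pass covers as many commits as possible
// without exceeding its file limit. Returns null when there are no commits in the range.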
async function pickCoderabbitBaseCommitForMaxFiles({ cwd, baseRef, maxFiles, env }) {
  const commits = await gitLines({ cwd, env, args: ['rev-list', '--reverse', `${baseRef}..HEAD`] });
  if (!commits.length) return null;

  let lo = 0;
  let hi = commits.length - 1;
  let best = null;

  while (lo <= hi) {
    const mid = Math.floor((lo + hi) / 2);
    const startCommit = commits[mid];
    let baseCommit = '';
    try {
      baseCommit = (await runCapture('git', ['rev-parse', `${startCommit}^`], { cwd, env })).toString().trim();
    } catch {
      baseCommit = (await runCapture('git', ['rev-parse', startCommit], { cwd, env })).toString().trim();
    }

    const n = await countChangedFiles({ cwd, env, base: baseCommit });
    if (n <= maxFiles) {
      best = baseCommit;
      hi = mid - 1;
    } else {
      lo = mid + 1;
    }
  }

  return best;
}
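
// Entry point: parse flags, resolve review targets and per-job base refs, seed isolated reviewer
// home dirs, then run the selected reviewers with a concurrency limit, writing raw outputs and the
// triage checklist under .project/reviews/<run-label>/.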
async function main() {
  const argv = process.argv.slice(2);
  const { flags, kv } = parseArgs(argv);
  const json = wantsJson(argv, { flags });
  const stream = !json && !flags.has('--no-stream');

  if (wantsHelp(argv, { flags })) {
    printResult({ json, data: { usage: usage() }, text: usage() });
    return;
  }

  const rootDir = getRootDir(import.meta.url);
  const invokedCwd = getInvokedCwd(process.env);
  const positionals = argv.filter((a) => !a.startsWith('--'));

  const reviewers = normalizeReviewers(parseCsv(kv.get('--reviewers') ?? ''));
  for (const r of reviewers) {
    if (!VALID_REVIEWERS.includes(r)) {
      throw new Error(`[review] unknown reviewer: ${r} (expected one of: ${VALID_REVIEWERS.join(', ')})`);
    }
  }

  await assertCliPrereqs({
    git: true,
    coderabbit: reviewers.includes('coderabbit'),
    codex: reviewers.includes('codex'),
    augment: reviewers.includes('augment'),
    claude: reviewers.includes('claude'),
  });

  const inferredFromCwd = resolveComponentFromCwdOrNull({ rootDir, invokedCwd });
  if (inferredFromCwd && !(process.env.HAPPIER_STACK_REPO_DIR ?? '').toString().trim()) {
    // Make downstream getComponentDir() resolve to the inferred repo dir for this run.
    // This is intentionally independent of target positionals: users often pass `all`/`ui`/`cli`/`server`
    // but still expect the repo/worktree to be inferred from their current directory.
    process.env.HAPPIER_STACK_REPO_DIR = inferredFromCwd.repoDir;
  }

  const inStackMode = isStackMode(process.env);
  const inferredTarget = inferredFromCwd ? targetFromLegacyComponent(inferredFromCwd.component) : null;
  const requestedTargets = normalizeTargets(positionals.length ? positionals : inferredTarget ? [inferredTarget] : ['all']);
  const wantAll = requestedTargets.includes('all');

  let targets = wantAll ? DEFAULT_TARGETS : requestedTargets;
  if (!positionals.length && !inferredFromCwd && inStackMode) {
    const pinned = resolveDefaultStackReviewComponents({ rootDir, components: DEFAULT_TARGETS });
    targets = pinned.length ? pinned : [];
  }

  for (const t of targets) {
    if (!VALID_TARGETS.includes(t)) {
      throw new Error(`[review] unknown target: ${t} (expected one of: ${[...VALID_TARGETS, 'all'].join(', ')})`);
    }
  }

  if (!targets.length) {
    const msg = inStackMode ? '[review] no non-default stack-pinned repo/worktree to review' : '[review] no targets selected';
    printResult({ json, data: { ok: true, skipped: true, reason: msg }, text: msg });
    return;
  }

  const components = targets.map((t) => legacyComponentFromTarget(t)).filter(Boolean);

  const baseRefOverride = (kv.get('--base-ref') ?? '').trim();
  const baseRemoteOverride = (kv.get('--base-remote') ?? '').trim();
  const baseBranchOverride = (kv.get('--base-branch') ?? '').trim();
  const stackRemoteFallback = stackRemoteFallbackFromEnv(process.env);
  const concurrency = (kv.get('--concurrency') ?? '').trim();
  const limit = concurrency ? Number(concurrency) : 4;
  const depth = (kv.get('--depth') ?? 'deep').toString().trim().toLowerCase();
  const changeType = normalizeChangeType(kv.get('--type') ?? kv.get('--review-type') ?? 'committed');
  const reviewMode = (kv.get('--review-mode') ?? 'diff').toString().trim().toLowerCase();
  const reviewPaths = parseCsv(kv.get('--review-paths') ?? kv.get('--audit-paths') ?? '');
  const coderabbitTypeRaw = (kv.get('--coderabbit-type') ?? '').toString().trim();
  const coderabbitType = coderabbitTypeRaw
    ? (() => {
        try {
          return normalizeChangeType(coderabbitTypeRaw);
        } catch {
          throw new Error(
            `[review] invalid --coderabbit-type=${coderabbitTypeRaw} (expected: ${VALID_CHANGE_TYPES.join(' | ')})`,
          );
        }
      })()
    : changeType;
  const chunkingMode = (kv.get('--chunking') ?? 'auto').toString().trim().toLowerCase();
  const codexModelFlag = normalizeCodexModelAlias((kv.get('--codex-model') ?? '').toString().trim());
  const claudeModelFlag = (kv.get('--claude-model') ?? '').toString().trim();
  const augmentModelFlag = (kv.get('--augment-model') ?? '').toString().trim();
  const augmentMaxTurnsFlag = (kv.get('--augment-max-turns') ?? '').toString().trim();
  const reviewPromptFlag = (kv.get('--review-prompt') ?? '').toString();
  const reviewPromptFileFlag = (kv.get('--review-prompt-file') ?? '').toString().trim();
  const chunkMaxFilesRaw = (kv.get('--chunk-max-files') ?? '').toString().trim();
  const coderabbitMaxFilesRaw = (kv.get('--coderabbit-max-files') ?? '').toString().trim();
  const coderabbitMaxFiles = coderabbitMaxFilesRaw ? Number(coderabbitMaxFilesRaw) : DEFAULT_REVIEW_MAX_FILES;
  const chunkMaxFiles = chunkMaxFilesRaw ? Number(chunkMaxFilesRaw) : coderabbitMaxFiles;
  const globalChunks = flags.has('--chunks') ? true : flags.has('--no-chunks') ? false : null;
  const coderabbitChunksOverride = flags.has('--coderabbit-chunks')
    ? true
    : flags.has('--no-coderabbit-chunks')
      ? false
      : null;
  const codexChunksOverride = flags.has('--codex-chunks') ? true : flags.has('--no-codex-chunks') ? false : null;
  const augmentChunksOverride = flags.has('--augment-chunks') ? true : flags.has('--no-augment-chunks') ? false : null;
  if (!VALID_DEPTHS.includes(depth)) {
    throw new Error(`[review] invalid --depth=${depth} (expected: ${VALID_DEPTHS.join(' | ')})`);
  }
  if (!VALID_REVIEW_MODES.includes(reviewMode)) {
    throw new Error(`[review] invalid --review-mode=${reviewMode} (expected: ${VALID_REVIEW_MODES.join(' | ')})`);
  }
  if (!['auto', 'head-slice', 'commit-window'].includes(chunkingMode)) {
    throw new Error('[review] invalid --chunking (expected: auto|head-slice|commit-window)');
  }

  if (codexModelFlag) process.env.HAPPIER_STACK_CODEX_MODEL = codexModelFlag;
  if (claudeModelFlag) process.env.HAPPIER_STACK_CLAUDE_MODEL = claudeModelFlag;
  if (augmentModelFlag) process.env.HAPPIER_STACK_AUGMENT_MODEL = augmentModelFlag;
  if (augmentMaxTurnsFlag) process.env.HAPPIER_STACK_AUGMENT_MAX_TURNS = augmentMaxTurnsFlag;

  let customReviewPrompt = String(reviewPromptFlag ?? '').trim();
  if (reviewPromptFileFlag) {
    const resolved = reviewPromptFileFlag.startsWith('/')
      ? reviewPromptFileFlag
      : join(getInvokedCwd(process.env), reviewPromptFileFlag);
    customReviewPrompt = (await readFile(resolved, 'utf8')).toString().trim();
  }

  // Review artifacts: always create a per-run directory containing raw outputs + a triage checklist.
  const reviewsRootDir = join(rootDir, '.project', 'reviews');
  await ensureDir(reviewsRootDir);
  const runLabelOverride = (kv.get('--run-label') ?? '').toString().trim();
  const ts = new Date().toISOString().replace(/[:.]/g, '-');
  const stackName = (process.env.HAPPIER_STACK_STACK ?? '').toString().trim();
  const defaultLabel = `review-${ts}${stackName ? `-${sanitizeLabel(stackName)}` : ''}`;
  const runLabel = sanitizeLabel(runLabelOverride || defaultLabel) || defaultLabel;
  const runDir = join(reviewsRootDir, runLabel);
  await ensureDir(runDir);
  await ensureDir(join(runDir, 'raw'));

  const deepInstructionsPath = join(rootDir, 'scripts', 'utils', 'review', 'instructions', 'deep.md');
  const coderabbitConfigFiles = depth === 'deep' ? [deepInstructionsPath] : [];

  if (reviewers.includes('coderabbit')) {
    const coderabbitHomeKey = 'HAPPIER_STACK_CODERABBIT_HOME_DIR';
    if (!(process.env[coderabbitHomeKey] ?? '').toString().trim()) {
      process.env[coderabbitHomeKey] = join(rootDir, '.project', 'coderabbit-home');
    }
    await ensureDir(process.env[coderabbitHomeKey]);

    // Seed CodeRabbit auth/config into the isolated home dir so review runs can be non-interactive.
    // We never print or inspect auth contents.
    try {
      const realHome = (process.env.HOME ?? '').toString().trim();
      const overrideHome = (process.env[coderabbitHomeKey] ?? '').toString().trim();
      if (realHome && overrideHome && realHome !== overrideHome) {
        await seedCodeRabbitHomeFromRealHome({ realHomeDir: realHome, isolatedHomeDir: overrideHome });
      }
    } catch {
      // ignore (coderabbit will surface auth issues if seeding fails)
    }
  }

  if (reviewers.includes('codex')) {
    const codexHomeKey = 'HAPPIER_STACK_CODEX_HOME_DIR';
    if (!(process.env[codexHomeKey] ?? '').toString().trim()) {
      process.env[codexHomeKey] = join(runDir, 'tool-homes', 'codex');
    }
    await ensureDir(process.env[codexHomeKey]);

    if (!(process.env.HAPPIER_STACK_CODEX_SANDBOX ?? '').toString().trim()) {
      process.env.HAPPIER_STACK_CODEX_SANDBOX = 'workspace-write';
    }

    // Seed Codex auth/config into the isolated CODEX_HOME to avoid sandbox permission issues
    // writing under the real ~/.codex. We never print or inspect auth contents.
    try {
      const realHome = (process.env.HOME ?? '').toString().trim();
      const overrideHome = process.env[codexHomeKey];
      if (realHome && overrideHome && realHome !== overrideHome) {
        await seedCodexHomeFromRealHome({ realHomeDir: realHome, isolatedHomeDir: overrideHome });
      }
    } catch {
      // ignore (codex will surface auth issues if seeding fails)
    }
  }

  if (reviewers.includes('augment')) {
    const augmentHomeKey = 'HAPPIER_STACK_AUGMENT_CACHE_DIR';
    if (!(process.env[augmentHomeKey] ?? '').toString().trim()) {
      process.env[augmentHomeKey] = join(rootDir, '.project', 'augment-home');
    }
    await ensureDir(process.env[augmentHomeKey]);

    // Seed Auggie auth/config into the isolated cache dir so review runs can be non-interactive.
    // We never print or inspect auth contents.
    try {
      const realHome = (process.env.HOME ?? '').toString().trim();
      const overrideHome = process.env[augmentHomeKey];
      if (realHome && overrideHome && realHome !== overrideHome) {
        await seedAugmentHomeFromRealHome({ realHomeDir: realHome, isolatedHomeDir: overrideHome });
      }
    } catch {
      // ignore (auggie will surface auth issues if seeding fails)
    }
  }

  if (stream) {
    // eslint-disable-next-line no-console
    console.log('[review] note: this can take a long time (up to 60+ minutes per reviewer). No timeout is enforced.');
    printReviewOperatorGuidance();
  }

  const resolved = components.map((component) => ({ component, repoDir: getComponentDir(rootDir, component) }));
  const monoRoots = new Set(resolved.map((x) => coerceHappyMonorepoRootFromPath(x.repoDir)).filter(Boolean));
  if (monoRoots.size > 1) {
    const roots = Array.from(monoRoots).sort();
    throw new Error(
      `[review] multiple monorepo roots detected across selected component dirs:\n` +
        roots.map((r) => `- ${r}`).join('\n') +
        `\n\n` +
        `Fix: ensure all monorepo components (happier-ui/happier-cli/happier-server(-light)) point at the same worktree.\n` +
        `- Stack mode: use \`hstack stack wt <stack> -- use <owner/branch|/abs/path>\`\n` +
        `- One-shot: pass \`--repo=<owner/branch|/abs/path>\` to the stack command you're running`
    );
  }
  const monorepoRoot = monoRoots.size === 1 ? Array.from(monoRoots)[0] : null;

  const jobs = monorepoRoot
    ? [{ component: 'monorepo', repoDir: monorepoRoot, monorepo: true }]
    : resolved.map((x) => ({ component: x.component, repoDir: x.repoDir, monorepo: false }));

  const jobResults = await runWithConcurrencyLimit({
    items: jobs,
    limit,
    fn: async (job) => {
      const { component, repoDir, monorepo } = job;
      let base = { baseRef: '', remote: '', branch: '' };
      try {
        base = await resolveBaseRef({
          cwd: repoDir,
          baseRefOverride,
          baseRemoteOverride,
          baseBranchOverride,
          stackRemoteFallback,
        });

        const maxFiles = Number.isFinite(chunkMaxFiles) && chunkMaxFiles > 0 ? chunkMaxFiles : 300;
        const sliceConcurrency = Math.max(1, Math.floor(limit / Math.max(1, reviewers.length)));
        const wantChunksCoderabbit =
          coderabbitType === 'committed' || coderabbitType === 'uncommitted'
            ? (coderabbitChunksOverride ?? globalChunks)
            : false;
        const wantChunksCodex = codexChunksOverride ?? globalChunks;
        const wantChunksAugment = changeType === 'committed' ? (augmentChunksOverride ?? globalChunks) : false;
        const effectiveChunking = chunkingMode === 'auto' ? (monorepo ? 'head-slice' : 'commit-window') : chunkingMode;

        if (monorepo && stream) {
          // eslint-disable-next-line no-console
          console.log(
            `[review] monorepo detected at ${repoDir}; running a single unified review (chunking=${effectiveChunking}, concurrency=${sliceConcurrency}).`
          );
        }

        const perReviewer = await runReviewersSafe({
          reviewers,
          runReviewer: async (reviewer) => {
            if (reviewer === 'coderabbit') {
              const uncommittedOps = coderabbitType === 'uncommitted' ? await getUncommittedOps({ cwd: repoDir, env: process.env }) : null;
              const fileCount =
                coderabbitType === 'uncommitted'
                  ? (uncommittedOps?.all?.size ?? 0)
                  : await countChangedFiles({ cwd: repoDir, env: process.env, base: base.baseRef });
              const autoChunks = fileCount > maxFiles;

              let coderabbitBaseCommit = null;
              let note = '';

              // Uncommitted mode: CodeRabbit has a hard max-files limit. If exceeded, run in path slices
              // inside ephemeral detached worktrees so each slice stays under the limit.
              if (coderabbitType === 'uncommitted' && fileCount > maxFiles && (wantChunksCoderabbit ?? autoChunks)) {
                const ops = uncommittedOps ?? (await getUncommittedOps({ cwd: repoDir, env: process.env }));
                const slices = planPathSlices({ changedPaths: Array.from(ops.all), maxFiles });

                const sliceItems = slices.map((slice, i) => ({ slice, index: i + 1, of: slices.length }));
                const sliceResults = await runSlicedJobs({
                  items: sliceItems,
                  limit: sliceConcurrency,
                  run: async ({ slice, index, of }) => {
                    const logFile = join(runDir, 'raw', `coderabbit-slice-${index}-of-${of}-${sanitizeLabel(slice.label)}.log`);
                    const rr = await withDetachedWorktree(
                      { repoDir, headCommit: 'HEAD', label: `coderabbit-uncommitted-${index}-of-${of}`, env: process.env },
                      async (worktreeDir) => {
                        const allowed = new Set(slice.paths);
                        const sliceCheckout = subsetSet(ops.checkout, allowed);
                        const sliceRemove = subsetSet(ops.remove, allowed);
                        await applyUncommittedSlice({
                          srcRepoDir: repoDir,
                          worktreeDir,
                          checkoutPaths: sliceCheckout,
                          removePaths: sliceRemove,
                        });
                        return await runCodeRabbitReview({
                          repoDir: worktreeDir,
                          baseRef: null,
                          baseCommit: null,
                          env: process.env,
                          type: coderabbitType,
                          configFiles: coderabbitConfigFiles,
                          streamLabel: stream ? `monorepo:coderabbit:${index}/${of}` : undefined,
                          teeFile: logFile,
                          teeLabel: `monorepo:coderabbit:${index}/${of}`,
                        });
                      }
                    );
                    const noFilesToReview = detectCodeRabbitNoFilesToReview({ stdout: rr.stdout, stderr: rr.stderr });
                    const ok = Boolean(rr.ok) || noFilesToReview;
                    return {
                      index,
                      of,
                      slice: slice.label,
                      fileCount: slice.paths.length,
                      logFile,
                      ok,
                      exitCode: ok ? 0 : rr.exitCode,
                      signal: rr.signal,
                      durationMs: rr.durationMs,
                      stdout: rr.stdout ?? '',
                      stderr: rr.stderr ?? '',
                    };
                  },
                  shouldAbortEarly: (r) => detectCodeRabbitAuthError({ stdout: r?.stdout, stderr: r?.stderr }),
                });

                if (sliceResults.length === 1 && detectCodeRabbitAuthError(sliceResults[0])) {
                  const msg = `[review] coderabbit auth required: run 'coderabbit auth login' in an interactive session, then re-run this review.`;
                  // eslint-disable-next-line no-console
                  console.error(msg);
                }

                const okAll = sliceResults.every((r) => r.ok);
                return {
                  reviewer,
                  ok: okAll,
                  exitCode: okAll ? 0 : 1,
                  signal: null,
                  durationMs: sliceResults.reduce((acc, r) => acc + (r.durationMs ?? 0), 0),
                  stdout: '',
                  stderr: '',
                  note: `uncommitted slices: ${sliceResults.length} slices (maxFiles=${maxFiles})`,
                  slices: sliceResults,
                };
              }

              // Monorepo: prefer HEAD-sliced chunking so each slice is reviewed in the final HEAD state.
              if (monorepo && effectiveChunking === 'head-slice' && (wantChunksCoderabbit ?? autoChunks)) {
                const headCommit = (await runCapture('git', ['rev-parse', 'HEAD'], { cwd: repoDir, env: process.env })).trim();
                const baseCommit = (await runCapture('git', ['rev-parse', base.baseRef], { cwd: repoDir, env: process.env })).trim();
                const ops = await getChangedOps({ cwd: repoDir, baseRef: baseCommit, headRef: headCommit, env: process.env });
                const slices = planPathSlices({ changedPaths: Array.from(ops.all), maxFiles });

                const sliceItems = slices.map((slice, i) => ({ slice, index: i + 1, of: slices.length }));
                const sliceResults = await runSlicedJobs({
                  items: sliceItems,
                  limit: sliceConcurrency,
                  run: async ({ slice, index, of }) => {
                    const logFile = join(runDir, 'raw', `coderabbit-slice-${index}-of-${of}-${sanitizeLabel(slice.label)}.log`);
                    const rr = await withDetachedWorktree(
                      { repoDir, headCommit: baseCommit, label: `coderabbit-${index}-of-${of}`, env: process.env },
                      async (worktreeDir) => {
                        const { baseSliceCommit } = await createHeadSliceCommits({
                          cwd: worktreeDir,
                          env: process.env,
                          baseRef: baseCommit,
                          headCommit,
                          ops,
                          slicePaths: slice.paths,
                          label: slice.label.replace(/\/+$/g, ''),
                        });
                        return await runCodeRabbitReview({
                          repoDir: worktreeDir,
                          baseRef: null,
|
|
685
|
+
baseCommit: baseSliceCommit,
|
|
686
|
+
env: process.env,
|
|
687
|
+
type: coderabbitType,
|
|
688
|
+
configFiles: coderabbitConfigFiles,
|
|
689
|
+
streamLabel: stream ? `monorepo:coderabbit:${index}/${of}` : undefined,
|
|
690
|
+
teeFile: logFile,
|
|
691
|
+
teeLabel: `monorepo:coderabbit:${index}/${of}`,
|
|
692
|
+
});
|
|
693
|
+
}
|
|
694
|
+
);
|
|
695
|
+
const noFilesToReview = detectCodeRabbitNoFilesToReview({ stdout: rr.stdout, stderr: rr.stderr });
|
|
696
|
+
const ok = Boolean(rr.ok) || noFilesToReview;
|
|
697
|
+
return {
|
|
698
|
+
index,
|
|
699
|
+
of,
|
|
700
|
+
slice: slice.label,
|
|
701
|
+
fileCount: slice.paths.length,
|
|
702
|
+
logFile,
|
|
703
|
+
ok,
|
|
704
|
+
exitCode: ok ? 0 : rr.exitCode,
|
|
705
|
+
signal: rr.signal,
|
|
706
|
+
durationMs: rr.durationMs,
|
|
707
|
+
stdout: rr.stdout ?? '',
|
|
708
|
+
stderr: rr.stderr ?? '',
|
|
709
|
+
};
|
|
710
|
+
},
|
|
711
|
+
shouldAbortEarly: (r) => detectCodeRabbitAuthError({ stdout: r?.stdout, stderr: r?.stderr }),
|
|
712
|
+
});
|
|
713
|
+
|
|
714
|
+
if (sliceResults.length === 1 && detectCodeRabbitAuthError(sliceResults[0])) {
|
|
715
|
+
const msg = `[review] coderabbit auth required: run 'coderabbit auth login' in an interactive session, then re-run this review.`;
|
|
716
|
+
// eslint-disable-next-line no-console
|
|
717
|
+
console.error(msg);
|
|
718
|
+
}
|
|
719
|
+
|
|
720
|
+
const okAll = sliceResults.every((r) => r.ok);
|
|
721
|
+
return {
|
|
722
|
+
reviewer,
|
|
723
|
+
ok: okAll,
|
|
724
|
+
exitCode: okAll ? 0 : 1,
|
|
725
|
+
signal: null,
|
|
726
|
+
durationMs: sliceResults.reduce((acc, r) => acc + (r.durationMs ?? 0), 0),
|
|
727
|
+
stdout: '',
|
|
728
|
+
stderr: '',
|
|
729
|
+
note: `monorepo head-slice: ${sliceResults.length} slices (maxFiles=${maxFiles})`,
|
|
730
|
+
slices: sliceResults,
|
|
731
|
+
};
|
|
732
|
+
}
|
|
733
|
+
|
|
734
|
+
// Non-monorepo or non-sliced: optionally chunk by commit windows (older behavior).
|
|
735
|
+
if (coderabbitType === 'committed' && fileCount > maxFiles && effectiveChunking === 'commit-window' && (wantChunksCoderabbit ?? false)) {
|
|
736
|
+
// fall through to commit-window chunking below
|
|
737
|
+
} else if (coderabbitType === 'committed' && fileCount > maxFiles && (wantChunksCoderabbit === false || wantChunksCoderabbit == null)) {
|
|
738
|
+
coderabbitBaseCommit = await pickCoderabbitBaseCommitForMaxFiles({
|
|
739
|
+
cwd: repoDir,
|
|
740
|
+
env: process.env,
|
|
741
|
+
baseRef: base.baseRef,
|
|
742
|
+
maxFiles,
|
|
743
|
+
});
|
|
744
|
+
note = coderabbitBaseCommit
|
|
745
|
+
? `diff too large (${fileCount} files vs limit ${maxFiles}); using --base-commit ${coderabbitBaseCommit} for a partial review`
|
|
746
|
+
: `diff too large (${fileCount} files vs limit ${maxFiles}); unable to pick a --base-commit automatically`;
|
|
747
|
+
// eslint-disable-next-line no-console
|
|
748
|
+
console.log(`[review] coderabbit: ${note}`);
|
|
749
|
+
}
|
|
750
|
+
|
|
751
|
+
if (!(coderabbitType === 'committed' && fileCount > maxFiles && effectiveChunking === 'commit-window' && (wantChunksCoderabbit ?? false))) {
|
|
752
|
+
const logFile = join(runDir, 'raw', `coderabbit-${sanitizeLabel(component)}.log`);
|
|
753
|
+
const baseRefForType = coderabbitType === 'uncommitted' ? null : coderabbitBaseCommit ? null : base.baseRef;
|
|
754
|
+
const res = await runCodeRabbitReview({
|
|
755
|
+
repoDir,
|
|
756
|
+
baseRef: baseRefForType,
|
|
757
|
+
baseCommit: coderabbitBaseCommit,
|
|
758
|
+
env: process.env,
|
|
759
|
+
type: coderabbitType,
|
|
760
|
+
configFiles: coderabbitConfigFiles,
|
|
761
|
+
streamLabel: stream ? `${component}:coderabbit` : undefined,
|
|
762
|
+
teeFile: logFile,
|
|
763
|
+
teeLabel: `${component}:coderabbit`,
|
|
764
|
+
});
|
|
765
|
+
return {
|
|
766
|
+
reviewer,
|
|
767
|
+
ok: Boolean(res.ok),
|
|
768
|
+
exitCode: res.exitCode,
|
|
769
|
+
signal: res.signal,
|
|
770
|
+
durationMs: res.durationMs,
|
|
771
|
+
stdout: res.stdout ?? '',
|
|
772
|
+
stderr: res.stderr ?? '',
|
|
773
|
+
note,
|
|
774
|
+
logFile,
|
|
775
|
+
};
|
|
776
|
+
}
|
|
777
|
+
|
|
778
|
+
// Chunked mode: split the commit range into <=maxFiles windows and review each window by
|
|
779
|
+
// running CodeRabbit in a detached worktree checked out at the window head.
|
|
780
|
+
const mb = await mergeBase({ cwd: repoDir, env: process.env, a: base.baseRef, b: 'HEAD' });
|
|
781
|
+
const commits = await listCommitsBetween({ cwd: repoDir, env: process.env, base: mb, head: 'HEAD' });
|
|
782
|
+
const planned = await planCommitChunks({
|
|
783
|
+
baseCommit: mb,
|
|
784
|
+
commits,
|
|
785
|
+
maxFiles,
|
|
786
|
+
countFilesBetween: async ({ base: baseCommit, head }) =>
|
|
787
|
+
await countChangedFilesBetween({ cwd: repoDir, env: process.env, base: baseCommit, head }),
|
|
788
|
+
});
|
|
789
|
+
|
|
790
|
+
const chunks = planned.map((ch) => ({
|
|
791
|
+
baseCommit: ch.base,
|
|
792
|
+
headCommit: ch.head,
|
|
793
|
+
fileCount: ch.fileCount,
|
|
794
|
+
overLimit: Boolean(ch.overLimit),
|
|
795
|
+
}));
|
|
796
|
+
|
|
797
|
+
const chunkResults = [];
|
|
798
|
+
for (let i = 0; i < chunks.length; i += 1) {
|
|
799
|
+
const ch = chunks[i];
|
|
800
|
+
const logFile = join(
|
|
801
|
+
runDir,
|
|
802
|
+
'raw',
|
|
803
|
+
`coderabbit-${sanitizeLabel(component)}-window-${i + 1}-of-${chunks.length}-${String(ch.headCommit).slice(0, 12)}.log`
|
|
804
|
+
);
|
|
805
|
+
// eslint-disable-next-line no-await-in-loop
|
|
806
|
+
const rr = await withDetachedWorktree(
|
|
807
|
+
{ repoDir, headCommit: ch.headCommit, label: `coderabbit-${component}-${i + 1}-of-${chunks.length}`, env: process.env },
|
|
808
|
+
async (worktreeDir) => {
|
|
809
|
+
return await runCodeRabbitReview({
|
|
810
|
+
repoDir: worktreeDir,
|
|
811
|
+
baseRef: null,
|
|
812
|
+
baseCommit: ch.baseCommit,
|
|
813
|
+
env: process.env,
|
|
814
|
+
type: coderabbitType,
|
|
815
|
+
configFiles: coderabbitConfigFiles,
|
|
816
|
+
streamLabel: stream ? `${component}:coderabbit:${i + 1}/${chunks.length}` : undefined,
|
|
817
|
+
teeFile: logFile,
|
|
818
|
+
teeLabel: `${component}:coderabbit:${i + 1}/${chunks.length}`,
|
|
819
|
+
});
|
|
820
|
+
}
|
|
821
|
+
);
|
|
822
|
+
chunkResults.push({
|
|
823
|
+
index: i + 1,
|
|
824
|
+
of: chunks.length,
|
|
825
|
+
baseCommit: ch.baseCommit,
|
|
826
|
+
headCommit: ch.headCommit,
|
|
827
|
+
fileCount: ch.fileCount,
|
|
828
|
+
overLimit: ch.overLimit,
|
|
829
|
+
logFile,
|
|
830
|
+
ok: Boolean(rr.ok),
|
|
831
|
+
exitCode: rr.exitCode,
|
|
832
|
+
signal: rr.signal,
|
|
833
|
+
durationMs: rr.durationMs,
|
|
834
|
+
stdout: rr.stdout ?? '',
|
|
835
|
+
stderr: rr.stderr ?? '',
|
|
836
|
+
});
|
|
837
|
+
}
|
|
838
|
+
|
|
839
|
+
const okAll = chunkResults.every((r) => r.ok);
|
|
840
|
+
return {
|
|
841
|
+
reviewer,
|
|
842
|
+
ok: okAll,
|
|
843
|
+
exitCode: okAll ? 0 : 1,
|
|
844
|
+
signal: null,
|
|
845
|
+
durationMs: chunkResults.reduce((acc, r) => acc + (r.durationMs ?? 0), 0),
|
|
846
|
+
stdout: '',
|
|
847
|
+
stderr: '',
|
|
848
|
+
note: `chunked: ${chunkResults.length} windows (maxFiles=${maxFiles})`,
|
|
849
|
+
chunks: chunkResults,
|
|
850
|
+
};
|
|
851
|
+
}
|
|
852
|
+
if (reviewer === 'codex') {
|
|
853
|
+
const jsonMode = json;
|
|
854
|
+
if (reviewMode === 'audit') {
|
|
855
|
+
const prompt = monorepo
|
|
856
|
+
? buildCodexMonorepoAuditPrompt({
|
|
857
|
+
deep: depth === 'deep',
|
|
858
|
+
scopePaths: reviewPaths,
|
|
859
|
+
customPrompt: customReviewPrompt,
|
|
860
|
+
})
|
|
861
|
+
: buildCodexAuditPrompt({
|
|
862
|
+
component,
|
|
863
|
+
deep: depth === 'deep',
|
|
864
|
+
scopePaths: reviewPaths,
|
|
865
|
+
customPrompt: customReviewPrompt,
|
|
866
|
+
});
|
|
867
|
+
const logFile = join(runDir, 'raw', `codex-${sanitizeLabel(component)}.log`);
|
|
868
|
+
const res = await runCodexReview({
|
|
869
|
+
repoDir,
|
|
870
|
+
baseRef: null,
|
|
871
|
+
env: process.env,
|
|
872
|
+
jsonMode,
|
|
873
|
+
model: process.env.HAPPIER_STACK_CODEX_MODEL,
|
|
874
|
+
prompt,
|
|
875
|
+
streamLabel: stream && !jsonMode ? `${component}:codex` : undefined,
|
|
876
|
+
teeFile: logFile,
|
|
877
|
+
teeLabel: `${component}:codex`,
|
|
878
|
+
});
|
|
879
|
+
const extracted = jsonMode ? extractCodexReviewFromJsonl(res.stdout ?? '') : null;
|
|
880
|
+
return {
|
|
881
|
+
reviewer,
|
|
882
|
+
ok: Boolean(res.ok),
|
|
883
|
+
exitCode: res.exitCode,
|
|
884
|
+
signal: res.signal,
|
|
885
|
+
durationMs: res.durationMs,
|
|
886
|
+
stdout: res.stdout ?? '',
|
|
887
|
+
stderr: res.stderr ?? '',
|
|
888
|
+
review_output: extracted,
|
|
889
|
+
logFile,
|
|
890
|
+
};
|
|
891
|
+
}
|
|
892
|
+
// Prompt mode is required for deep reviews and for `--type=all` (we need to describe both diffs).
|
|
893
|
+
// For `--type=uncommitted` + normal depth, prefer Codex's built-in `--uncommitted` target.
|
|
894
|
+
// (Codex review targets do not support passing a custom prompt alongside --uncommitted).
|
|
895
|
+
const usePromptMode = depth === 'deep' || changeType === 'all';
|
|
896
|
+
const uncommittedOps = changeType === 'uncommitted' ? await getUncommittedOps({ cwd: repoDir, env: process.env }) : null;
|
|
897
|
+
const fileCount =
|
|
898
|
+
changeType === 'uncommitted'
|
|
899
|
+
? (uncommittedOps?.all?.size ?? 0)
|
|
900
|
+
: await countChangedFiles({ cwd: repoDir, env: process.env, base: base.baseRef });
|
|
901
|
+
const autoChunks = changeType === 'uncommitted' ? fileCount > maxFiles : usePromptMode && fileCount > maxFiles;
|
|
902
|
+
|
|
903
|
+
if (
|
|
904
|
+
monorepo &&
|
|
905
|
+
effectiveChunking === 'head-slice' &&
|
|
906
|
+
shouldUseUncommittedPathSlices({
|
|
907
|
+
reviewer: 'codex',
|
|
908
|
+
changeType,
|
|
909
|
+
fileCount,
|
|
910
|
+
maxFiles,
|
|
911
|
+
chunksPreference: wantChunksCodex,
|
|
912
|
+
})
|
|
913
|
+
) {
|
|
914
|
+
const ops = uncommittedOps ?? (await getUncommittedOps({ cwd: repoDir, env: process.env }));
|
|
915
|
+
const slices = planPathSlices({ changedPaths: Array.from(ops.all), maxFiles });
|
|
916
|
+
|
|
917
|
+
const sliceItems = slices.map((slice, i) => ({ slice, index: i + 1, of: slices.length }));
|
|
918
|
+
const sliceResults = await runSlicedJobs({
|
|
919
|
+
items: sliceItems,
|
|
920
|
+
limit: sliceConcurrency,
|
|
921
|
+
run: async ({ slice, index, of }) => {
|
|
922
|
+
const logFile = join(runDir, 'raw', `codex-slice-${index}-of-${of}-${sanitizeLabel(slice.label)}.log`);
|
|
923
|
+
const rr = await withDetachedWorktree(
|
|
924
|
+
{ repoDir, headCommit: 'HEAD', label: `codex-uncommitted-${index}-of-${of}`, env: process.env },
|
|
925
|
+
async (worktreeDir) => {
|
|
926
|
+
const allowed = new Set(slice.paths);
|
|
927
|
+
const sliceCheckout = subsetSet(ops.checkout, allowed);
|
|
928
|
+
const sliceRemove = subsetSet(ops.remove, allowed);
|
|
929
|
+
await applyUncommittedSlice({
|
|
930
|
+
srcRepoDir: repoDir,
|
|
931
|
+
worktreeDir,
|
|
932
|
+
checkoutPaths: sliceCheckout,
|
|
933
|
+
removePaths: sliceRemove,
|
|
934
|
+
});
|
|
935
|
+
const basePrompt =
|
|
936
|
+
depth === 'deep'
|
|
937
|
+
? buildCodexMonorepoDeepPrompt({ baseRef: base.baseRef, changeType, customPrompt: customReviewPrompt })
|
|
938
|
+
: buildCodexMonorepoNormalPrompt({ baseRef: base.baseRef, changeType, customPrompt: customReviewPrompt });
|
|
939
|
+
const prompt = usePromptMode
|
|
940
|
+
? buildUncommittedSlicePrompt({
|
|
941
|
+
sliceLabel: slice.label,
|
|
942
|
+
basePrompt,
|
|
943
|
+
})
|
|
944
|
+
: '';
|
|
945
|
+
return await runCodexReview({
|
|
946
|
+
repoDir: worktreeDir,
|
|
947
|
+
baseRef: null,
|
|
948
|
+
env: process.env,
|
|
949
|
+
jsonMode,
|
|
950
|
+
model: process.env.HAPPIER_STACK_CODEX_MODEL,
|
|
951
|
+
prompt,
|
|
952
|
+
streamLabel: stream && !jsonMode ? `monorepo:codex:${index}/${of}` : undefined,
|
|
953
|
+
teeFile: logFile,
|
|
954
|
+
teeLabel: `monorepo:codex:${index}/${of}`,
|
|
955
|
+
});
|
|
956
|
+
}
|
|
957
|
+
);
|
|
958
|
+
const extracted = jsonMode ? extractCodexReviewFromJsonl(rr.stdout ?? '') : null;
|
|
959
|
+
return {
|
|
960
|
+
index,
|
|
961
|
+
of,
|
|
962
|
+
slice: slice.label,
|
|
963
|
+
fileCount: slice.paths.length,
|
|
964
|
+
logFile,
|
|
965
|
+
ok: Boolean(rr.ok),
|
|
966
|
+
exitCode: rr.exitCode,
|
|
967
|
+
signal: rr.signal,
|
|
968
|
+
durationMs: rr.durationMs,
|
|
969
|
+
stdout: rr.stdout ?? '',
|
|
970
|
+
stderr: rr.stderr ?? '',
|
|
971
|
+
review_output: extracted,
|
|
972
|
+
};
|
|
973
|
+
},
|
|
974
|
+
shouldAbortEarly: (r) => detectCodexUsageLimit({ stdout: r?.stdout, stderr: r?.stderr }),
|
|
975
|
+
});
|
|
976
|
+
|
|
977
|
+
if (sliceResults.length === 1 && detectCodexUsageLimit(sliceResults[0])) {
|
|
978
|
+
const msg = `[review] codex usage limit detected; resolve Codex credits/limits, then re-run this review.`;
|
|
979
|
+
// eslint-disable-next-line no-console
|
|
980
|
+
console.error(msg);
|
|
981
|
+
}
|
|
982
|
+
|
|
983
|
+
const okAll = sliceResults.every((r) => r.ok);
|
|
984
|
+
return {
|
|
985
|
+
reviewer,
|
|
986
|
+
ok: okAll,
|
|
987
|
+
exitCode: okAll ? 0 : 1,
|
|
988
|
+
signal: null,
|
|
989
|
+
durationMs: sliceResults.reduce((acc, r) => acc + (r.durationMs ?? 0), 0),
|
|
990
|
+
stdout: '',
|
|
991
|
+
stderr: '',
|
|
992
|
+
note: `uncommitted slices: ${sliceResults.length} slices (maxFiles=${maxFiles})`,
|
|
993
|
+
slices: sliceResults,
|
|
994
|
+
};
|
|
995
|
+
}
|
|
996
|
+
|
|
997
|
+
if (monorepo && effectiveChunking === 'head-slice' && usePromptMode && (wantChunksCodex ?? autoChunks)) {
|
|
998
|
+
const headCommit = (await runCapture('git', ['rev-parse', 'HEAD'], { cwd: repoDir, env: process.env })).trim();
|
|
999
|
+
const baseCommit = (await runCapture('git', ['rev-parse', base.baseRef], { cwd: repoDir, env: process.env })).trim();
|
|
1000
|
+
const ops = await getChangedOps({ cwd: repoDir, baseRef: baseCommit, headRef: headCommit, env: process.env });
|
|
1001
|
+
const slices = planPathSlices({ changedPaths: Array.from(ops.all), maxFiles });
|
|
1002
|
+
|
|
1003
|
+
const sliceItems = slices.map((slice, i) => ({ slice, index: i + 1, of: slices.length }));
|
|
1004
|
+
const sliceResults = await runSlicedJobs({
|
|
1005
|
+
items: sliceItems,
|
|
1006
|
+
limit: sliceConcurrency,
|
|
1007
|
+
run: async ({ slice, index, of }) => {
|
|
1008
|
+
const logFile = join(runDir, 'raw', `codex-slice-${index}-of-${of}-${sanitizeLabel(slice.label)}.log`);
|
|
1009
|
+
const rr = await withDetachedWorktree(
|
|
1010
|
+
{ repoDir, headCommit: baseCommit, label: `codex-${index}-of-${of}`, env: process.env },
|
|
1011
|
+
async (worktreeDir) => {
|
|
1012
|
+
const { baseSliceCommit } = await createHeadSliceCommits({
|
|
1013
|
+
cwd: worktreeDir,
|
|
1014
|
+
env: process.env,
|
|
1015
|
+
baseRef: baseCommit,
|
|
1016
|
+
headCommit,
|
|
1017
|
+
ops,
|
|
1018
|
+
slicePaths: slice.paths,
|
|
1019
|
+
label: slice.label.replace(/\/+$/g, ''),
|
|
1020
|
+
});
|
|
1021
|
+
const prompt = buildCodexMonorepoSlicePrompt({
|
|
1022
|
+
sliceLabel: slice.label,
|
|
1023
|
+
baseCommit: baseSliceCommit,
|
|
1024
|
+
baseRef: base.baseRef,
|
|
1025
|
+
deep: depth === 'deep',
|
|
1026
|
+
customPrompt: customReviewPrompt,
|
|
1027
|
+
});
|
|
1028
|
+
return await runCodexReview({
|
|
1029
|
+
repoDir: worktreeDir,
|
|
1030
|
+
baseRef: null,
|
|
1031
|
+
env: process.env,
|
|
1032
|
+
jsonMode,
|
|
1033
|
+
model: process.env.HAPPIER_STACK_CODEX_MODEL,
|
|
1034
|
+
prompt,
|
|
1035
|
+
streamLabel: stream && !jsonMode ? `monorepo:codex:${index}/${of}` : undefined,
|
|
1036
|
+
teeFile: logFile,
|
|
1037
|
+
teeLabel: `monorepo:codex:${index}/${of}`,
|
|
1038
|
+
});
|
|
1039
|
+
}
|
|
1040
|
+
);
|
|
1041
|
+
const extracted = jsonMode ? extractCodexReviewFromJsonl(rr.stdout ?? '') : null;
|
|
1042
|
+
return {
|
|
1043
|
+
index,
|
|
1044
|
+
of,
|
|
1045
|
+
slice: slice.label,
|
|
1046
|
+
fileCount: slice.paths.length,
|
|
1047
|
+
logFile,
|
|
1048
|
+
ok: Boolean(rr.ok),
|
|
1049
|
+
exitCode: rr.exitCode,
|
|
1050
|
+
signal: rr.signal,
|
|
1051
|
+
durationMs: rr.durationMs,
|
|
1052
|
+
stdout: rr.stdout ?? '',
|
|
1053
|
+
stderr: rr.stderr ?? '',
|
|
1054
|
+
review_output: extracted,
|
|
1055
|
+
};
|
|
1056
|
+
},
|
|
1057
|
+
shouldAbortEarly: (r) => detectCodexUsageLimit({ stdout: r?.stdout, stderr: r?.stderr }),
|
|
1058
|
+
});
|
|
1059
|
+
|
|
1060
|
+
if (sliceResults.length === 1 && detectCodexUsageLimit(sliceResults[0])) {
|
|
1061
|
+
const msg = `[review] codex usage limit detected; resolve Codex credits/limits, then re-run this review.`;
|
|
1062
|
+
// eslint-disable-next-line no-console
|
|
1063
|
+
console.error(msg);
|
|
1064
|
+
}
|
|
1065
|
+
|
|
1066
|
+
const okAll = sliceResults.every((r) => r.ok);
|
|
1067
|
+
return {
|
|
1068
|
+
reviewer,
|
|
1069
|
+
ok: okAll,
|
|
1070
|
+
exitCode: okAll ? 0 : 1,
|
|
1071
|
+
signal: null,
|
|
1072
|
+
durationMs: sliceResults.reduce((acc, r) => acc + (r.durationMs ?? 0), 0),
|
|
1073
|
+
stdout: '',
|
|
1074
|
+
stderr: '',
|
|
1075
|
+
note: `monorepo head-slice: ${sliceResults.length} slices (maxFiles=${maxFiles})`,
|
|
1076
|
+
slices: sliceResults,
|
|
1077
|
+
};
|
|
1078
|
+
}
|
|
1079
|
+
|
|
1080
|
+
const prompt = usePromptMode
|
|
1081
|
+
? monorepo
|
|
1082
|
+
? depth === 'deep'
|
|
1083
|
+
? buildCodexMonorepoDeepPrompt({ baseRef: base.baseRef, changeType, customPrompt: customReviewPrompt })
|
|
1084
|
+
: buildCodexMonorepoNormalPrompt({ baseRef: base.baseRef, changeType, customPrompt: customReviewPrompt })
|
|
1085
|
+
: depth === 'deep'
|
|
1086
|
+
? buildCodexDeepPrompt({ component, baseRef: base.baseRef, changeType, customPrompt: customReviewPrompt })
|
|
1087
|
+
: buildCodexNormalPrompt({ component, baseRef: base.baseRef, changeType, customPrompt: customReviewPrompt })
|
|
1088
|
+
: '';
|
|
1089
|
+
const logFile = join(runDir, 'raw', `codex-${sanitizeLabel(component)}.log`);
|
|
1090
|
+
const res = await runCodexReview({
|
|
1091
|
+
repoDir,
|
|
1092
|
+
baseRef: usePromptMode ? null : changeType === 'uncommitted' ? null : base.baseRef,
|
|
1093
|
+
env: process.env,
|
|
1094
|
+
jsonMode,
|
|
1095
|
+
model: process.env.HAPPIER_STACK_CODEX_MODEL,
|
|
1096
|
+
prompt,
|
|
1097
|
+
streamLabel: stream && !jsonMode ? `${component}:codex` : undefined,
|
|
1098
|
+
teeFile: logFile,
|
|
1099
|
+
teeLabel: `${component}:codex`,
|
|
1100
|
+
});
|
|
1101
|
+
const extracted = jsonMode ? extractCodexReviewFromJsonl(res.stdout ?? '') : null;
|
|
1102
|
+
return {
|
|
1103
|
+
reviewer,
|
|
1104
|
+
ok: Boolean(res.ok),
|
|
1105
|
+
exitCode: res.exitCode,
|
|
1106
|
+
signal: res.signal,
|
|
1107
|
+
durationMs: res.durationMs,
|
|
1108
|
+
stdout: res.stdout ?? '',
|
|
1109
|
+
stderr: res.stderr ?? '',
|
|
1110
|
+
review_output: extracted,
|
|
1111
|
+
logFile,
|
|
1112
|
+
};
|
|
1113
|
+
}
|
|
1114
|
+
if (reviewer === 'claude') {
|
|
1115
|
+
const jsonMode = false;
|
|
1116
|
+
if (reviewMode === 'audit') {
|
|
1117
|
+
const prompt = monorepo
|
|
1118
|
+
? buildCodexMonorepoAuditPrompt({
|
|
1119
|
+
deep: depth === 'deep',
|
|
1120
|
+
scopePaths: reviewPaths,
|
|
1121
|
+
customPrompt: customReviewPrompt,
|
|
1122
|
+
})
|
|
1123
|
+
: buildCodexAuditPrompt({
|
|
1124
|
+
component,
|
|
1125
|
+
deep: depth === 'deep',
|
|
1126
|
+
scopePaths: reviewPaths,
|
|
1127
|
+
customPrompt: customReviewPrompt,
|
|
1128
|
+
});
|
|
1129
|
+
const logFile = join(runDir, 'raw', `claude-${sanitizeLabel(component)}.log`);
|
|
1130
|
+
const res = await runClaudeReview({
|
|
1131
|
+
repoDir,
|
|
1132
|
+
env: process.env,
|
|
1133
|
+
prompt,
|
|
1134
|
+
model: process.env.HAPPIER_STACK_CLAUDE_MODEL,
|
|
1135
|
+
streamLabel: stream ? `${component}:claude` : undefined,
|
|
1136
|
+
teeFile: logFile,
|
|
1137
|
+
teeLabel: `${component}:claude`,
|
|
1138
|
+
});
|
|
1139
|
+
return {
|
|
1140
|
+
reviewer,
|
|
1141
|
+
ok: Boolean(res.ok),
|
|
1142
|
+
exitCode: res.exitCode,
|
|
1143
|
+
signal: res.signal,
|
|
1144
|
+
durationMs: res.durationMs,
|
|
1145
|
+
stdout: res.stdout ?? '',
|
|
1146
|
+
stderr: res.stderr ?? '',
|
|
1147
|
+
logFile,
|
|
1148
|
+
};
|
|
1149
|
+
}
|
|
1150
|
+
const canChunk = changeType !== 'uncommitted';
|
|
1151
|
+
const uncommittedOps = changeType === 'uncommitted' ? await getUncommittedOps({ cwd: repoDir, env: process.env }) : null;
|
|
1152
|
+
const fileCount =
|
|
1153
|
+
changeType === 'uncommitted'
|
|
1154
|
+
? (uncommittedOps?.all?.size ?? 0)
|
|
1155
|
+
: await countChangedFiles({ cwd: repoDir, env: process.env, base: base.baseRef });
|
|
1156
|
+
const autoChunks = fileCount > maxFiles;
|
|
1157
|
+
|
|
1158
|
+
if (
|
|
1159
|
+
monorepo &&
|
|
1160
|
+
effectiveChunking === 'head-slice' &&
|
|
1161
|
+
shouldUseUncommittedPathSlices({
|
|
1162
|
+
reviewer: 'claude',
|
|
1163
|
+
changeType,
|
|
1164
|
+
fileCount,
|
|
1165
|
+
maxFiles,
|
|
1166
|
+
chunksPreference: globalChunks,
|
|
1167
|
+
})
|
|
1168
|
+
) {
|
|
1169
|
+
const ops = uncommittedOps ?? (await getUncommittedOps({ cwd: repoDir, env: process.env }));
|
|
1170
|
+
const slices = planPathSlices({ changedPaths: Array.from(ops.all), maxFiles });
|
|
1171
|
+
|
|
1172
|
+
const sliceItems = slices.map((slice, i) => ({ slice, index: i + 1, of: slices.length }));
|
|
1173
|
+
const sliceResults = await runSlicedJobs({
|
|
1174
|
+
items: sliceItems,
|
|
1175
|
+
limit: sliceConcurrency,
|
|
1176
|
+
run: async ({ slice, index, of }) => {
|
|
1177
|
+
const logFile = join(runDir, 'raw', `claude-slice-${index}-of-${of}-${sanitizeLabel(slice.label)}.log`);
|
|
1178
|
+
const rr = await withDetachedWorktree(
|
|
1179
|
+
{ repoDir, headCommit: 'HEAD', label: `claude-uncommitted-${index}-of-${of}`, env: process.env },
|
|
1180
|
+
async (worktreeDir) => {
|
|
1181
|
+
const allowed = new Set(slice.paths);
|
|
1182
|
+
const sliceCheckout = subsetSet(ops.checkout, allowed);
|
|
1183
|
+
const sliceRemove = subsetSet(ops.remove, allowed);
|
|
1184
|
+
await applyUncommittedSlice({
|
|
1185
|
+
srcRepoDir: repoDir,
|
|
1186
|
+
worktreeDir,
|
|
1187
|
+
checkoutPaths: sliceCheckout,
|
|
1188
|
+
removePaths: sliceRemove,
|
|
1189
|
+
});
|
|
1190
|
+
const basePrompt =
|
|
1191
|
+
depth === 'deep'
|
|
1192
|
+
? buildCodexMonorepoDeepPrompt({ baseRef: base.baseRef, changeType, customPrompt: customReviewPrompt })
|
|
1193
|
+
: buildCodexMonorepoNormalPrompt({ baseRef: base.baseRef, changeType, customPrompt: customReviewPrompt });
|
|
1194
|
+
const prompt = buildUncommittedSlicePrompt({
|
|
1195
|
+
sliceLabel: slice.label,
|
|
1196
|
+
basePrompt,
|
|
1197
|
+
});
|
|
1198
|
+
return await runClaudeReview({
|
|
1199
|
+
repoDir: worktreeDir,
|
|
1200
|
+
env: process.env,
|
|
1201
|
+
prompt,
|
|
1202
|
+
model: process.env.HAPPIER_STACK_CLAUDE_MODEL,
|
|
1203
|
+
streamLabel: stream && !jsonMode ? `monorepo:claude:${index}/${of}` : undefined,
|
|
1204
|
+
teeFile: logFile,
|
|
1205
|
+
teeLabel: `monorepo:claude:${index}/${of}`,
|
|
1206
|
+
});
|
|
1207
|
+
}
|
|
1208
|
+
);
|
|
1209
|
+
return {
|
|
1210
|
+
index,
|
|
1211
|
+
of,
|
|
1212
|
+
slice: slice.label,
|
|
1213
|
+
fileCount: slice.paths.length,
|
|
1214
|
+
logFile,
|
|
1215
|
+
ok: Boolean(rr.ok),
|
|
1216
|
+
exitCode: rr.exitCode,
|
|
1217
|
+
signal: rr.signal,
|
|
1218
|
+
durationMs: rr.durationMs,
|
|
1219
|
+
stdout: rr.stdout ?? '',
|
|
1220
|
+
stderr: rr.stderr ?? '',
|
|
1221
|
+
};
|
|
1222
|
+
},
|
|
1223
|
+
shouldAbortEarly: (r) => detectClaudeAuthError({ stdout: r?.stdout, stderr: r?.stderr }),
|
|
1224
|
+
});
|
|
1225
|
+
|
|
1226
|
+
if (sliceResults.length === 1 && detectClaudeAuthError(sliceResults[0])) {
|
|
1227
|
+
const msg = `[review] claude auth/rate-limit issue detected; ensure Claude CLI auth/limits are healthy, then re-run this review.`;
|
|
1228
|
+
// eslint-disable-next-line no-console
|
|
1229
|
+
console.error(msg);
|
|
1230
|
+
}
|
|
1231
|
+
|
|
1232
|
+
const okAll = sliceResults.every((r) => r.ok);
|
|
1233
|
+
return {
|
|
1234
|
+
reviewer,
|
|
1235
|
+
ok: okAll,
|
|
1236
|
+
exitCode: okAll ? 0 : 1,
|
|
1237
|
+
signal: null,
|
|
1238
|
+
durationMs: sliceResults.reduce((acc, r) => acc + (r.durationMs ?? 0), 0),
|
|
1239
|
+
stdout: '',
|
|
1240
|
+
stderr: '',
|
|
1241
|
+
note: `uncommitted slices: ${sliceResults.length} slices (maxFiles=${maxFiles})`,
|
|
1242
|
+
slices: sliceResults,
|
|
1243
|
+
};
|
|
1244
|
+
}
|
|
1245
|
+
|
|
1246
|
+
if (monorepo && canChunk && effectiveChunking === 'head-slice' && (globalChunks ?? autoChunks)) {
|
|
1247
|
+
const headCommit = (await runCapture('git', ['rev-parse', 'HEAD'], { cwd: repoDir, env: process.env })).trim();
|
|
1248
|
+
const baseCommit = (await runCapture('git', ['rev-parse', base.baseRef], { cwd: repoDir, env: process.env })).trim();
|
|
1249
|
+
const ops = await getChangedOps({ cwd: repoDir, baseRef: baseCommit, headRef: headCommit, env: process.env });
|
|
1250
|
+
const slices = planPathSlices({ changedPaths: Array.from(ops.all), maxFiles });
|
|
1251
|
+
|
|
1252
|
+
const sliceItems = slices.map((slice, i) => ({ slice, index: i + 1, of: slices.length }));
|
|
1253
|
+
const sliceResults = await runSlicedJobs({
|
|
1254
|
+
items: sliceItems,
|
|
1255
|
+
limit: sliceConcurrency,
|
|
1256
|
+
run: async ({ slice, index, of }) => {
|
|
1257
|
+
const logFile = join(runDir, 'raw', `claude-slice-${index}-of-${of}-${sanitizeLabel(slice.label)}.log`);
|
|
1258
|
+
const rr = await withDetachedWorktree(
|
|
1259
|
+
{ repoDir, headCommit: baseCommit, label: `claude-${index}-of-${of}`, env: process.env },
|
|
1260
|
+
async (worktreeDir) => {
|
|
1261
|
+
const { baseSliceCommit } = await createHeadSliceCommits({
|
|
1262
|
+
cwd: worktreeDir,
|
|
1263
|
+
env: process.env,
|
|
1264
|
+
baseRef: baseCommit,
|
|
1265
|
+
headCommit,
|
|
1266
|
+
ops,
|
|
1267
|
+
slicePaths: slice.paths,
|
|
1268
|
+
label: slice.label.replace(/\/+$/g, ''),
|
|
1269
|
+
});
|
|
1270
|
+
const prompt = buildCodexMonorepoSlicePrompt({
|
|
1271
|
+
sliceLabel: slice.label,
|
|
1272
|
+
baseCommit: baseSliceCommit,
|
|
1273
|
+
baseRef: base.baseRef,
|
|
1274
|
+
deep: depth === 'deep',
|
|
1275
|
+
customPrompt: customReviewPrompt,
|
|
1276
|
+
});
|
|
1277
|
+
return await runClaudeReview({
|
|
1278
|
+
repoDir: worktreeDir,
|
|
1279
|
+
env: process.env,
|
|
1280
|
+
prompt,
|
|
1281
|
+
model: process.env.HAPPIER_STACK_CLAUDE_MODEL,
|
|
1282
|
+
streamLabel: stream && !jsonMode ? `monorepo:claude:${index}/${of}` : undefined,
|
|
1283
|
+
teeFile: logFile,
|
|
1284
|
+
teeLabel: `monorepo:claude:${index}/${of}`,
|
|
1285
|
+
});
|
|
1286
|
+
}
|
|
1287
|
+
);
|
|
1288
|
+
return {
|
|
1289
|
+
index,
|
|
1290
|
+
of,
|
|
1291
|
+
slice: slice.label,
|
|
1292
|
+
fileCount: slice.paths.length,
|
|
1293
|
+
logFile,
|
|
1294
|
+
ok: Boolean(rr.ok),
|
|
1295
|
+
exitCode: rr.exitCode,
|
|
1296
|
+
signal: rr.signal,
|
|
1297
|
+
durationMs: rr.durationMs,
|
|
1298
|
+
stdout: rr.stdout ?? '',
|
|
1299
|
+
stderr: rr.stderr ?? '',
|
|
1300
|
+
};
|
|
1301
|
+
},
|
|
1302
|
+
shouldAbortEarly: (r) => detectClaudeAuthError({ stdout: r?.stdout, stderr: r?.stderr }),
|
|
1303
|
+
});
|
|
1304
|
+
|
|
1305
|
+
if (sliceResults.length === 1 && detectClaudeAuthError(sliceResults[0])) {
|
|
1306
|
+
const msg = `[review] claude auth/rate-limit issue detected; ensure Claude CLI auth/limits are healthy, then re-run this review.`;
|
|
1307
|
+
// eslint-disable-next-line no-console
|
|
1308
|
+
console.error(msg);
|
|
1309
|
+
}
|
|
1310
|
+
|
|
1311
|
+
const okAll = sliceResults.every((r) => r.ok);
|
|
1312
|
+
return {
|
|
1313
|
+
reviewer,
|
|
1314
|
+
ok: okAll,
|
|
1315
|
+
exitCode: okAll ? 0 : 1,
|
|
1316
|
+
signal: null,
|
|
1317
|
+
durationMs: sliceResults.reduce((acc, r) => acc + (r.durationMs ?? 0), 0),
|
|
1318
|
+
stdout: '',
|
|
1319
|
+
stderr: '',
|
|
1320
|
+
note: `monorepo head-slice: ${sliceResults.length} slices (maxFiles=${maxFiles})`,
|
|
1321
|
+
slices: sliceResults,
|
|
1322
|
+
};
|
|
1323
|
+
}
|
|
1324
|
+
|
|
1325
|
+
const prompt = monorepo
|
|
1326
|
+
? depth === 'deep'
|
|
1327
|
+
? buildCodexMonorepoDeepPrompt({ baseRef: base.baseRef, changeType, customPrompt: customReviewPrompt })
|
|
1328
|
+
: buildCodexMonorepoNormalPrompt({ baseRef: base.baseRef, changeType, customPrompt: customReviewPrompt })
|
|
1329
|
+
: depth === 'deep'
|
|
1330
|
+
? buildCodexDeepPrompt({ component, baseRef: base.baseRef, changeType, customPrompt: customReviewPrompt })
|
|
1331
|
+
: buildCodexNormalPrompt({ component, baseRef: base.baseRef, changeType, customPrompt: customReviewPrompt });
|
|
1332
|
+
const logFile = join(runDir, 'raw', `claude-${sanitizeLabel(component)}.log`);
|
|
1333
|
+
const res = await runClaudeReview({
|
|
1334
|
+
repoDir,
|
|
1335
|
+
env: process.env,
|
|
1336
|
+
prompt,
|
|
1337
|
+
model: process.env.HAPPIER_STACK_CLAUDE_MODEL,
|
|
1338
|
+
streamLabel: stream && !jsonMode ? `${component}:claude` : undefined,
|
|
1339
|
+
teeFile: logFile,
|
|
1340
|
+
teeLabel: `${component}:claude`,
|
|
1341
|
+
});
|
|
1342
|
+
return {
|
|
1343
|
+
reviewer,
|
|
1344
|
+
ok: Boolean(res.ok),
|
|
1345
|
+
exitCode: res.exitCode,
|
|
1346
|
+
signal: res.signal,
|
|
1347
|
+
durationMs: res.durationMs,
|
|
1348
|
+
stdout: res.stdout ?? '',
|
|
1349
|
+
stderr: res.stderr ?? '',
|
|
1350
|
+
logFile,
|
|
1351
|
+
};
|
|
1352
|
+
}
|
|
1353
|
+
if (reviewer === 'augment') {
|
|
1354
|
+
// Augment CLI (`auggie`) always requires an instruction in `--print` mode.
|
|
1355
|
+
// Unlike Codex, it has no `--uncommitted` target we can rely on, so we always provide a prompt.
|
|
1356
|
+
const jsonMode = false;
|
|
1357
|
+
if (reviewMode === 'audit') {
|
|
1358
|
+
const prompt = monorepo
|
|
1359
|
+
? buildCodexMonorepoAuditPrompt({
|
|
1360
|
+
deep: depth === 'deep',
|
|
1361
|
+
scopePaths: reviewPaths,
|
|
1362
|
+
customPrompt: customReviewPrompt,
|
|
1363
|
+
})
|
|
1364
|
+
: buildCodexAuditPrompt({
|
|
1365
|
+
component,
|
|
1366
|
+
deep: depth === 'deep',
|
|
1367
|
+
scopePaths: reviewPaths,
|
|
1368
|
+
customPrompt: customReviewPrompt,
|
|
1369
|
+
});
|
|
1370
|
+
const logFile = join(runDir, 'raw', `augment-${sanitizeLabel(component)}.log`);
|
|
1371
|
+
const res = await runAugmentReview({
|
|
1372
|
+
repoDir,
|
|
1373
|
+
env: process.env,
|
|
1374
|
+
prompt,
|
|
1375
|
+
cacheDir: process.env.HAPPIER_STACK_AUGMENT_CACHE_DIR,
|
|
1376
|
+
model: process.env.HAPPIER_STACK_AUGMENT_MODEL,
|
|
1377
|
+
maxTurns: process.env.HAPPIER_STACK_AUGMENT_MAX_TURNS,
|
|
1378
|
+
rulesFiles: coderabbitConfigFiles,
|
|
1379
|
+
streamLabel: stream ? `${component}:augment` : undefined,
|
|
1380
|
+
teeFile: logFile,
|
|
1381
|
+
teeLabel: `${component}:augment`,
|
|
1382
|
+
});
|
|
1383
|
+
return {
|
|
1384
|
+
reviewer,
|
|
1385
|
+
ok: Boolean(res.ok),
|
|
1386
|
+
exitCode: res.exitCode,
|
|
1387
|
+
signal: res.signal,
|
|
1388
|
+
durationMs: res.durationMs,
|
|
1389
|
+
stdout: res.stdout ?? '',
|
|
1390
|
+
stderr: res.stderr ?? '',
|
|
1391
|
+
logFile,
|
|
1392
|
+
};
|
|
1393
|
+
}
|
|
1394
|
+
const usePromptMode = true;
|
|
1395
|
+
const uncommittedOps = changeType === 'uncommitted' ? await getUncommittedOps({ cwd: repoDir, env: process.env }) : null;
|
|
1396
|
+
const fileCount =
|
|
1397
|
+
changeType === 'uncommitted'
|
|
1398
|
+
? (uncommittedOps?.all?.size ?? 0)
|
|
1399
|
+
: await countChangedFiles({ cwd: repoDir, env: process.env, base: base.baseRef });
|
|
1400
|
+
const autoChunks = fileCount > maxFiles;
|
|
1401
|
+
const cacheDir = (process.env.HAPPIER_STACK_AUGMENT_CACHE_DIR ?? '').toString().trim();
|
|
1402
|
+
const model = (process.env.HAPPIER_STACK_AUGMENT_MODEL ?? '').toString().trim();
|
|
1403
|
+
const maxTurnsRaw = (process.env.HAPPIER_STACK_AUGMENT_MAX_TURNS ?? '').toString().trim();
|
|
1404
|
+
const maxTurns = maxTurnsRaw ? Number(maxTurnsRaw) : null;
|
|
1405
|
+
|
|
1406
|
+
if (monorepo && effectiveChunking === 'head-slice' && usePromptMode && (wantChunksAugment ?? autoChunks)) {
|
|
1407
|
+
const headCommit = (await runCapture('git', ['rev-parse', 'HEAD'], { cwd: repoDir, env: process.env })).trim();
|
|
1408
|
+
const baseCommit = (await runCapture('git', ['rev-parse', base.baseRef], { cwd: repoDir, env: process.env })).trim();
|
|
1409
|
+
const ops = await getChangedOps({ cwd: repoDir, baseRef: baseCommit, headRef: headCommit, env: process.env });
|
|
1410
|
+
const slices = planPathSlices({ changedPaths: Array.from(ops.all), maxFiles });
|
|
1411
|
+
|
|
1412
|
+
const sliceItems = slices.map((slice, i) => ({ slice, index: i + 1, of: slices.length }));
|
|
1413
|
+
const sliceResults = await runSlicedJobs({
|
|
1414
|
+
items: sliceItems,
|
|
1415
|
+
limit: sliceConcurrency,
|
|
1416
|
+
run: async ({ slice, index, of }) => {
|
|
1417
|
+
const logFile = join(runDir, 'raw', `augment-slice-${index}-of-${of}-${sanitizeLabel(slice.label)}.log`);
|
|
1418
|
+
const rr = await withDetachedWorktree(
|
|
1419
|
+
{ repoDir, headCommit: baseCommit, label: `augment-${index}-of-${of}`, env: process.env },
|
|
1420
|
+
async (worktreeDir) => {
|
|
1421
|
+
const { baseSliceCommit } = await createHeadSliceCommits({
|
|
1422
|
+
cwd: worktreeDir,
|
|
1423
|
+
env: process.env,
|
|
1424
|
+
baseRef: baseCommit,
|
|
1425
|
+
headCommit,
|
|
1426
|
+
ops,
|
|
1427
|
+
slicePaths: slice.paths,
|
|
1428
|
+
label: slice.label.replace(/\/+$/g, ''),
|
|
1429
|
+
});
|
|
1430
|
+
const prompt = buildCodexMonorepoSlicePrompt({
|
|
1431
|
+
sliceLabel: slice.label,
|
|
1432
|
+
baseCommit: baseSliceCommit,
|
|
1433
|
+
baseRef: base.baseRef,
|
|
1434
|
+
deep: depth === 'deep',
|
|
1435
|
+
customPrompt: customReviewPrompt,
|
|
1436
|
+
});
|
|
1437
|
+
return await runAugmentReview({
|
|
1438
|
+
repoDir: worktreeDir,
|
|
1439
|
+
prompt,
|
|
1440
|
+
env: process.env,
|
|
1441
|
+
cacheDir,
|
|
1442
|
+
model,
|
|
1443
|
+
maxTurns: Number.isFinite(maxTurns) ? String(maxTurns) : undefined,
|
|
1444
|
+
streamLabel: stream ? `monorepo:augment:${index}/${of}` : undefined,
|
|
1445
|
+
teeFile: logFile,
|
|
1446
|
+
teeLabel: `monorepo:augment:${index}/${of}`,
|
|
1447
|
+
});
|
|
1448
|
+
}
|
|
1449
|
+
);
|
|
1450
|
+
return {
|
|
1451
|
+
index,
|
|
1452
|
+
of,
|
|
1453
|
+
slice: slice.label,
|
|
1454
|
+
fileCount: slice.paths.length,
|
|
1455
|
+
logFile,
|
|
1456
|
+
ok: Boolean(rr.ok),
|
|
1457
|
+
exitCode: rr.exitCode,
|
|
1458
|
+
signal: rr.signal,
|
|
1459
|
+
durationMs: rr.durationMs,
|
|
1460
|
+
stdout: rr.stdout ?? '',
|
|
1461
|
+
stderr: rr.stderr ?? '',
|
|
1462
|
+
};
|
|
1463
|
+
},
|
|
1464
|
+
shouldAbortEarly: (r) => detectAugmentAuthError({ stdout: r?.stdout, stderr: r?.stderr }),
|
|
1465
|
+
});
|
|
1466
|
+
|
|
1467
|
+
if (sliceResults.length === 1 && detectAugmentAuthError(sliceResults[0])) {
|
|
1468
|
+
const msg = `[review] augment auth required: run 'auggie login' in an interactive session, then re-run this review.`;
|
|
1469
|
+
// eslint-disable-next-line no-console
|
|
1470
|
+
console.error(msg);
|
|
1471
|
+
}
|
|
1472
|
+
|
|
1473
|
+
const okAll = sliceResults.every((r) => r.ok);
|
|
1474
|
+
return {
|
|
1475
|
+
reviewer,
|
|
1476
|
+
ok: okAll,
|
|
1477
|
+
exitCode: okAll ? 0 : 1,
|
|
1478
|
+
signal: null,
|
|
1479
|
+
durationMs: sliceResults.reduce((acc, r) => acc + (r.durationMs ?? 0), 0),
|
|
1480
|
+
stdout: '',
|
|
1481
|
+
stderr: '',
|
|
1482
|
+
note: `monorepo head-slice: ${sliceResults.length} slices (maxFiles=${maxFiles})`,
|
|
1483
|
+
slices: sliceResults,
|
|
1484
|
+
};
|
|
1485
|
+
}
|
|
1486
|
+
|
|
1487
|
+
const prompt = monorepo
|
|
1488
|
+
? depth === 'deep'
|
|
1489
|
+
? buildCodexMonorepoDeepPrompt({ baseRef: base.baseRef, changeType, customPrompt: customReviewPrompt })
|
|
1490
|
+
: buildCodexMonorepoNormalPrompt({ baseRef: base.baseRef, changeType, customPrompt: customReviewPrompt })
|
|
1491
|
+
: depth === 'deep'
|
|
1492
|
+
? buildCodexDeepPrompt({ component, baseRef: base.baseRef, changeType, customPrompt: customReviewPrompt })
|
|
1493
|
+
: buildCodexNormalPrompt({ component, baseRef: base.baseRef, changeType, customPrompt: customReviewPrompt });
|
|
1494
|
+
const logFile = join(runDir, 'raw', `augment-${sanitizeLabel(component)}.log`);
|
|
1495
|
+
const res = await runAugmentReview({
|
|
1496
|
+
repoDir,
|
|
1497
|
+
prompt,
|
|
1498
|
+
env: process.env,
|
|
1499
|
+
cacheDir,
|
|
1500
|
+
model,
|
|
1501
|
+
maxTurns: Number.isFinite(maxTurns) ? String(maxTurns) : undefined,
|
|
1502
|
+
streamLabel: stream ? `${component}:augment` : undefined,
|
|
1503
|
+
teeFile: logFile,
|
|
1504
|
+
teeLabel: `${component}:augment`,
|
|
1505
|
+
});
|
|
1506
|
+
return {
|
|
1507
|
+
reviewer,
|
|
1508
|
+
ok: Boolean(res.ok),
|
|
1509
|
+
exitCode: res.exitCode,
|
|
1510
|
+
signal: res.signal,
|
|
1511
|
+
durationMs: res.durationMs,
|
|
1512
|
+
stdout: res.stdout ?? '',
|
|
1513
|
+
stderr: res.stderr ?? '',
|
|
1514
|
+
logFile,
|
|
1515
|
+
};
|
|
1516
|
+
}
|
|
1517
|
+
return { reviewer, ok: false, exitCode: null, signal: null, durationMs: 0, stdout: '', stderr: 'unknown reviewer\n' };
|
|
1518
|
+
},
|
|
1519
|
+
onError: (reviewer, error) => ({
|
|
1520
|
+
reviewer,
|
|
1521
|
+
ok: false,
|
|
1522
|
+
exitCode: null,
|
|
1523
|
+
signal: null,
|
|
1524
|
+
durationMs: 0,
|
|
1525
|
+
stdout: '',
|
|
1526
|
+
stderr: `[review] internal error while running reviewer '${reviewer}':\n${formatInternalError(error)}\n`,
|
|
1527
|
+
}),
|
|
1528
|
+
});
|
|
1529
|
+
|
|
1530
|
+
return { component, repoDir, base, results: perReviewer };
|
|
1531
|
+
} catch (error) {
|
|
1532
|
+
const stderr = `[review] internal error while preparing/running job '${component}':\n${formatInternalError(error)}\n`;
|
|
1533
|
+
const results = reviewers.map((reviewer) => ({
|
|
1534
|
+
reviewer,
|
|
1535
|
+
ok: false,
|
|
1536
|
+
exitCode: null,
|
|
1537
|
+
signal: null,
|
|
1538
|
+
durationMs: 0,
|
|
1539
|
+
stdout: '',
|
|
1540
|
+
stderr,
|
|
1541
|
+
}));
|
|
1542
|
+
return { component, repoDir, base, results };
|
|
1543
|
+
}
|
|
1544
|
+
},
|
|
1545
|
+
});
|
|
1546
|
+
|
|
1547
|
+
// Persist a structured triage checklist for the operator (human/LLM) to work through.
|
|
1548
|
+
try {
|
|
1549
|
+
const meta = {
|
|
1550
|
+
runLabel,
|
|
1551
|
+
startedAt: ts,
|
|
1552
|
+
stackName: stackName || null,
|
|
1553
|
+
reviewers,
|
|
1554
|
+
jobs: jobs.map((j) => ({ component: j.component, repoDir: j.repoDir, monorepo: j.monorepo })),
|
|
1555
|
+
depth,
|
|
1556
|
+
chunkMaxFiles: Number.isFinite(chunkMaxFiles) ? chunkMaxFiles : null,
|
|
1557
|
+
coderabbitMaxFiles,
|
|
1558
|
+
chunkingMode,
|
|
1559
|
+
argv,
|
|
1560
|
+
};
|
|
1561
|
+
await writeFile(join(runDir, 'meta.json'), JSON.stringify(meta, null, 2), 'utf-8');
|
|
1562
|
+
|
|
1563
|
+
const allFindings = [];
|
|
1564
|
+
let cr = 0;
|
|
1565
|
+
let cx = 0;
|
|
1566
|
+
let au = 0;
|
|
1567
|
+
let cl = 0;
|
|
1568
|
+
|
|
1569
|
+
for (const job of jobResults) {
|
|
1570
|
+
for (const rr of job.results) {
|
|
1571
|
+
if (rr.reviewer === 'coderabbit') {
|
|
1572
|
+
const sliceLike = rr.slices ?? rr.chunks ?? null;
|
|
1573
|
+
if (Array.isArray(sliceLike)) {
|
|
1574
|
+
for (const s of sliceLike) {
|
|
1575
|
+
const parsed = parseCodeRabbitPlainOutput(s.stdout ?? '');
|
|
1576
|
+
for (const f of parsed) {
|
|
1577
|
+
cr += 1;
|
|
1578
|
+
allFindings.push({
|
|
1579
|
+
...f,
|
|
1580
|
+
id: `CR-${String(cr).padStart(3, '0')}`,
|
|
1581
|
+
job: job.component,
|
|
1582
|
+
slice: s.slice ?? `${s.index}/${s.of}`,
|
|
1583
|
+
sourceLog: s.logFile ?? null,
|
|
1584
|
+
});
|
|
1585
|
+
}
|
|
1586
|
+
}
|
|
1587
|
+
} else {
|
|
1588
|
+
const parsed = parseCodeRabbitPlainOutput(rr.stdout ?? '');
|
|
1589
|
+
for (const f of parsed) {
|
|
1590
|
+
cr += 1;
|
|
1591
|
+
allFindings.push({
|
|
1592
|
+
...f,
|
|
1593
|
+
id: `CR-${String(cr).padStart(3, '0')}`,
|
|
1594
|
+
job: job.component,
|
|
1595
|
+
slice: null,
|
|
1596
|
+
sourceLog: rr.logFile ?? null,
|
|
1597
|
+
});
|
|
1598
|
+
}
|
|
1599
|
+
}
|
|
1600
|
+
}
|
|
1601
|
+
|
|
1602
|
+
if (rr.reviewer === 'codex') {
|
|
1603
|
+
const sliceLike = rr.slices ?? rr.chunks ?? null;
|
|
1604
|
+
const consumeText = (reviewText, slice, sourceLog) => {
|
|
1605
|
+
const parsed = parseCodexReviewText(reviewText);
|
|
1606
|
+
for (const f of parsed) {
|
|
1607
|
+
cx += 1;
|
|
1608
|
+
allFindings.push({
|
|
1609
|
+
...f,
|
|
1610
|
+
id: `CX-${String(cx).padStart(3, '0')}`,
|
|
1611
|
+
job: job.component,
|
|
1612
|
+
slice,
|
|
1613
|
+
sourceLog: sourceLog ?? null,
|
|
1614
|
+
});
|
|
1615
|
+
}
|
|
1616
|
+
};
|
|
1617
|
+
|
|
1618
|
+
if (Array.isArray(sliceLike)) {
|
|
1619
|
+
for (const s of sliceLike) {
|
|
1620
|
+
const reviewText = s.review_output ?? extractCodexReviewFromJsonl(s.stdout ?? '') ?? (s.stdout ?? '');
|
|
1621
|
+
consumeText(reviewText, s.slice ?? `${s.index}/${s.of}`, s.logFile ?? null);
|
|
1622
|
+
}
|
|
1623
|
+
} else {
|
|
1624
|
+
const reviewText = rr.review_output ?? extractCodexReviewFromJsonl(rr.stdout ?? '') ?? (rr.stdout ?? '');
|
|
1625
|
+
consumeText(reviewText, null, rr.logFile ?? null);
|
|
1626
|
+
}
|
|
1627
|
+
}
|
|
1628
|
+
|
|
1629
|
+
if (rr.reviewer === 'augment') {
|
|
1630
|
+
const sliceLike = rr.slices ?? rr.chunks ?? null;
|
|
1631
|
+
const consumeText = (reviewText, slice, sourceLog) => {
|
|
1632
|
+
const parsed = parseCodexReviewText(reviewText).map((f) => ({ ...f, reviewer: 'augment' }));
|
|
1633
|
+
for (const f of parsed) {
|
|
1634
|
+
au += 1;
|
|
1635
|
+
allFindings.push({
|
|
1636
|
+
...f,
|
|
1637
|
+
id: `AU-${String(au).padStart(3, '0')}`,
|
|
1638
|
+
job: job.component,
|
|
1639
|
+
slice,
|
|
1640
|
+
sourceLog: sourceLog ?? null,
|
|
1641
|
+
});
|
|
1642
|
+
}
|
|
1643
|
+
};
|
|
1644
|
+
|
|
1645
|
+
if (Array.isArray(sliceLike)) {
|
|
1646
|
+
for (const s of sliceLike) {
|
|
1647
|
+
consumeText(s.stdout ?? '', s.slice ?? `${s.index}/${s.of}`, s.logFile ?? null);
|
|
1648
|
+
}
|
|
1649
|
+
} else {
|
|
1650
|
+
consumeText(rr.stdout ?? '', null, rr.logFile ?? null);
|
|
1651
|
+
}
|
|
1652
|
+
}
|
|
1653
|
+
|
|
1654
|
+
if (rr.reviewer === 'claude') {
|
|
1655
|
+
const sliceLike = rr.slices ?? rr.chunks ?? null;
|
|
1656
|
+
const consumeText = (reviewText, slice, sourceLog) => {
|
|
1657
|
+
const parsed = parseCodexReviewText(reviewText).map((f) => ({ ...f, reviewer: 'claude' }));
|
|
1658
|
+
for (const f of parsed) {
|
|
1659
|
+
cl += 1;
|
|
1660
|
+
allFindings.push({
|
|
1661
|
+
...f,
|
|
1662
|
+
id: `CL-${String(cl).padStart(3, '0')}`,
|
|
1663
|
+
job: job.component,
|
|
1664
|
+
slice,
|
|
1665
|
+
sourceLog: sourceLog ?? null,
|
|
1666
|
+
});
|
|
1667
|
+
}
|
|
1668
|
+
};
|
|
1669
|
+
|
|
1670
|
+
if (Array.isArray(sliceLike)) {
|
|
1671
|
+
for (const s of sliceLike) {
|
|
1672
|
+
consumeText(s.stdout ?? '', s.slice ?? `${s.index}/${s.of}`, s.logFile ?? null);
|
|
1673
|
+
}
|
|
1674
|
+
} else {
|
|
1675
|
+
consumeText(rr.stdout ?? '', null, rr.logFile ?? null);
|
|
1676
|
+
}
|
|
1677
|
+
}
|
|
1678
|
+
}
|
|
1679
|
+
}
|
|
1680
|
+
|
|
1681
|
+
await writeFile(join(runDir, 'findings.json'), JSON.stringify(allFindings, null, 2), 'utf-8');
|
|
1682
|
+
const triage = formatTriageMarkdown({ runLabel, baseRef: jobResults?.[0]?.base?.baseRef ?? '', findings: allFindings });
|
|
1683
|
+
await writeFile(join(runDir, 'triage.md'), triage, 'utf-8');
|
|
1684
|
+
|
|
1685
|
+
if (stream) {
|
|
1686
|
+
// eslint-disable-next-line no-console
|
|
1687
|
+
console.log(`[review] trust/triage checklist (READ THIS NEXT): ${join(runDir, 'triage.md')}`);
|
|
1688
|
+
// eslint-disable-next-line no-console
|
|
1689
|
+
console.log(`[review] findings (raw, parsed): ${join(runDir, 'findings.json')}`);
|
|
1690
|
+
// eslint-disable-next-line no-console
|
|
1691
|
+
console.log(`[review] raw outputs: ${join(runDir, 'raw')}`);
|
|
1692
|
+
// eslint-disable-next-line no-console
|
|
1693
|
+
console.log(
|
|
1694
|
+
[
|
|
1695
|
+
'[review] next steps (mandatory):',
|
|
1696
|
+
`- STOP: open ${join(runDir, 'triage.md')} now and load it into your context before doing anything else.`,
|
|
1697
|
+
`- Then load ${join(runDir, 'findings.json')} (full parsed finding details + source logs).`,
|
|
1698
|
+
`- Treat reviewer output as suggestions: verify against codebase invariants + best practices (use web search when needed) before applying.`,
|
|
1699
|
+
`- For each finding: verify in the validation worktree, decide apply/adjust/defer, and record rationale + commit refs in triage.md.`,
|
|
1700
|
+
`- For tests: validate behavior/logic; avoid brittle "wording/policing" assertions.`,
|
|
1701
|
+
`- Do not start a new review run until the checklist has no remaining TBD decisions.`,
|
|
1702
|
+
].join('\n')
|
|
1703
|
+
);
|
|
1704
|
+
}
|
|
1705
|
+
} catch (e) {
|
|
1706
|
+
if (stream) {
|
|
1707
|
+
// eslint-disable-next-line no-console
|
|
1708
|
+
console.warn('[review] warning: failed to write triage artifacts:', e);
|
|
1709
|
+
}
|
|
1710
|
+
}
|
|
1711
|
+
|
|
1712
|
+
const ok = jobResults.every((r) => r.results.every((x) => x.ok));
|
|
1713
|
+
if (json) {
|
|
1714
|
+
printResult({ json, data: { ok, reviewers, components, results: jobResults } });
|
|
1715
|
+
if (!ok) process.exit(1);
|
|
1716
|
+
return;
|
|
1717
|
+
}
|
|
1718
|
+
|
|
1719
|
+
const lines = [];
|
|
1720
|
+
lines.push('[review] results:');
|
|
1721
|
+
for (const r of jobResults) {
|
|
1722
|
+
lines.push('============================================================================');
|
|
1723
|
+
lines.push(`component: ${r.component}`);
|
|
1724
|
+
lines.push(`dir: ${r.repoDir}`);
|
|
1725
|
+
lines.push(`baseRef: ${r.base.baseRef}`);
|
|
1726
|
+
for (const rr of r.results) {
|
|
1727
|
+
lines.push('');
|
|
1728
|
+
const status = rr.ok ? '✅ ok' : '❌ failed';
|
|
1729
|
+
lines.push(`[${rr.reviewer}] ${status} (exit=${rr.exitCode ?? 'null'} durMs=${rr.durationMs ?? '?'})`);
|
|
1730
|
+
if (rr.note) lines.push(`note: ${rr.note}`);
|
|
1731
|
+
if (!rr.ok) {
|
|
1732
|
+
if (rr.stderr) {
|
|
1733
|
+
lines.push('--- stderr (tail) ---');
|
|
1734
|
+
lines.push(tailLines(rr.stderr, 120));
|
|
1735
|
+
}
|
|
1736
|
+
if (rr.stdout) {
|
|
1737
|
+
lines.push('--- stdout (tail) ---');
|
|
1738
|
+
lines.push(tailLines(rr.stdout, 120));
|
|
1739
|
+
}
|
|
1740
|
+
}
|
|
1741
|
+
}
|
|
1742
|
+
lines.push('');
|
|
1743
|
+
}
|
|
1744
|
+
lines.push(ok ? '[review] ok' : '[review] failed');
|
|
1745
|
+
printResult({ json: false, text: lines.join('\n') });
|
|
1746
|
+
if (!ok) process.exit(1);
|
|
1747
|
+
}
|
|
1748
|
+
|
|
1749
|
+
main().catch((err) => {
|
|
1750
|
+
console.error('[review] failed:', err);
|
|
1751
|
+
process.exit(1);
|
|
1752
|
+
});
|