@jonit-dev/night-watch-cli 1.7.80 → 1.7.81
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/commands/analytics.d.ts +14 -0
- package/dist/commands/analytics.d.ts.map +1 -0
- package/dist/commands/analytics.js +74 -0
- package/dist/commands/analytics.js.map +1 -0
- package/dist/scripts/night-watch-cron.sh +80 -0
- package/dist/scripts/night-watch-helpers.sh +13 -0
- package/dist/web/assets/index-BjhCFjZi.js +381 -0
- package/package.json +1 -1
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
/**
 * Analytics command - runs the Amplitude analytics job
 */
import { Command } from 'commander';
/**
 * Options parsed from the `analytics` CLI command flags.
 */
export interface IAnalyticsOptions {
    /** When true, print the resolved configuration instead of running the job. */
    dryRun: boolean;
    /** Raw `--timeout <seconds>` value; parsed with parseInt to override analytics.maxRuntime. */
    timeout?: string;
    /** AI provider override ('claude' or 'codex'). */
    provider?: string;
}
/**
 * Register the analytics command with the program
 */
export declare function analyticsCommand(program: Command): void;
//# sourceMappingURL=analytics.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"analytics.d.ts","sourceRoot":"","sources":["../../src/commands/analytics.ts"],"names":[],"mappings":"AAAA;;GAEG;AAEH,OAAO,EAAE,OAAO,EAAE,MAAM,WAAW,CAAC;AAcpC,MAAM,WAAW,iBAAiB;IAChC,MAAM,EAAE,OAAO,CAAC;IAChB,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,QAAQ,CAAC,EAAE,MAAM,CAAC;CACnB;AAED;;GAEG;AACH,wBAAgB,gBAAgB,CAAC,OAAO,EAAE,OAAO,GAAG,IAAI,CAyEvD"}
|
|
@@ -0,0 +1,74 @@
|
|
|
1
|
+
/**
 * Analytics command - runs the Amplitude analytics job
 */
import { createSpinner, createTable, header, info, loadConfig, resolveJobProvider, runAnalytics, } from '@night-watch/core';
import { maybeApplyCronSchedulingDelay } from './shared/env-builder.js';
/**
 * Register the analytics command with the program
 *
 * @param program - commander program the `analytics` subcommand is attached to
 */
export function analyticsCommand(program) {
    program
        .command('analytics')
        .description('Run Amplitude analytics job now')
        .option('--dry-run', 'Show what would be executed without running')
        .option('--timeout <seconds>', 'Override max runtime in seconds')
        .option('--provider <string>', 'AI provider to use (claude or codex)')
        .action(async (options) => {
        const projectDir = process.cwd();
        let config = loadConfig(projectDir);
        // --timeout overrides the configured analytics max runtime; a
        // non-numeric value is silently ignored and the config default kept.
        if (options.timeout) {
            const timeout = parseInt(options.timeout, 10);
            if (!isNaN(timeout)) {
                config = { ...config, analytics: { ...config.analytics, maxRuntime: timeout } };
            }
        }
        // --provider is threaded through as a CLI-level override for
        // resolveJobProvider/runAnalytics to consume.
        if (options.provider) {
            config = {
                ...config,
                _cliProviderOverride: options.provider,
            };
        }
        // Disabled analytics is not an error; exit 0 (dry-run still proceeds).
        if (!config.analytics.enabled && !options.dryRun) {
            info('Analytics is disabled in config; skipping run.');
            process.exit(0);
        }
        const apiKey = config.providerEnv?.AMPLITUDE_API_KEY;
        const secretKey = config.providerEnv?.AMPLITUDE_SECRET_KEY;
        // FIX: report the dry run BEFORE validating Amplitude keys. Previously
        // the missing-key check exited with code 1 first, so --dry-run could not
        // inspect the config without credentials and the 'not set' arm of the
        // API-key table row below was unreachable dead code.
        if (options.dryRun) {
            header('Dry Run: Analytics Job');
            const analyticsProvider = resolveJobProvider(config, 'analytics');
            header('Configuration');
            const configTable = createTable({ head: ['Setting', 'Value'] });
            configTable.push(['Provider', analyticsProvider]);
            configTable.push(['Max Runtime', `${config.analytics.maxRuntime}s`]);
            configTable.push(['Lookback Days', String(config.analytics.lookbackDays)]);
            configTable.push(['Target Column', config.analytics.targetColumn]);
            // Mask the API key; show only its last 4 characters when present.
            configTable.push(['Amplitude API Key', apiKey ? '***' + apiKey.slice(-4) : 'not set']);
            console.log(configTable.toString());
            console.log();
            process.exit(0);
        }
        // Validate Amplitude keys
        if (!apiKey || !secretKey) {
            info('AMPLITUDE_API_KEY and AMPLITUDE_SECRET_KEY must be set in providerEnv to run analytics.');
            process.exit(1);
        }
        const spinner = createSpinner('Running analytics job...');
        spinner.start();
        try {
            // Optional cron-style scheduling delay before the actual run.
            await maybeApplyCronSchedulingDelay(config, 'analytics', projectDir);
            const result = await runAnalytics(config, projectDir);
            if (result.issuesCreated > 0) {
                spinner.succeed(`Analytics complete — ${result.summary}`);
            }
            else {
                spinner.succeed('Analytics complete — no actionable insights found');
            }
        }
        catch (err) {
            spinner.fail(`Analytics failed: ${err instanceof Error ? err.message : String(err)}`);
            process.exit(1);
        }
    });
}
//# sourceMappingURL=analytics.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"analytics.js","sourceRoot":"","sources":["../../src/commands/analytics.ts"],"names":[],"mappings":"AAAA;;GAEG;AAGH,OAAO,EAEL,aAAa,EACb,WAAW,EAEX,MAAM,EACN,IAAI,EACJ,UAAU,EACV,kBAAkB,EAClB,YAAY,GACb,MAAM,mBAAmB,CAAC;AAC3B,OAAO,EAAE,6BAA6B,EAAE,MAAM,yBAAyB,CAAC;AAQxE;;GAEG;AACH,MAAM,UAAU,gBAAgB,CAAC,OAAgB;IAC/C,OAAO;SACJ,OAAO,CAAC,WAAW,CAAC;SACpB,WAAW,CAAC,iCAAiC,CAAC;SAC9C,MAAM,CAAC,WAAW,EAAE,6CAA6C,CAAC;SAClE,MAAM,CAAC,qBAAqB,EAAE,iCAAiC,CAAC;SAChE,MAAM,CAAC,qBAAqB,EAAE,sCAAsC,CAAC;SACrE,MAAM,CAAC,KAAK,EAAE,OAA0B,EAAE,EAAE;QAC3C,MAAM,UAAU,GAAG,OAAO,CAAC,GAAG,EAAE,CAAC;QACjC,IAAI,MAAM,GAAG,UAAU,CAAC,UAAU,CAAC,CAAC;QAEpC,IAAI,OAAO,CAAC,OAAO,EAAE,CAAC;YACpB,MAAM,OAAO,GAAG,QAAQ,CAAC,OAAO,CAAC,OAAO,EAAE,EAAE,CAAC,CAAC;YAC9C,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,EAAE,CAAC;gBACpB,MAAM,GAAG,EAAE,GAAG,MAAM,EAAE,SAAS,EAAE,EAAE,GAAG,MAAM,CAAC,SAAS,EAAE,UAAU,EAAE,OAAO,EAAE,EAAE,CAAC;YAClF,CAAC;QACH,CAAC;QAED,IAAI,OAAO,CAAC,QAAQ,EAAE,CAAC;YACrB,MAAM,GAAG;gBACP,GAAG,MAAM;gBACT,oBAAoB,EAAE,OAAO,CAAC,QAAyC;aACxE,CAAC;QACJ,CAAC;QAED,IAAI,CAAC,MAAM,CAAC,SAAS,CAAC,OAAO,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,CAAC;YACjD,IAAI,CAAC,gDAAgD,CAAC,CAAC;YACvD,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;QAClB,CAAC;QAED,0BAA0B;QAC1B,MAAM,MAAM,GAAG,MAAM,CAAC,WAAW,EAAE,iBAAiB,CAAC;QACrD,MAAM,SAAS,GAAG,MAAM,CAAC,WAAW,EAAE,oBAAoB,CAAC;QAC3D,IAAI,CAAC,MAAM,IAAI,CAAC,SAAS,EAAE,CAAC;YAC1B,IAAI,CAAC,yFAAyF,CAAC,CAAC;YAChG,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;QAClB,CAAC;QAED,IAAI,OAAO,CAAC,MAAM,EAAE,CAAC;YACnB,MAAM,CAAC,wBAAwB,CAAC,CAAC;YAEjC,MAAM,iBAAiB,GAAG,kBAAkB,CAAC,MAAM,EAAE,WAAW,CAAC,CAAC;YAElE,MAAM,CAAC,eAAe,CAAC,CAAC;YACxB,MAAM,WAAW,GAAG,WAAW,CAAC,EAAE,IAAI,EAAE,CAAC,SAAS,EAAE,OAAO,CAAC,EAAE,CAAC,CAAC;YAChE,WAAW,CAAC,IAAI,CAAC,CAAC,UAAU,EAAE,iBAAiB,CAAC,CAAC,CAAC;YAClD,WAAW,CAAC,IAAI,CAAC,CAAC,aAAa,EAAE,GAAG,MAAM,CAAC,SAAS,CAAC,UAAU,GAAG,CAAC,CAAC,CAAC;YACrE,WAAW,CAAC,IAAI,CAAC,CAAC,eAAe,EAAE,MAAM,CAAC,MAAM,CAAC,SAAS,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC;YAC3E,WAAW,CAAC,IAAI,CAAC,CAAC,eAAe,EAAE,MAAM,CAAC,SAAS,
CAAC,YAAY,CAAC,CAAC,CAAC;YACnE,WAAW,CAAC,IAAI,CAAC,CAAC,mBAAmB,EAAE,MAAM,CAAC,CAAC,CAAC,KAAK,GAAG,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC;YACvF,OAAO,CAAC,GAAG,CAAC,WAAW,CAAC,QAAQ,EAAE,CAAC,CAAC;YACpC,OAAO,CAAC,GAAG,EAAE,CAAC;YAEd,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;QAClB,CAAC;QAED,MAAM,OAAO,GAAG,aAAa,CAAC,0BAA0B,CAAC,CAAC;QAC1D,OAAO,CAAC,KAAK,EAAE,CAAC;QAEhB,IAAI,CAAC;YACH,MAAM,6BAA6B,CAAC,MAAM,EAAE,WAAW,EAAE,UAAU,CAAC,CAAC;YACrE,MAAM,MAAM,GAAG,MAAM,YAAY,CAAC,MAAM,EAAE,UAAU,CAAC,CAAC;YAEtD,IAAI,MAAM,CAAC,aAAa,GAAG,CAAC,EAAE,CAAC;gBAC7B,OAAO,CAAC,OAAO,CAAC,wBAAwB,MAAM,CAAC,OAAO,EAAE,CAAC,CAAC;YAC5D,CAAC;iBAAM,CAAC;gBACN,OAAO,CAAC,OAAO,CAAC,mDAAmD,CAAC,CAAC;YACvE,CAAC;QACH,CAAC;QAAC,OAAO,GAAG,EAAE,CAAC;YACb,OAAO,CAAC,IAAI,CAAC,qBAAqB,GAAG,YAAY,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC;YACtF,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;QAClB,CAAC;IACH,CAAC,CAAC,CAAC;AACP,CAAC"}
|
|
@@ -601,6 +601,74 @@ while [ "${ATTEMPT}" -lt "${MAX_RETRIES}" ]; do
|
|
|
601
601
|
BACKOFF_MIN=$(( BACKOFF / 60 ))
|
|
602
602
|
log "RATE-LIMITED: Attempt ${ATTEMPT}/${MAX_RETRIES}, retrying in ${BACKOFF_MIN}m"
|
|
603
603
|
sleep "${BACKOFF}"
|
|
604
|
+
elif check_context_exhausted "${LOG_FILE}" "${LOG_LINE_BEFORE}"; then
|
|
605
|
+
# Context window exhausted — checkpoint progress and resume in a fresh session
|
|
606
|
+
ATTEMPT=$((ATTEMPT + 1))
|
|
607
|
+
if [ "${ATTEMPT}" -ge "${MAX_RETRIES}" ]; then
|
|
608
|
+
log "CONTEXT-EXHAUSTED: All ${MAX_RETRIES} resume attempts exhausted for ${ELIGIBLE_PRD}"
|
|
609
|
+
break
|
|
610
|
+
fi
|
|
611
|
+
log "CONTEXT-EXHAUSTED: Session ${ATTEMPT_NUM} hit context limit — checkpointing and resuming (${ATTEMPT}/${MAX_RETRIES})"
|
|
612
|
+
checkpoint_timeout_progress "${WORKTREE_DIR}" "${BRANCH_NAME}" "${ELIGIBLE_PRD}"
|
|
613
|
+
git -C "${WORKTREE_DIR}" push origin "${BRANCH_NAME}" --force-with-lease >> "${LOG_FILE}" 2>&1 || true
|
|
614
|
+
# Switch prompt to "continue" mode for the next attempt (fresh context)
|
|
615
|
+
if [ -n "${ISSUE_NUMBER}" ]; then
|
|
616
|
+
PROMPT="Continue implementing PRD (GitHub issue #${ISSUE_NUMBER}: ${ISSUE_TITLE_RAW}).
|
|
617
|
+
|
|
618
|
+
The previous session ran out of context window. Progress has been committed on branch ${BRANCH_NAME}.
|
|
619
|
+
|
|
620
|
+
## Your task
|
|
621
|
+
1. Review the current state: check git log, existing code changes, and any task list
|
|
622
|
+
2. Compare against the original PRD requirements (issue #${ISSUE_NUMBER}) to identify what is already done vs remaining
|
|
623
|
+
3. Continue implementing the remaining phases/tasks
|
|
624
|
+
4. Do NOT redo work that is already completed and committed
|
|
625
|
+
|
|
626
|
+
## Setup
|
|
627
|
+
- You are already inside an isolated worktree at: ${WORKTREE_DIR}
|
|
628
|
+
- Current branch is already checked out: ${BRANCH_NAME}
|
|
629
|
+
- Do NOT run git checkout/switch in ${PROJECT_DIR}
|
|
630
|
+
- Do NOT create or remove worktrees; the cron script manages that
|
|
631
|
+
|
|
632
|
+
## Implementation — PRD Executor Workflow
|
|
633
|
+
Read ${EXECUTOR_PROMPT_REF} and follow the FULL execution pipeline for remaining phases only.
|
|
634
|
+
Follow all CLAUDE.md conventions (if present).
|
|
635
|
+
|
|
636
|
+
## Finalize
|
|
637
|
+
- Commit all changes, push, and open a PR:
|
|
638
|
+
git push -u origin ${BRANCH_NAME}
|
|
639
|
+
gh pr create --title \"feat: <short title>\" --body \"Closes #${ISSUE_NUMBER}
|
|
640
|
+
|
|
641
|
+
<summary>\"
|
|
642
|
+
- Do NOT process any other issues — only issue #${ISSUE_NUMBER}"
|
|
643
|
+
else
|
|
644
|
+
PROMPT="Continue implementing the PRD at ${PRD_DIR_REL}/${ELIGIBLE_PRD}
|
|
645
|
+
|
|
646
|
+
The previous session ran out of context window. Progress has been committed on branch ${BRANCH_NAME}.
|
|
647
|
+
|
|
648
|
+
## Your task
|
|
649
|
+
1. Review the current state: check git log, existing code changes, and any task list
|
|
650
|
+
2. Compare against the original PRD to identify what is already done vs remaining
|
|
651
|
+
3. Continue implementing the remaining phases/tasks
|
|
652
|
+
4. Do NOT redo work that is already completed and committed
|
|
653
|
+
|
|
654
|
+
## Setup
|
|
655
|
+
- You are already inside an isolated worktree at: ${WORKTREE_DIR}
|
|
656
|
+
- Current branch is already checked out: ${BRANCH_NAME}
|
|
657
|
+
- Do NOT run git checkout/switch in ${PROJECT_DIR}
|
|
658
|
+
- Do NOT create or remove worktrees; the cron script manages that
|
|
659
|
+
|
|
660
|
+
## Implementation — PRD Executor Workflow
|
|
661
|
+
Read ${EXECUTOR_PROMPT_REF} and follow the FULL execution pipeline for remaining phases only.
|
|
662
|
+
Follow all CLAUDE.md conventions (if present).
|
|
663
|
+
|
|
664
|
+
## Finalize
|
|
665
|
+
- Commit all changes, push, and open a PR:
|
|
666
|
+
git push -u origin ${BRANCH_NAME}
|
|
667
|
+
gh pr create --title \"feat: <short title>\" --body \"<summary referencing PRD>\"
|
|
668
|
+
- Do NOT move the PRD to done/ — the cron script handles that
|
|
669
|
+
- Do NOT process any other PRDs — only ${ELIGIBLE_PRD}"
|
|
670
|
+
fi
|
|
671
|
+
# No backoff — context exhaustion is not rate-limiting
|
|
604
672
|
else
|
|
605
673
|
# Non-retryable failure
|
|
606
674
|
break
|
|
@@ -737,6 +805,18 @@ elif [ "${DOUBLE_RATE_LIMITED}" = "1" ]; then
|
|
|
737
805
|
fi
|
|
738
806
|
night_watch_history record "${PROJECT_DIR}" "${ELIGIBLE_PRD}" rate_limited --exit-code "${EXIT_CODE}" 2>/dev/null || true
|
|
739
807
|
emit_result "rate_limited" "prd=${ELIGIBLE_PRD}|branch=${BRANCH_NAME}|reason=double_rate_limit"
|
|
808
|
+
elif check_context_exhausted "${LOG_FILE}" "${LOG_LINE_BEFORE}"; then
|
|
809
|
+
# All resume attempts for context exhaustion were used up
|
|
810
|
+
log "FAIL: Context window exhausted after ${MAX_RETRIES} resume attempts for ${ELIGIBLE_PRD}"
|
|
811
|
+
checkpoint_timeout_progress "${WORKTREE_DIR}" "${BRANCH_NAME}" "${ELIGIBLE_PRD}"
|
|
812
|
+
git -C "${WORKTREE_DIR}" push origin "${BRANCH_NAME}" --force-with-lease >> "${LOG_FILE}" 2>&1 || true
|
|
813
|
+
if [ -n "${ISSUE_NUMBER}" ]; then
|
|
814
|
+
"${NW_CLI}" board move-issue "${ISSUE_NUMBER}" --column "Ready" 2>>"${LOG_FILE}" || true
|
|
815
|
+
"${NW_CLI}" board comment "${ISSUE_NUMBER}" \
|
|
816
|
+
--body "Context window exhausted after ${MAX_RETRIES} resume attempts (${TOTAL_ELAPSED}s total, via ${EFFECTIVE_PROVIDER_LABEL}). Progress checkpointed on branch \`${BRANCH_NAME}\`. Will resume on next run." 2>>"${LOG_FILE}" || true
|
|
817
|
+
fi
|
|
818
|
+
night_watch_history record "${PROJECT_DIR}" "${ELIGIBLE_PRD}" context_exhausted --exit-code "${EXIT_CODE}" 2>/dev/null || true
|
|
819
|
+
emit_result "failure" "prd=${ELIGIBLE_PRD}|branch=${BRANCH_NAME}|reason=context_exhausted|exit_code=${EXIT_CODE}"
|
|
740
820
|
else
|
|
741
821
|
PROVIDER_ERROR_DETAIL=$(latest_failure_detail "${LOG_FILE}" "${LOG_LINE_BEFORE}")
|
|
742
822
|
log "FAIL: Night watch exited with code ${EXIT_CODE} while processing ${ELIGIBLE_PRD}"
|
|
@@ -698,6 +698,19 @@ check_rate_limited() {
|
|
|
698
698
|
fi
|
|
699
699
|
}
|
|
700
700
|
|
|
701
|
+
# Detect context window exhaustion from Claude API logs.
# Usage: check_context_exhausted <log_file> [start_line]
# Returns 0 if context exhausted, 1 otherwise.
check_context_exhausted() {
  local logfile="${1:?log_file required}"
  local from_line="${2:-0}"
  # Without a usable start line (missing, zero, or non-numeric), only scan
  # the tail end of the log; otherwise scan everything appended after it.
  if ! [ "${from_line}" -gt 0 ] 2>/dev/null; then
    tail -20 "${logfile}" 2>/dev/null | grep -qi "context window"
  else
    tail -n "+$((from_line + 1))" "${logfile}" 2>/dev/null | grep -qi "context window"
  fi
}
|
|
713
|
+
|
|
701
714
|
# Resolve URL host from a URL-like string.
|
|
702
715
|
# Example: "https://api.z.ai/api/anthropic" -> "api.z.ai"
|
|
703
716
|
extract_url_host() {
|