@jonit-dev/night-watch-cli 1.7.34 → 1.7.36
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli.js +6 -0
- package/dist/scripts/night-watch-audit-cron.sh +168 -0
- package/dist/scripts/night-watch-cron.sh +484 -0
- package/dist/scripts/night-watch-helpers.sh +515 -0
- package/dist/scripts/night-watch-pr-reviewer-cron.sh +528 -0
- package/dist/scripts/night-watch-qa-cron.sh +281 -0
- package/dist/scripts/night-watch-slicer-cron.sh +90 -0
- package/dist/scripts/test-helpers.bats +77 -0
- package/dist/web/assets/index-BiJf9LFT.js +458 -0
- package/dist/web/assets/index-OpSgvsYu.css +1 -0
- package/dist/web/avatars/carlos.webp +0 -0
- package/dist/web/avatars/dev.webp +0 -0
- package/dist/web/avatars/maya.webp +0 -0
- package/dist/web/avatars/priya.webp +0 -0
- package/dist/web/index.html +82 -0
- package/package.json +1 -1
|
@@ -0,0 +1,515 @@
|
|
|
1
|
+
#!/usr/bin/env bash
|
|
2
|
+
# Night Watch helper functions — shared by cron scripts.
|
|
3
|
+
# Source this file, don't execute it directly.
|
|
4
|
+
|
|
5
|
+
# ── Provider validation ───────────────────────────────────────────────────────
|
|
6
|
+
|
|
7
|
+
# Validate that the requested AI provider command is one we support.
# $1 - provider command name (required)
# Returns 0 for a supported provider (claude, codex), 1 for anything else.
validate_provider() {
  local candidate="${1:?provider required}"
  if [[ "${candidate}" == "claude" || "${candidate}" == "codex" ]]; then
    return 0
  fi
  return 1
}
|
|
21
|
+
|
|
22
|
+
# Locate a runnable night-watch CLI for nested script invocations.
# Resolution order:
#   1. NW_CLI_BIN from the parent environment (must be executable)
#   2. `night-watch` found on PATH
#   3. the bundled bin/night-watch.mjs next to this scripts/ directory
# Prints the chosen command on stdout; returns 1 when nothing is found.
resolve_night_watch_cli() {
  local env_bin="${NW_CLI_BIN:-}"
  if [ -n "${env_bin}" ] && [ -x "${env_bin}" ]; then
    printf "%s" "${env_bin}"
    return 0
  fi

  if command -v night-watch >/dev/null 2>&1; then
    printf "%s" "night-watch"
    return 0
  fi

  # Fall back to the copy shipped alongside these scripts; honour SCRIPT_DIR
  # when the sourcing script already resolved it.
  local base_dir="${SCRIPT_DIR:-}"
  if [ -z "${base_dir}" ]; then
    base_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
  fi

  local bundled="${base_dir}/../bin/night-watch.mjs"
  if [ -x "${bundled}" ]; then
    printf "%s" "${bundled}"
    return 0
  fi

  return 1
}
|
|
53
|
+
|
|
54
|
+
# Run `night-watch history <args...>` through whichever CLI is available.
# Returns 127 when no CLI can be resolved; otherwise the CLI's exit status.
night_watch_history() {
  local resolved
  resolved=$(resolve_night_watch_cli) || return 127
  "${resolved}" history "$@"
}
|
|
59
|
+
|
|
60
|
+
# ── Logging ──────────────────────────────────────────────────────────────────
|
|
61
|
+
|
|
62
|
+
# Append a timestamped message line to ${LOG_FILE}.
# Aborts with an error if LOG_FILE is unset or empty.
log() {
  local target="${LOG_FILE:?LOG_FILE not set}"
  local stamp
  stamp=$(date '+%Y-%m-%d %H:%M:%S')
  echo "[${stamp}] $*" >> "${target}"
}
|
|
66
|
+
|
|
67
|
+
# ── Log rotation ─────────────────────────────────────────────────────────────
|
|
68
|
+
|
|
69
|
+
# Rotate ${LOG_FILE} to ${LOG_FILE}.old once it exceeds MAX_LOG_SIZE bytes
# (default 512 KiB). Keeps exactly one rotated generation.
# Fix: the original used GNU-only `stat -c%s`, which fails on BSD/macOS and
# fell back to size 0 — so logs on those platforms were never rotated.
# `wc -c` reading redirected stdin is POSIX-portable and prints just the
# byte count (the integer test tolerates any leading whitespace).
rotate_log() {
  local log_file="${LOG_FILE:?LOG_FILE not set}"
  local max_size="${MAX_LOG_SIZE:-524288}"
  local size

  if [ -f "${log_file}" ]; then
    size=$(wc -c < "${log_file}" 2>/dev/null || echo 0)
    if [ "${size}" -gt "${max_size}" ]; then
      mv "${log_file}" "${log_file}.old"
    fi
  fi
}
|
|
77
|
+
|
|
78
|
+
# ── Lock management ──────────────────────────────────────────────────────────
|
|
79
|
+
|
|
80
|
+
# Derive a stable per-project runtime key: "<basename>-<sha1(path)[0:12]>".
# Hashing the absolute path disambiguates projects that share a basename.
# Tries sha1sum, then shasum, then openssl; returns 1 if no hasher exists.
project_runtime_key() {
  local target_dir="${1:?project_dir required}"
  local base
  local digest=""
  base=$(basename "${target_dir}")

  if command -v sha1sum >/dev/null 2>&1; then
    digest=$(printf '%s' "${target_dir}" | sha1sum | awk '{print $1}')
  elif command -v shasum >/dev/null 2>&1; then
    digest=$(printf '%s' "${target_dir}" | shasum -a 1 | awk '{print $1}')
  elif command -v openssl >/dev/null 2>&1; then
    digest=$(printf '%s' "${target_dir}" | openssl sha1 | awk '{print $NF}')
  else
    return 1
  fi

  printf '%s-%s' "${base}" "${digest:0:12}"
}
|
|
98
|
+
|
|
99
|
+
# Take an exclusive PID-file lock.
# Refuses (returns 1) when the PID recorded in the lock file is still alive;
# clears stale locks left behind by dead processes. Installs an EXIT trap
# that removes the lock when this shell terminates.
acquire_lock() {
  local lock_path="${1:?lock_file required}"
  local holder=""

  if [ -f "${lock_path}" ]; then
    holder=$(cat "${lock_path}" 2>/dev/null || echo "")
    if [ -n "${holder}" ] && kill -0 "${holder}" 2>/dev/null; then
      log "SKIP: Previous run (PID ${holder}) still active"
      return 1
    fi
    log "WARN: Stale lock file found (PID ${holder}), removing"
    rm -f "${lock_path}"
  fi

  # Register cleanup before writing so an interrupted write still removes it.
  trap "rm -f '${lock_path}'" EXIT
  echo $$ > "${lock_path}"
  return 0
}
|
|
117
|
+
|
|
118
|
+
# ── Detect default branch ───────────────────────────────────────────────────
|
|
119
|
+
|
|
120
|
+
# Print the newest commit epoch (%ct) reachable at <branch>, considering
# both origin/<branch> and the local branch; prints "" when neither exists.
get_branch_tip_timestamp() {
  local repo="${1:?project_dir required}"
  local branch="${2:?branch required}"
  local remote_tip
  local local_tip
  local newest=""

  remote_tip=$(git -C "${repo}" log -1 --format=%ct "refs/remotes/origin/${branch}" 2>/dev/null || true)
  local_tip=$(git -C "${repo}" log -1 --format=%ct "refs/heads/${branch}" 2>/dev/null || true)

  if [ -n "${remote_tip}" ]; then
    newest="${remote_tip}"
  fi
  # The local branch wins only when it is strictly newer (or remote absent).
  if [ -n "${local_tip}" ] && { [ -z "${newest}" ] || [ "${local_tip}" -gt "${newest}" ]; }; then
    newest="${local_tip}"
  fi

  printf "%s" "${newest}"
}
|
|
139
|
+
|
|
140
|
+
# Decide which branch ("main" or "master") is this project's default.
# Preference order: the branch with the newer tip commit, then whichever of
# the two exists at all, then origin/HEAD's target, then a literal "main".
detect_default_branch() {
  local repo="${1:?project_dir required}"
  local ts_main
  local ts_master
  local head_ref=""

  ts_main=$(get_branch_tip_timestamp "${repo}" "main")
  ts_master=$(get_branch_tip_timestamp "${repo}" "master")

  # Both exist: pick the one with the more recent tip (ties go to main).
  if [ -n "${ts_main}" ] && [ -n "${ts_master}" ]; then
    if [ "${ts_main}" -ge "${ts_master}" ]; then
      echo "main"
    else
      echo "master"
    fi
    return 0
  fi

  if [ -n "${ts_main}" ]; then
    echo "main"
    return 0
  fi
  if [ -n "${ts_master}" ]; then
    echo "master"
    return 0
  fi

  # Neither branch found locally — ask the remote what HEAD points at.
  head_ref=$(git -C "${repo}" symbolic-ref refs/remotes/origin/HEAD 2>/dev/null \
    | sed 's@^refs/remotes/origin/@@' || true)
  if [ -n "${head_ref}" ]; then
    echo "${head_ref}"
    return 0
  fi

  echo "main"
}
|
|
177
|
+
|
|
178
|
+
# ── Claim management ─────────────────────────────────────────────────────────
|
|
179
|
+
|
|
180
|
+
# Write a JSON claim marker (<prd>.claim) recording when, where, and by
# which process the PRD was claimed.
claim_prd() {
  local prd_dir="${1:?prd_dir required}"
  local prd_file="${2:?prd_file required}"
  local marker="${prd_dir}/${prd_file}.claim"

  printf '{"timestamp":%d,"hostname":"%s","pid":%d}\n' \
    "$(date +%s)" "$(hostname)" "$$" > "${marker}"
}
|
|
188
|
+
|
|
189
|
+
# Delete the claim marker for a PRD, if one exists. Always succeeds.
release_claim() {
  local prd_dir="${1:?prd_dir required}"
  local prd_file="${2:?prd_file required}"

  rm -f "${prd_dir}/${prd_file}.claim"
}
|
|
196
|
+
|
|
197
|
+
# Check whether a PRD carries a live claim marker.
# $3 (default 7200s) is the maximum claim age before it counts as stale.
# Stale markers are deleted as a side effect.
# Returns 0 when actively claimed, 1 otherwise.
is_claimed() {
  local prd_dir="${1:?prd_dir required}"
  local prd_file="${2:?prd_file required}"
  local max_runtime="${3:-7200}"
  local marker="${prd_dir}/${prd_file}.claim"

  [ -f "${marker}" ] || return 1

  # Extract the numeric timestamp without requiring jq; default to 0 so a
  # malformed marker is treated as ancient (and therefore stale).
  local stamped
  stamped=$(grep -o '"timestamp":[0-9]*' "${marker}" 2>/dev/null | grep -o '[0-9]*' || echo "0")
  local elapsed
  elapsed=$(( $(date +%s) - stamped ))

  if [ "${elapsed}" -lt "${max_runtime}" ]; then
    return 0 # actively claimed
  fi

  # Stale claim — remove it
  rm -f "${marker}"
  return 1
}
|
|
221
|
+
|
|
222
|
+
# ── Find next eligible PRD ───────────────────────────────────────────────────
|
|
223
|
+
|
|
224
|
+
# Select the next PRD file that is safe to work on.
# Arguments:
#   $1 - prd_dir: directory containing *.md PRD files
#   $2 - max_runtime (default 7200s): claim/cooldown staleness window
#   $3 - project_dir (optional): enables the execution-history cooldown check
# Output: prints the chosen PRD filename (basename) to stdout, or nothing
#         when no PRD is eligible. Always returns 0.
# A PRD is skipped when it is claimed by another process, in failure
# cooldown, already has an open PR, or has unmet "depends on" dependencies
# (dependency files must already be in done/).
find_eligible_prd() {
  local prd_dir="${1:?prd_dir required}"
  local max_runtime="${2:-7200}"
  local project_dir="${3:-}"
  local done_dir="${prd_dir}/done"

  local prd_files
  # Candidate list: top-level *.md files only, sorted for deterministic
  # ordering; the summary file is never a PRD.
  prd_files=$(find "${prd_dir}" -maxdepth 1 -name '*.md' ! -name 'NIGHT-WATCH-SUMMARY.md' -type f 2>/dev/null | sort)

  if [ -z "${prd_files}" ]; then
    return 0
  fi

  # Apply priority ordering if NW_PRD_PRIORITY is set (colon-separated PRD names)
  if [ -n "${NW_PRD_PRIORITY:-}" ]; then
    local ordered=""
    IFS=':' read -ra prio_list <<< "${NW_PRD_PRIORITY}"
    for pname in "${prio_list[@]}"; do
      local match
      match=$(echo "${prd_files}" | grep "/${pname}\.md$" || true)
      if [ -n "${match}" ]; then
        ordered="${ordered}${match}"$'\n'
      fi
    done
    # Append remaining files not in priority list
    while IFS= read -r pf; do
      if [ -n "${pf}" ] && ! echo "${ordered}" | grep -qF "${pf}"; then
        ordered="${ordered}${pf}"$'\n'
      fi
    done <<< "${prd_files}"
    prd_files=$(echo "${ordered}" | sed '/^$/d')
  fi

  local open_branches
  # Head branch names of all open PRs; empty when gh is missing or errors.
  open_branches=$(gh pr list --state open --json headRefName --jq '.[].headRefName' 2>/dev/null || echo "")

  # NOTE(review): unquoted expansion splits ${prd_files} on whitespace —
  # assumes PRD paths contain no spaces; confirm against naming conventions.
  for prd_path in ${prd_files}; do
    local prd_file
    prd_file=$(basename "${prd_path}")
    local prd_name="${prd_file%.md}"

    # Skip if claimed by another process
    if is_claimed "${prd_dir}" "${prd_file}" "${max_runtime}"; then
      log "SKIP-PRD: ${prd_file} — claimed by another process"
      continue
    fi

    # Skip if in cooldown after a recent failure (checked via execution history ledger)
    if [ -n "${project_dir}" ] && night_watch_history check "${project_dir}" "${prd_file}" --cooldown "${max_runtime}" 2>/dev/null; then
      log "SKIP-PRD: ${prd_file} — in cooldown after recent failure"
      continue
    fi

    # Skip if a PR already exists for this PRD (substring match on branch names)
    if echo "${open_branches}" | grep -qF "${prd_name}"; then
      log "SKIP-PRD: ${prd_file} — open PR already exists"
      continue
    fi

    # Check dependencies
    local depends_on
    # First "depends on" line in the PRD; extract the referenced *.md names.
    depends_on=$(grep -i 'depends on' "${prd_path}" 2>/dev/null \
      | head -1 \
      | grep -oP '[a-z0-9_-]+\.md' || echo "")
    if [ -n "${depends_on}" ]; then
      local dep_met=true
      for dep_file in ${depends_on}; do
        if [ ! -f "${done_dir}/${dep_file}" ]; then
          log "SKIP-PRD: ${prd_file} — unmet dependency: ${dep_file}"
          dep_met=false
          break
        fi
      done
      if [ "${dep_met}" = false ]; then
        continue
      fi
    fi

    echo "${prd_file}"
    return 0
  done
}
|
|
306
|
+
|
|
307
|
+
# ── Clean up worktrees ───────────────────────────────────────────────────────
|
|
308
|
+
# Remove leftover night-watch worktrees for this project.
# $1 - project_dir: the main checkout
# $2 - optional scope token: narrows cleanup to worktrees whose path
#      contains that token, so parallel reviewer workers don't delete each
#      other's worktrees; defaults to "<project>-nw".
# Fix: the original extracted paths with `awk '{print $2}'`, which truncated
# any worktree path containing spaces; stripping the "worktree " prefix with
# sed preserves the full path.
cleanup_worktrees() {
  local project_dir="${1:?project_dir required}"
  local scope="${2:-}"
  local project_name
  project_name=$(basename "${project_dir}")

  local match_token="${project_name}-nw"
  if [ -n "${scope}" ]; then
    match_token="${scope}"
  fi

  git -C "${project_dir}" worktree list --porcelain 2>/dev/null \
    | sed -n 's/^worktree //p' \
    | grep -F "${match_token}" \
    | while read -r wt; do
        log "CLEANUP: Removing leftover worktree ${wt}"
        git -C "${project_dir}" worktree remove --force "${wt}" 2>/dev/null || true
      done || true
}
|
|
332
|
+
|
|
333
|
+
# Print the best ref to base a new detached worktree on, in preference
# order: origin/<default>, local <default>, origin/HEAD, then current HEAD
# (the last handles local-only repos with no remote).
# Returns 1 when nothing resolves (e.g. not a git repository).
resolve_worktree_base_ref() {
  local repo="${1:?project_dir required}"
  local branch="${2:?default_branch required}"
  local pair
  local check_ref
  local out_ref

  # Candidates encoded as "<ref-to-verify>:<name-to-print>"; git refnames
  # cannot contain ':', so the split is unambiguous.
  for pair in \
    "refs/remotes/origin/${branch}:origin/${branch}" \
    "refs/heads/${branch}:${branch}" \
    "refs/remotes/origin/HEAD:origin/HEAD" \
    "HEAD:HEAD"; do
    check_ref="${pair%%:*}"
    out_ref="${pair#*:}"
    if git -C "${repo}" rev-parse --verify --quiet "${check_ref}" >/dev/null; then
      printf "%s" "${out_ref}"
      return 0
    fi
  done

  return 1
}
|
|
361
|
+
|
|
362
|
+
# Create a worktree at <worktree_dir> checked out on <branch_name>, without
# switching branches in the user's main project checkout.
# Branch resolution: reuse an existing local branch; else track the remote
# branch of the same name; else create the branch fresh from the best
# available base ref. Returns git's exit status; 1 if no base ref resolves.
prepare_branch_worktree() {
  local project_dir="${1:?project_dir required}"
  local worktree_dir="${2:?worktree_dir required}"
  local branch_name="${3:?branch_name required}"
  local default_branch="${4:?default_branch required}"
  local log_file="${5:-${LOG_FILE:-/dev/null}}"
  local base_ref=""

  # Best-effort refresh of the default branch; offline repos still proceed.
  git -C "${project_dir}" fetch origin "${default_branch}" >> "${log_file}" 2>&1 || true
  base_ref=$(resolve_worktree_base_ref "${project_dir}" "${default_branch}") || return 1

  if git -C "${project_dir}" rev-parse --verify --quiet "refs/heads/${branch_name}" >/dev/null; then
    # Local branch exists: attach the worktree to it directly.
    git -C "${project_dir}" worktree add "${worktree_dir}" "${branch_name}" >> "${log_file}" 2>&1
  elif git -C "${project_dir}" rev-parse --verify --quiet "refs/remotes/origin/${branch_name}" >/dev/null; then
    # Remote branch exists: create a tracking local branch from it.
    git -C "${project_dir}" worktree add -b "${branch_name}" "${worktree_dir}" "origin/${branch_name}" >> "${log_file}" 2>&1
  else
    # Brand-new branch off the resolved base ref.
    git -C "${project_dir}" worktree add -b "${branch_name}" "${worktree_dir}" "${base_ref}" >> "${log_file}" 2>&1
  fi
}
|
|
387
|
+
|
|
388
|
+
# Create an isolated detached worktree at <worktree_dir> based on the best
# available ref (useful for reviewer/controller flows).
# Returns git's exit status; 1 when no base ref resolves.
prepare_detached_worktree() {
  local project_dir="${1:?project_dir required}"
  local worktree_dir="${2:?worktree_dir required}"
  local default_branch="${3:?default_branch required}"
  local log_file="${4:-${LOG_FILE:-/dev/null}}"
  local base_ref=""

  # A directory left on disk by a killed or interrupted run may no longer be
  # registered with git; drop it so `worktree add` can reuse the path.
  if [ -d "${worktree_dir}" ] \
    && ! git -C "${project_dir}" worktree list --porcelain 2>/dev/null \
      | grep -qF "worktree ${worktree_dir}"; then
    log "WARN: Removing unregistered stale worktree directory ${worktree_dir}"
    rm -rf "${worktree_dir}"
  fi

  # Best-effort refresh of the default branch; offline repos still proceed.
  git -C "${project_dir}" fetch origin "${default_branch}" >> "${log_file}" 2>&1 || true
  base_ref=$(resolve_worktree_base_ref "${project_dir}" "${default_branch}") || return 1

  git -C "${project_dir}" worktree add --detach "${worktree_dir}" "${base_ref}" >> "${log_file}" 2>&1
}
|
|
411
|
+
|
|
412
|
+
# ── Mark PRD as done ─────────────────────────────────────────────────────────
|
|
413
|
+
|
|
414
|
+
# Archive a finished PRD by moving it into <prd_dir>/done/.
# Returns 0 on success, 1 when the PRD file is missing.
mark_prd_done() {
  local prd_dir="${1:?prd_dir required}"
  local prd_file="${2:?prd_file required}"
  local done_dir="${prd_dir}/done"
  local source_path="${prd_dir}/${prd_file}"

  mkdir -p "${done_dir}"

  if [ ! -f "${source_path}" ]; then
    log "WARN: PRD file not found: ${prd_dir}/${prd_file}"
    return 1
  fi

  mv "${source_path}" "${done_dir}/${prd_file}"
  log "DONE-PRD: Moved ${prd_file} to done/"
  return 0
}
|
|
430
|
+
|
|
431
|
+
# ── Rate limit detection ────────────────────────────────────────────────────
|
|
432
|
+
|
|
433
|
+
# Detect a 429 rate-limit marker in <log_file>.
# Usage: check_rate_limited <log_file> [start_line]
# With start_line > 0, only lines AFTER that position are scanned, so a 429
# from a previous run can't trigger a false positive; otherwise only the
# last 20 lines are checked.
# Returns 0 when rate limited, 1 otherwise.
check_rate_limited() {
  local log_file="${1:?log_file required}"
  local start_line="${2:-0}"

  # NOTE(review): this matches any "429" substring (e.g. inside a larger
  # number) — kept as-is to preserve existing behavior.
  if [ "${start_line}" -gt 0 ] 2>/dev/null; then
    tail -n "+$((start_line + 1))" "${log_file}" 2>/dev/null | grep -q "429"
    return $?
  fi
  tail -20 "${log_file}" 2>/dev/null | grep -q "429"
}
|
|
447
|
+
|
|
448
|
+
# Send an immediate Telegram warning when the rate-limit fallback is triggered.
# Preferred input: NW_TELEGRAM_RATE_LIMIT_WEBHOOKS (JSON array with botToken/chatId).
# Legacy fallback: NW_TELEGRAM_BOT_TOKEN + NW_TELEGRAM_CHAT_ID.
# Usage: send_rate_limit_fallback_warning <model> <project_name>
# Delivery failures are deliberately swallowed (|| true) so a broken webhook
# never aborts the calling cron script.
send_rate_limit_fallback_warning() {
  local model="${1:-native Claude}"
  local project_name="${2:-unknown}"
  # Multi-line message body — the embedded newlines are part of the text.
  local msg="⚠️ Rate Limit Fallback

Project: ${project_name}
Proxy quota exhausted - falling back to native Claude (${model})"

  # Preferred path: iterate all opted-in Telegram webhooks.
  # Requires jq to parse the JSON array; otherwise falls through to legacy.
  if [ -n "${NW_TELEGRAM_RATE_LIMIT_WEBHOOKS:-}" ] && command -v jq >/dev/null 2>&1; then
    local sent=0
    local webhook_json
    while IFS= read -r webhook_json; do
      [ -z "${webhook_json}" ] && continue
      local bot_token
      local chat_id
      bot_token=$(printf '%s' "${webhook_json}" | jq -r '.botToken // empty' 2>/dev/null || true)
      chat_id=$(printf '%s' "${webhook_json}" | jq -r '.chatId // empty' 2>/dev/null || true)
      if [ -n "${bot_token}" ] && [ -n "${chat_id}" ]; then
        curl -s -X POST "https://api.telegram.org/bot${bot_token}/sendMessage" \
          --data-urlencode "chat_id=${chat_id}" \
          --data-urlencode "text=${msg}" > /dev/null 2>&1 || true
        sent=1
      fi
    done < <(printf '%s' "${NW_TELEGRAM_RATE_LIMIT_WEBHOOKS}" | jq -c '.[]?' 2>/dev/null || true)

    # Only stop here if at least one webhook had complete credentials.
    if [ "${sent}" -eq 1 ]; then
      return 0
    fi
  fi

  # Legacy single-webhook fallback.
  if [ -z "${NW_TELEGRAM_BOT_TOKEN:-}" ] || [ -z "${NW_TELEGRAM_CHAT_ID:-}" ]; then
    return 0
  fi
  curl -s -X POST "https://api.telegram.org/bot${NW_TELEGRAM_BOT_TOKEN}/sendMessage" \
    --data-urlencode "chat_id=${NW_TELEGRAM_CHAT_ID}" \
    --data-urlencode "text=${msg}" > /dev/null 2>&1 || true
}
|
|
491
|
+
|
|
492
|
+
# ── Board mode issue discovery ────────────────────────────────────────────────
|
|
493
|
+
|
|
494
|
+
# Fetch the next "Ready" issue from the board provider.
# Prints the issue JSON to stdout when one is available.
# Returns 0 on success; 1 when the CLI is missing, its output is empty, or
# the output is not JSON carrying a numeric .number field.
find_eligible_board_issue() {
  local board_cli
  if ! board_cli=$(resolve_night_watch_cli); then
    log "WARN: Cannot find night-watch CLI for board mode"
    return 1
  fi

  local issue_json
  issue_json=$("${board_cli}" board next-issue --column "Ready" --json 2>/dev/null) || true
  if [ -z "${issue_json}" ]; then
    return 1
  fi

  # Require valid JSON with a numeric issue number so plain-text CLI output
  # is never mistaken for a runnable board issue.
  if ! printf '%s' "${issue_json}" | jq -e '.number and (.number | type == "number")' >/dev/null 2>&1; then
    return 1
  fi

  printf '%s' "${issue_json}"
  return 0
}
|