loki-mode 6.62.0 → 6.63.0

This diff compares the published contents of the two package versions as they appear in their public registry; it is provided for informational purposes only.
package/autonomy/run.sh CHANGED
@@ -183,7 +183,7 @@ if [[ -z "${LOKI_RUNNING_FROM_TEMP:-}" ]] && [[ "${BASH_SOURCE[0]}" == "${0}" ]]
  cp "${BASH_SOURCE[0]}" "$TEMP_SCRIPT"
  chmod 700 "$TEMP_SCRIPT"
  # BUG-XC-011: Set trap BEFORE exec so the temp file gets cleaned up
- trap "rm -f '$TEMP_SCRIPT'" EXIT
+ trap 'rm -f "$TEMP_SCRIPT"' EXIT
  export LOKI_RUNNING_FROM_TEMP=1
  export LOKI_ORIGINAL_SCRIPT_DIR="$SCRIPT_DIR"
  export LOKI_ORIGINAL_PROJECT_DIR="$PROJECT_DIR"
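
Note on the trap quoting change: with double quotes the shell expands $TEMP_SCRIPT when the trap is registered, while single quotes defer expansion until the trap fires, and the inner double quotes then protect paths containing spaces. A minimal sketch of the difference (variable value invented for illustration):

    TEMP_SCRIPT=/tmp/loki.1
    trap "rm -f '$TEMP_SCRIPT'" EXIT   # body frozen at registration: rm -f '/tmp/loki.1'
    trap 'rm -f "$TEMP_SCRIPT"' EXIT   # body stays literal; $TEMP_SCRIPT is read when EXIT fires

The deferred form also survives later reassignment of the variable and values containing single quotes.
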
@@ -508,6 +508,13 @@ mapping = {
  'model.fast': 'LOKI_MODEL_FAST',
  'notify.slack': 'LOKI_SLACK_WEBHOOK',
  'notify.discord': 'LOKI_DISCORD_WEBHOOK',
+ 'provider': 'LOKI_PROVIDER',
+ 'issue.provider': 'LOKI_ISSUE_PROVIDER',
+ 'blind_validation': 'LOKI_BLIND_VALIDATION',
+ 'adversarial_testing': 'LOKI_ADVERSARIAL_TESTING',
+ 'spawn_timeout': 'LOKI_SPAWN_TIMEOUT',
+ 'spawn_retries': 'LOKI_SPAWN_RETRIES',
+ 'budget': 'LOKI_BUDGET_LIMIT',
  }
  for key, env_var in mapping.items():
  # Try nested dict lookup first, then flat key, then underscore variant
@@ -2108,8 +2115,10 @@ create_worktree() {
  git -C "$TARGET_DIR" worktree add "$worktree_path" -b "$branch_name" 2>/dev/null && wt_exit=0 || \
  { git -C "$TARGET_DIR" worktree add "$worktree_path" "$branch_name" 2>/dev/null && wt_exit=0; }
  else
- # Track main branch
- git -C "$TARGET_DIR" worktree add "$worktree_path" main 2>/dev/null && wt_exit=0 || \
+ # BUG-PAR-001: Testing/docs worktrees use -b parallel-<stream> main (not bare main checkout)
+ # This avoids "already checked out" errors and keeps each worktree on its own branch
+ git -C "$TARGET_DIR" worktree add "$worktree_path" -b "parallel-${stream_name}" main 2>/dev/null && wt_exit=0 || \
+ { git -C "$TARGET_DIR" worktree add "$worktree_path" "parallel-${stream_name}" 2>/dev/null && wt_exit=0; } || \
  { git -C "$TARGET_DIR" worktree add "$worktree_path" HEAD 2>/dev/null && wt_exit=0; }
  fi

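Context for BUG-PAR-001: git refuses to check out a branch that is already checked out in another worktree, so adding a second worktree directly on main fails. A hedged reproduction (paths illustrative, error text abridged):

    $ git worktree add ../wt-testing main
    fatal: 'main' is already checked out at '/path/to/repo'
    $ git worktree add ../wt-testing -b parallel-testing main   # new branch from main: succeeds
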
@@ -2164,8 +2173,11 @@ remove_worktree() {

  # Remove worktree (with safety check for rm -rf)
  git -C "$TARGET_DIR" worktree remove "$worktree_path" --force 2>/dev/null || {
- # Safety check: only rm -rf if path looks like a worktree (contains .git or is under TARGET_DIR)
- if [[ -n "$worktree_path" && "$worktree_path" != "/" && "$worktree_path" == "$TARGET_DIR"* ]]; then
+ # BUG-PAR-005: Safety check uses dirname with trailing / to prevent prefix-match false positives
+ # e.g. TARGET_DIR=/foo/bar must not match /foo/bar-other
+ local parent_dir
+ parent_dir="$(dirname "$TARGET_DIR")/"
+ if [[ -n "$worktree_path" && "$worktree_path" != "/" && "$worktree_path" == "${parent_dir}"* ]]; then
  rm -rf "$worktree_path" 2>/dev/null
  else
  log_warn "Skipping unsafe rm -rf for path: $worktree_path"
@@ -2198,7 +2210,11 @@ spawn_worktree_session() {
  done

  if [ "$active_count" -ge "$MAX_PARALLEL_SESSIONS" ]; then
- log_warn "Max parallel sessions reached ($MAX_PARALLEL_SESSIONS). Waiting..."
+ # BUG-PAR-014: Max-sessions rejection queues spawn for retry
+ log_warn "Max parallel sessions reached ($MAX_PARALLEL_SESSIONS). Queuing $stream_name for retry."
+ mkdir -p "${TARGET_DIR:-.}/.loki/signals"
+ echo "{\"stream\":\"$stream_name\",\"task\":\"$(echo "$task_prompt" | head -c 200)\",\"timestamp\":\"$(date -u +%Y-%m-%dT%H:%M:%SZ)\"}" \
+ > "${TARGET_DIR:-.}/.loki/signals/SPAWN_QUEUED_${stream_name}"
  return 1
  fi

@@ -2249,19 +2265,31 @@ spawn_worktree_session() {

  # Completion signaling (v6.7.0)
  if [ $_wt_exit -eq 0 ]; then
- # Commit any uncommitted work
- git -C "$worktree_path" add -A 2>/dev/null
+ # BUG-PAR-006: git add excludes .env, *.key, *.pem, credentials*
+ git -C "$worktree_path" add -A \
+ ':!.env' ':!*.key' ':!*.pem' ':!credentials*' 2>/dev/null
  git -C "$worktree_path" commit -m "feat($stream_name): worktree work complete" 2>/dev/null || true
- # Signal merge readiness to main orchestrator
+ # BUG-PAR-008: Signal files written atomically (temp + mv)
  mkdir -p "${TARGET_DIR:-.}/.loki/signals"
- cat > "${TARGET_DIR:-.}/.loki/signals/MERGE_REQUESTED_${stream_name}" <<EOSIG
+ local _sig_tmp
+ _sig_tmp=$(mktemp "${TARGET_DIR:-.}/.loki/signals/.tmp.XXXXXX") || true
+ cat > "$_sig_tmp" <<EOSIG
  {"stream":"$stream_name","branch":"$(git -C "$worktree_path" branch --show-current 2>/dev/null)","worktree":"$worktree_path","timestamp":"$(date -u +%Y-%m-%dT%H:%M:%SZ)","exit_code":$_wt_exit}
  EOSIG
+ mv "$_sig_tmp" "${TARGET_DIR:-.}/.loki/signals/MERGE_REQUESTED_${stream_name}" 2>/dev/null || \
+ cp "$_sig_tmp" "${TARGET_DIR:-.}/.loki/signals/MERGE_REQUESTED_${stream_name}" 2>/dev/null
+ rm -f "$_sig_tmp" 2>/dev/null
  echo "WORKTREE_COMPLETE: $stream_name" >> "$log_file"
  else
+ # BUG-PAR-008: Signal files written atomically (temp + mv)
  mkdir -p "${TARGET_DIR:-.}/.loki/signals"
+ local _fail_tmp
+ _fail_tmp=$(mktemp "${TARGET_DIR:-.}/.loki/signals/.tmp.XXXXXX") || true
  echo "{\"stream\":\"$stream_name\",\"status\":\"failed\",\"exit_code\":$_wt_exit,\"timestamp\":\"$(date -u +%Y-%m-%dT%H:%M:%SZ)\"}" \
- > "${TARGET_DIR:-.}/.loki/signals/WORKTREE_FAILED_${stream_name}"
+ > "$_fail_tmp"
+ mv "$_fail_tmp" "${TARGET_DIR:-.}/.loki/signals/WORKTREE_FAILED_${stream_name}" 2>/dev/null || \
+ cp "$_fail_tmp" "${TARGET_DIR:-.}/.loki/signals/WORKTREE_FAILED_${stream_name}" 2>/dev/null
+ rm -f "$_fail_tmp" 2>/dev/null
  fi
  ) &

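The mktemp-then-mv pattern above leans on rename() being atomic within a single filesystem: because the temp file is created inside the signals directory itself, the mv is a same-filesystem rename and a poller never observes a half-written JSON signal. The pattern in isolation (paths illustrative):

    tmp=$(mktemp .loki/signals/.tmp.XXXXXX)      # same directory => same filesystem
    printf '%s\n' '{"status":"done"}' > "$tmp"   # writes land in the hidden temp file
    mv "$tmp" .loki/signals/MERGE_REQUESTED_x    # atomic rename: readers see all or nothing
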
@@ -2284,9 +2312,12 @@ merge_worktree() {
  return 1
  fi

+ # BUG-PAR-013: Signal file parsing falls back to jq when python3 unavailable
  local branch worktree_path
- branch=$(python3 -c "import json; print(json.load(open('$signal_file'))['branch'])" 2>/dev/null)
- worktree_path=$(python3 -c "import json; print(json.load(open('$signal_file'))['worktree'])" 2>/dev/null)
+ branch=$(python3 -c "import json; print(json.load(open('$signal_file'))['branch'])" 2>/dev/null) || \
+ branch=$(jq -r '.branch' "$signal_file" 2>/dev/null) || true
+ worktree_path=$(python3 -c "import json; print(json.load(open('$signal_file'))['worktree'])" 2>/dev/null) || \
+ worktree_path=$(jq -r '.worktree' "$signal_file" 2>/dev/null) || true

  if [ -z "$branch" ]; then
  log_error "Could not determine branch for: $stream_name"
@@ -2295,9 +2326,16 @@ merge_worktree() {

  log_step "Merging worktree: $stream_name (branch: $branch)"

- # Merge into current branch
+ # BUG-PAR-009: Verify git checkout main before merge
  local current_branch
  current_branch=$(git -C "${TARGET_DIR:-.}" branch --show-current 2>/dev/null)
+ if [ "$current_branch" != "main" ]; then
+ log_info "Switching to main before merge (was on: $current_branch)"
+ if ! git -C "${TARGET_DIR:-.}" checkout main 2>/dev/null; then
+ log_error "Failed to checkout main for merge: $stream_name"
+ return 1
+ fi
+ fi

  if git -C "${TARGET_DIR:-.}" merge --no-ff "$branch" -m "merge($stream_name): auto-merge from parallel worktree" 2>&1; then
  log_info "Merge successful: $stream_name"
@@ -2440,50 +2478,51 @@ Output ONLY the resolved file content with no conflict markers. No explanations.
  }

  # Merge a completed feature branch (with AI conflict resolution)
+ # BUG-PAR-011: Not in a subshell -- uses git -C instead of cd
+ # BUG-PAR-003: Strips feature- prefix to avoid feature/feature-auth double-prefix
  merge_feature() {
  local feature="$1"
- local branch="feature/$feature"
+ # BUG-PAR-003: Strip feature- prefix if present to avoid double-prefix (feature/feature-auth)
+ local clean_feature="${feature#feature-}"
+ local branch="feature/$clean_feature"

- log_step "Merging feature: $feature"
+ log_step "Merging feature: $clean_feature"

- (
- cd "$TARGET_DIR" || exit 1
+ # BUG-PAR-011: Ensure we're on main using git -C (no subshell)
+ git -C "$TARGET_DIR" checkout main 2>/dev/null

- # Ensure we're on main
- git checkout main 2>/dev/null
-
- # Attempt merge with no-ff for clear history
- if git merge "$branch" --no-ff -m "feat: Merge $feature" 2>/dev/null; then
- log_info "Merged cleanly: $feature"
+ # Attempt merge with no-ff for clear history
+ if git -C "$TARGET_DIR" merge "$branch" --no-ff -m "feat: Merge $clean_feature" 2>/dev/null; then
+ log_info "Merged cleanly: $clean_feature"
+ else
+ # Merge has conflicts - try AI resolution
+ log_warn "Merge conflicts detected - attempting AI resolution"
+
+ if resolve_conflicts_with_ai "$clean_feature"; then
+ # AI resolved conflicts, commit the merge
+ git -C "$TARGET_DIR" commit -m "feat: Merge $clean_feature (AI-resolved conflicts)"
+ audit_agent_action "git_commit" "Committed changes" "merge=$clean_feature,resolution=ai"
+ log_info "Merged with AI conflict resolution: $clean_feature"
  else
- # Merge has conflicts - try AI resolution
- log_warn "Merge conflicts detected - attempting AI resolution"
-
- if resolve_conflicts_with_ai "$feature"; then
- # AI resolved conflicts, commit the merge
- git commit -m "feat: Merge $feature (AI-resolved conflicts)"
- audit_agent_action "git_commit" "Committed changes" "merge=$feature,resolution=ai"
- log_info "Merged with AI conflict resolution: $feature"
- else
- # AI resolution failed, abort merge
- log_error "AI conflict resolution failed: $feature"
- git merge --abort 2>/dev/null || true
- return 1
- fi
+ # AI resolution failed, abort merge
+ log_error "AI conflict resolution failed: $clean_feature"
+ git -C "$TARGET_DIR" merge --abort 2>/dev/null || true
+ return 1
  fi
+ fi

- # Remove signal
- rm -f ".loki/signals/MERGE_REQUESTED_$feature"
+ # Remove signal
+ rm -f "$TARGET_DIR/.loki/signals/MERGE_REQUESTED_$feature"

- # Remove worktree
- remove_worktree "feature-$feature"
+ # Remove worktree
+ remove_worktree "feature-$clean_feature"

- # Delete branch
- git branch -d "$branch" 2>/dev/null || true
+ # Delete branch
+ git -C "$TARGET_DIR" branch -d "$branch" 2>/dev/null || true

- # Signal for docs update
- touch ".loki/signals/DOCS_NEEDED"
- )
+ # Signal for docs update
+ mkdir -p "$TARGET_DIR/.loki/signals"
+ touch "$TARGET_DIR/.loki/signals/DOCS_NEEDED"
  }

  # Initialize parallel workflow streams
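
BUG-PAR-011 in a nutshell: a ( cd ... ) subshell isolates the directory change, but it isolates everything else too; variable updates vanish when it exits, and a return inside it only leaves the subshell, a classic source of swallowed failures when commands follow the subshell. A minimal demonstration of the return pitfall (hypothetical function):

    broken() {
        ( cd /nonexistent || exit 1
          return 1 )                        # terminates only the subshell...
        echo "caller never sees the failure"   # ...so execution continues here, status 0
    }

Using git -C keeps every command in the function's own shell, so failures and state changes propagate normally.
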
@@ -2525,7 +2564,9 @@ spawn_feature_stream() {
  local task_description="$2"

  # Check worktree limit
- local worktree_count=$(git -C "$TARGET_DIR" worktree list 2>/dev/null | wc -l)
+ # BUG-PAR-012: Worktree count subtracts 1 for main (git worktree list includes main)
+ local worktree_count_raw=$(git -C "$TARGET_DIR" worktree list 2>/dev/null | wc -l)
+ local worktree_count=$((worktree_count_raw > 0 ? worktree_count_raw - 1 : 0))
  if [ "$worktree_count" -ge "$MAX_WORKTREES" ]; then
  log_warn "Max worktrees reached ($MAX_WORKTREES). Queuing feature: $feature_name"
  return 1
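
Context for BUG-PAR-012: git worktree list always prints the primary checkout on its first line, so a repository with two linked worktrees yields three lines, and a raw wc -l over-counts by one against MAX_WORKTREES. Example session (output abridged):

    $ git -C "$TARGET_DIR" worktree list | wc -l
    3        # primary checkout + 2 linked worktrees
    $ raw=$(git -C "$TARGET_DIR" worktree list | wc -l)
    $ echo $(( raw > 0 ? raw - 1 : 0 ))
    2        # linked worktrees only, as the new code counts
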
@@ -2594,12 +2635,37 @@ run_parallel_orchestrator() {

  # Main orchestrator loop
  local running=true
- trap 'running=false; cleanup_parallel_streams' INT TERM
+ # BUG-PAR-004: Orchestrator trap handles SIGTERM properly (cleanup + restore global trap + exit)
+ trap 'running=false; cleanup_parallel_streams; trap cleanup INT TERM; exit 0' TERM
+ trap 'running=false; cleanup_parallel_streams' INT

  while $running; do
  # Check for merge requests
  check_merge_queue

+ # BUG-PAR-014: Retry queued spawns when sessions free up
+ local active_count=0
+ for _qpid in "${WORKTREE_PIDS[@]}"; do
+ if kill -0 "$_qpid" 2>/dev/null; then
+ ((active_count++))
+ fi
+ done
+ if [ "$active_count" -lt "$MAX_PARALLEL_SESSIONS" ]; then
+ for queued_signal in "${TARGET_DIR:-.}"/.loki/signals/SPAWN_QUEUED_*; do
+ [ -f "$queued_signal" ] || continue
+ local queued_stream
+ queued_stream=$(basename "$queued_signal" | sed 's/SPAWN_QUEUED_//')
+ local queued_task=""
+ queued_task=$(python3 -c "import json; print(json.load(open('$queued_signal'))['task'])" 2>/dev/null) || \
+ queued_task=$(jq -r '.task' "$queued_signal" 2>/dev/null) || true
+ if [ -n "$queued_task" ] && [ -n "${WORKTREE_PATHS[$queued_stream]:-}" ]; then
+ rm -f "$queued_signal"
+ spawn_worktree_session "$queued_stream" "$queued_task" && \
+ log_info "Retried queued spawn: $queued_stream"
+ fi
+ done
+ fi
+
  # Check session health
  for stream in "${!WORKTREE_PIDS[@]}"; do
  local pid="${WORKTREE_PIDS[$stream]}"
@@ -2613,22 +2679,28 @@ run_parallel_orchestrator() {
  local state_file="$TARGET_DIR/.loki/state/parallel-streams.json"
  mkdir -p "$(dirname "$state_file")"

+ # BUG-PAR-007: Empty worktree map produces valid JSON
+ local worktree_json=""
+ if [ ${#WORKTREE_PATHS[@]} -gt 0 ]; then
+ worktree_json=$(for stream in "${!WORKTREE_PATHS[@]}"; do
+ local path="${WORKTREE_PATHS[$stream]}"
+ local pid="null"
+ if [ -n "${WORKTREE_PIDS[$stream]+x}" ]; then
+ pid="${WORKTREE_PIDS[$stream]}"
+ fi
+ local status="stopped"
+ if [ "$pid" != "null" ] && kill -0 "$pid" 2>/dev/null; then
+ status="running"
+ fi
+ echo " \"$stream\": {\"path\": \"$path\", \"pid\": $pid, \"status\": \"$status\"},"
+ done | sed '$ s/,$//')
+ fi
+
  cat > "$state_file" << EOF
  {
  "timestamp": "$(date -u +%Y-%m-%dT%H:%M:%SZ)",
  "worktrees": {
- $(for stream in "${!WORKTREE_PATHS[@]}"; do
- local path="${WORKTREE_PATHS[$stream]}"
- local pid="null"
- if [ -n "${WORKTREE_PIDS[$stream]+x}" ]; then
- pid="${WORKTREE_PIDS[$stream]}"
- fi
- local status="stopped"
- if [ "$pid" != "null" ] && kill -0 "$pid" 2>/dev/null; then
- status="running"
- fi
- echo " \"$stream\": {\"path\": \"$path\", \"pid\": $pid, \"status\": \"$status\"},"
- done | sed '$ s/,$//')
+ ${worktree_json}
  },
  "active_sessions": ${#WORKTREE_PIDS[@]},
  "max_sessions": $MAX_PARALLEL_SESSIONS
@@ -3445,10 +3517,66 @@ track_iteration_start() {
  "provider=${PROVIDER_NAME:-claude}"
  fi

+ # Read next pending task for context (enrich iteration with PRD task details)
+ local next_task_context=""
+ if [[ -f ".loki/queue/pending.json" ]]; then
+ next_task_context=$(python3 -c "
+ import json
+ try:
+     with open('.loki/queue/pending.json') as f:
+         tasks = json.load(f)
+     if isinstance(tasks, dict):
+         tasks = tasks.get('tasks', [])
+     pending = [t for t in tasks if isinstance(t, dict) and t.get('status','pending') == 'pending']
+     if pending:
+         t = pending[0]
+         print(json.dumps({
+             'current_task': t.get('title',''),
+             'description': t.get('description',''),
+             'acceptance_criteria': t.get('acceptance_criteria', []),
+             'user_story': t.get('user_story', ''),
+             'source': t.get('source', ''),
+             'project': t.get('project', '')
+         }))
+ except: pass
+ " 2>/dev/null || true)
+ fi
+
  # Create task entry (escape PRD path for safe JSON embedding)
  local prd_escaped
  prd_escaped=$(printf '%s' "${prd:-Codebase Analysis}" | sed 's/\\/\\\\/g; s/"/\\"/g; s/\t/\\t/g')
- local task_json=$(cat <<EOF
+
+ # Build enriched task JSON with pending task context
+ local task_json
+ if [[ -n "$next_task_context" ]]; then
+ task_json=$(python3 -c "
+ import json, sys
+ ctx = json.loads('''$next_task_context''')
+ task = {
+     'id': 'iteration-$iteration',
+     'type': 'iteration',
+     'title': ctx.get('current_task') or 'Iteration $iteration',
+     'description': ctx.get('description') or 'PRD: ${prd_escaped}',
+     'status': 'in_progress',
+     'priority': 'medium',
+     'startedAt': '$(date -u +%Y-%m-%dT%H:%M:%SZ)',
+     'provider': '${PROVIDER_NAME:-claude}'
+ }
+ if ctx.get('acceptance_criteria'):
+     task['acceptance_criteria'] = ctx['acceptance_criteria']
+ if ctx.get('user_story'):
+     task['user_story'] = ctx['user_story']
+ if ctx.get('source'):
+     task['source'] = ctx['source']
+ if ctx.get('project'):
+     task['project'] = ctx['project']
+ print(json.dumps(task, indent=2))
+ " 2>/dev/null)
+ fi
+
+ # Fallback to basic task JSON if enrichment failed
+ if [[ -z "$task_json" ]]; then
+ task_json=$(cat <<EOF
  {
  "id": "$task_id",
  "type": "iteration",
@@ -3461,6 +3589,7 @@ track_iteration_start() {
  }
  EOF
  )
+ fi

  # Add to in-progress queue
  # BUG-XC-003: Use flock for atomic queue modification
@@ -7834,11 +7963,12 @@ load_state() {

  # Load tasks from queue files for prompt injection
  # Supports both array format [...] and object format {"tasks": [...]}
+ # Enhanced in v6.63.0 to include rich task details (description, acceptance criteria, user stories)
  load_queue_tasks() {
  local task_injection=""

- # Helper Python script to extract and format tasks
- # Handles both formats, truncates long actions, normalizes newlines
+ # Helper Python script to extract and format tasks with rich details
+ # Handles both formats, includes description, acceptance criteria, and user stories
  local extract_script='
  import json
  import sys
@@ -7856,23 +7986,45 @@ def extract_tasks(filepath, prefix):
              if not isinstance(task, dict):
                  continue
              task_id = task.get("id") or "unknown"
-             task_type = task.get("type") or "unknown"
-             payload = task.get("payload", {})
-
-             # Extract action from payload
-             if isinstance(payload, dict):
-                 action = payload.get("action") or payload.get("goal") or ""
+             source = task.get("source", "")
+
+             # Rich PRD-sourced tasks (v6.63.0)
+             if source == "prd" or task_id.startswith("prd-"):
+                 title = task.get("title", "Task")
+                 lines = [f"{prefix}[{i+1}] {task_id}: {title}"]
+                 desc = task.get("description", "")
+                 if desc and desc != title:
+                     # First 300 chars of description, normalized
+                     desc_short = desc.replace("\n", " ").replace("\r", "")[:300]
+                     if len(desc) > 300:
+                         desc_short += "..."
+                     lines.append(f" Description: {desc_short}")
+                 criteria = task.get("acceptance_criteria", [])
+                 if criteria:
+                     criteria_str = "; ".join(str(c) for c in criteria[:5])
+                     lines.append(f" Acceptance: {criteria_str}")
+                 story = task.get("user_story", "")
+                 if story:
+                     lines.append(f" User Story: {story}")
+                 results.append("\n".join(lines))
              else:
-                 action = str(payload) if payload else ""
-
-             # Normalize: remove newlines, truncate to 500 chars
-             action = str(action).replace("\n", " ").replace("\r", "")[:500]
-             if len(str(task.get("payload", {}).get("action", ""))) > 500:
-                 action += "..."
-
-             results.append(f"{prefix}[{i+1}] id={task_id} type={task_type}: {action}")
-
-         return " ".join(results)
+                 # Legacy format: extract action from payload
+                 task_type = task.get("type") or "unknown"
+                 payload = task.get("payload", {})
+                 if isinstance(payload, dict):
+                     action = payload.get("action") or payload.get("goal") or ""
+                 else:
+                     action = str(payload) if payload else ""
+                 # Also check top-level title/description for non-payload tasks
+                 if not action:
+                     action = task.get("title", task.get("description", ""))
+                 # Normalize: remove newlines, truncate to 500 chars
+                 action = str(action).replace("\n", " ").replace("\r", "")[:500]
+                 if len(str(action)) > 500:
+                     action += "..."
+                 results.append(f"{prefix}[{i+1}] id={task_id} type={task_type}: {action}")
+
+         return "\n".join(results)
      except:
          return ""

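To make the new prompt-injection shape concrete: a PRD-sourced task rendered by extract_tasks would come out roughly like this (all field values invented for illustration):

    PENDING[1] prd-001: User authentication
     Description: Email/password login with session handling...
     Acceptance: login form validates email; sessions expire after 24h
     User Story: As a registered user, I want to log in securely, so that ...
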
@@ -7882,11 +8034,11 @@ pending = extract_tasks(".loki/queue/pending.json", "PENDING")

  output = []
  if in_progress:
-     output.append(f"IN-PROGRESS TASKS (EXECUTE THESE): {in_progress}")
+     output.append(f"IN-PROGRESS TASKS (EXECUTE THESE):\n{in_progress}")
  if pending:
-     output.append(f"PENDING: {pending}")
+     output.append(f"PENDING:\n{pending}")

- print(" | ".join(output))
+ print("\n---\n".join(output))
  '

  # First check in-progress tasks (highest priority)
@@ -8628,6 +8780,212 @@ MIROFISH_QUEUE_EOF
  log_info "MiroFish queue population complete"
  }

+ # Populate task queue from plain PRD markdown (if no adapter populated tasks)
+ # Extracts features/requirements from markdown structure into rich task entries
+ populate_prd_queue() {
+ local prd_file="${1:-}"
+ if [[ -z "$prd_file" ]] || [[ ! -f "$prd_file" ]]; then
+ return 0
+ fi
+ # Skip if already populated
+ if [[ -f ".loki/queue/.prd-populated" ]]; then
+ return 0
+ fi
+ # Skip if OpenSpec, BMAD, or MiroFish already populated tasks
+ if [[ -f ".loki/queue/.openspec-populated" ]] || [[ -f ".loki/queue/.bmad-populated" ]] || [[ -f ".loki/queue/.mirofish-populated" ]]; then
+ log_info "Task queue already populated by adapter, skipping PRD parsing"
+ return 0
+ fi
+
+ log_step "Parsing PRD into structured tasks..."
+ mkdir -p ".loki/queue"
+
+ LOKI_PRD_FILE="$prd_file" python3 << 'PRD_PARSE_EOF'
+ import json, re, os, sys
+
+ prd_path = os.environ.get("LOKI_PRD_FILE", "")
+ if not prd_path or not os.path.isfile(prd_path):
+     sys.exit(0)
+
+ with open(prd_path, "r", errors="replace") as f:
+     content = f.read()
+
+ # Parse PRD structure
+ sections = {}
+ current_section = "Overview"
+ current_content = []
+
+ for line in content.split("\n"):
+     heading_match = re.match(r'^#{1,3}\s+(.+)', line)
+     if heading_match:
+         if current_content:
+             sections[current_section] = "\n".join(current_content).strip()
+         current_section = heading_match.group(1).strip()
+         current_content = []
+     else:
+         current_content.append(line)
+ if current_content:
+     sections[current_section] = "\n".join(current_content).strip()
+
+ # Extract project name from first H1
+ project_name = "Project"
+ for line in content.split("\n"):
+     m = re.match(r'^#\s+(.+)', line)
+     if m:
+         project_name = m.group(1).strip()
+         break
+
+ # Find feature/requirement sections
+ feature_keywords = [
+     "features", "requirements", "key features", "core features",
+     "functional requirements", "user stories", "deliverables",
+     "scope", "functionality", "capabilities", "modules"
+ ]
+
+ # Extract features from bullet points in feature sections
+ features = []
+ for section_name, section_content in sections.items():
+     is_feature_section = any(kw in section_name.lower() for kw in feature_keywords)
+     if is_feature_section:
+         # Extract numbered items or bullet points
+         for line in section_content.split("\n"):
+             line = line.strip()
+             # Match: "1. Feature name" or "- Feature name" or "* Feature name"
+             m = re.match(r'^(?:\d+[\.\)]\s*|\-\s+|\*\s+)(.+)', line)
+             if m:
+                 feature_text = m.group(1).strip()
+                 if len(feature_text) > 10:  # Skip very short lines
+                     features.append({
+                         "title": feature_text,
+                         "section": section_name,
+                     })
+
+ # If no bullet features found, extract from ## headings that look like features
+ if not features:
+     skip_sections = {"overview", "introduction", "summary", "target audience",
+                      "tech stack", "technology", "deployment", "timeline",
+                      "out of scope", "non-functional", "appendix", "references",
+                      "problem statement", "value proposition", "background"}
+     for section_name, section_content in sections.items():
+         if section_name.lower() not in skip_sections and len(section_content) > 20:
+             features.append({
+                 "title": section_name,
+                 "section": "Requirements",
+             })
+
+ if not features:
+     print("No features extracted from PRD", file=sys.stderr)
+     sys.exit(0)
+
+ # Build acceptance criteria from section content
+ def extract_acceptance_criteria(section_name, sections):
+     """Extract testable criteria from section content."""
+     criteria = []
+     content = sections.get(section_name, "")
+     for line in content.split("\n"):
+         line = line.strip()
+         if line.startswith(("- ", "* ", " - ", " * ")):
+             text = re.sub(r'^[\-\*]\s+', '', line).strip()
+             if len(text) > 5:
+                 criteria.append(text)
+     # Also check for acceptance criteria section
+     for key in ["acceptance criteria", "success criteria", "definition of done"]:
+         for sname, scontent in sections.items():
+             if key in sname.lower():
+                 for cline in scontent.split("\n"):
+                     cline = cline.strip()
+                     m = re.match(r'^(?:\d+[\.\)]\s*|\-\s+|\*\s+|\[.\]\s*)(.+)', cline)
+                     if m:
+                         criteria.append(m.group(1).strip())
+     return criteria[:10]  # Cap at 10
+
+ # Determine priority based on position (earlier = higher)
+ def get_priority(index, total):
+     if total <= 3:
+         return "high"
+     third = total / 3
+     if index < third:
+         return "high"
+     elif index < 2 * third:
+         return "medium"
+     return "low"
+
+ # Build task queue entries
+ pending_path = ".loki/queue/pending.json"
+ existing = []
+ if os.path.exists(pending_path):
+     try:
+         with open(pending_path, "r") as f:
+             data = json.load(f)
+         if isinstance(data, list):
+             existing = data
+         elif isinstance(data, dict) and "tasks" in data:
+             existing = data["tasks"]
+     except (json.JSONDecodeError, FileNotFoundError):
+         existing = []
+
+ existing_ids = {t.get("id") for t in existing if isinstance(t, dict)}
+ added = 0
+
+ for i, feat in enumerate(features):
+     task_id = f"prd-{i+1:03d}"
+     if task_id in existing_ids:
+         continue
+
+     criteria = extract_acceptance_criteria(feat["section"], sections)
+
+     # Build a rich description
+     section_content = sections.get(feat["section"], "")
+     desc_parts = [feat["title"]]
+     if section_content and len(section_content) > len(feat["title"]):
+         # Include relevant context (first 500 chars of section)
+         desc_parts.append(section_content[:500])
+
+     task = {
+         "id": task_id,
+         "title": feat["title"],
+         "description": "\n".join(desc_parts),
+         "priority": get_priority(i, len(features)),
+         "status": "pending",
+         "source": "prd",
+         "project": project_name,
+     }
+
+     if criteria:
+         task["acceptance_criteria"] = criteria
+
+     # Add user story format
+     # Try to extract target audience for user story
+     audience = "a user"
+     for key in ["target audience", "users", "user personas", "audience"]:
+         for sname in sections:
+             if key in sname.lower():
+                 # Extract first line
+                 first_line = sections[sname].split("\n")[0].strip()
+                 if first_line:
+                     audience = first_line[:100]
+                 break
+
+     task["user_story"] = f"As {audience}, I want to {feat['title'].lower().rstrip('.')}, so that the product delivers its core value."
+
+     existing.append(task)
+     added += 1
+
+ with open(pending_path, "w") as f:
+     json.dump(existing, f, indent=2)
+
+ print(f"Extracted {added} tasks from PRD ({len(features)} features found)", file=sys.stderr)
+ PRD_PARSE_EOF
+
+ if [[ $? -ne 0 ]]; then
+ log_warn "Failed to parse PRD into tasks"
+ return 0
+ fi
+
+ touch ".loki/queue/.prd-populated"
+ log_info "PRD task parsing complete"
+ }
+
  #===============================================================================
  # Main Autonomous Loop
  #===============================================================================
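
A rough end-to-end sketch of what populate_prd_queue extracts, with both the PRD and the resulting queue entry invented for illustration. Given a PRD.md containing:

    # My App
    ## Key Features
    - User authentication with email and password

the parser would append roughly this entry to .loki/queue/pending.json (abridged; priority is "high" because with three or fewer features everything is high):

    {
      "id": "prd-001",
      "title": "User authentication with email and password",
      "priority": "high",
      "status": "pending",
      "source": "prd",
      "project": "My App"
    }
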
@@ -8740,6 +9098,9 @@ run_autonomous() {
  # Populate task queue from MiroFish advisory (if present, runs once)
  populate_mirofish_queue

+ # Populate task queue from PRD (if no adapters already populated, runs once)
+ populate_prd_queue "$prd_path"
+
  # Check max iterations before starting
  if check_max_iterations; then
  log_error "Max iterations already reached. Reset with: rm .loki/autonomy-state.json"