shipwright-cli 1.9.0 → 1.10.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (53)
  1. package/.claude/hooks/post-tool-use.sh +12 -5
  2. package/package.json +2 -2
  3. package/scripts/sw +9 -1
  4. package/scripts/sw-adversarial.sh +1 -1
  5. package/scripts/sw-architecture-enforcer.sh +1 -1
  6. package/scripts/sw-checkpoint.sh +79 -1
  7. package/scripts/sw-cleanup.sh +192 -7
  8. package/scripts/sw-connect.sh +1 -1
  9. package/scripts/sw-cost.sh +1 -1
  10. package/scripts/sw-daemon.sh +409 -37
  11. package/scripts/sw-dashboard.sh +1 -1
  12. package/scripts/sw-developer-simulation.sh +1 -1
  13. package/scripts/sw-docs.sh +1 -1
  14. package/scripts/sw-doctor.sh +1 -1
  15. package/scripts/sw-fix.sh +1 -1
  16. package/scripts/sw-fleet.sh +1 -1
  17. package/scripts/sw-github-checks.sh +1 -1
  18. package/scripts/sw-github-deploy.sh +1 -1
  19. package/scripts/sw-github-graphql.sh +1 -1
  20. package/scripts/sw-heartbeat.sh +1 -1
  21. package/scripts/sw-init.sh +1 -1
  22. package/scripts/sw-intelligence.sh +1 -1
  23. package/scripts/sw-jira.sh +1 -1
  24. package/scripts/sw-launchd.sh +4 -4
  25. package/scripts/sw-linear.sh +1 -1
  26. package/scripts/sw-logs.sh +1 -1
  27. package/scripts/sw-loop.sh +444 -49
  28. package/scripts/sw-memory.sh +198 -3
  29. package/scripts/sw-pipeline-composer.sh +8 -8
  30. package/scripts/sw-pipeline-vitals.sh +1096 -0
  31. package/scripts/sw-pipeline.sh +1692 -84
  32. package/scripts/sw-predictive.sh +1 -1
  33. package/scripts/sw-prep.sh +1 -1
  34. package/scripts/sw-ps.sh +4 -3
  35. package/scripts/sw-reaper.sh +5 -3
  36. package/scripts/sw-remote.sh +1 -1
  37. package/scripts/sw-self-optimize.sh +109 -8
  38. package/scripts/sw-session.sh +31 -9
  39. package/scripts/sw-setup.sh +1 -1
  40. package/scripts/sw-status.sh +192 -1
  41. package/scripts/sw-templates.sh +1 -1
  42. package/scripts/sw-tmux.sh +1 -1
  43. package/scripts/sw-tracker.sh +1 -1
  44. package/scripts/sw-upgrade.sh +1 -1
  45. package/scripts/sw-worktree.sh +1 -1
  46. package/templates/pipelines/autonomous.json +8 -1
  47. package/templates/pipelines/cost-aware.json +21 -0
  48. package/templates/pipelines/deployed.json +40 -6
  49. package/templates/pipelines/enterprise.json +16 -2
  50. package/templates/pipelines/fast.json +19 -0
  51. package/templates/pipelines/full.json +16 -2
  52. package/templates/pipelines/hotfix.json +19 -0
  53. package/templates/pipelines/standard.json +19 -0
@@ -6,7 +6,7 @@
6
6
  set -euo pipefail
7
7
  trap 'echo "ERROR: $BASH_SOURCE:$LINENO exited with status $?" >&2' ERR
8
8
 
9
- VERSION="1.9.0"
9
+ VERSION="1.10.0"
10
10
  SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
11
11
  REPO_DIR="$(cd "$SCRIPT_DIR/.." && pwd)"
12
12
 
@@ -218,7 +218,7 @@ memory_capture_failure() {
218
218
 
219
219
  (
220
220
  if command -v flock &>/dev/null; then
221
- flock -w 10 200 2>/dev/null || { warn "Memory lock timeout"; }
221
+ flock -w 10 200 2>/dev/null || { warn "Memory lock timeout"; return 1; }
222
222
  fi
223
223
  local tmp_file
224
224
  tmp_file=$(mktemp "${failures_file}.tmp.XXXXXX")
@@ -284,7 +284,7 @@ memory_record_fix_outcome() {
284
284
 
285
285
  (
286
286
  if command -v flock &>/dev/null; then
287
- flock -w 10 200 2>/dev/null || { warn "Memory lock timeout"; }
287
+ flock -w 10 200 2>/dev/null || { warn "Memory lock timeout"; return 1; }
288
288
  fi
289
289
  local tmp_file
290
290
  tmp_file=$(mktemp "${failures_file}.tmp.XXXXXX")
@@ -312,6 +312,178 @@ memory_record_fix_outcome() {
312
312
  "resolved=${fix_resolved}"
313
313
  }
314
314
 
315
+ # memory_track_fix <error_sig> <success_bool>
316
+ # Convenience wrapper for memory_record_fix_outcome
317
+ memory_track_fix() {
318
+ local error_sig="${1:-}"
319
+ local success="${2:-false}"
320
+ [[ -z "$error_sig" ]] && return 0
321
+ memory_record_fix_outcome "$error_sig" "true" "$success" 2>/dev/null || true
322
+ }
323
+
324
+ # memory_query_fix_for_error <error_pattern>
325
+ # Searches failure memory for known fixes matching the given error pattern.
326
+ # Returns JSON with the best fix (highest effectiveness rate) or empty.
327
+ memory_query_fix_for_error() {
328
+ local error_pattern="$1"
329
+ [[ -z "$error_pattern" ]] && return 0
330
+
331
+ ensure_memory_dir
332
+ local mem_dir
333
+ mem_dir="$(repo_memory_dir)"
334
+ local failures_file="$mem_dir/failures.json"
335
+
336
+ [[ ! -f "$failures_file" ]] && return 0
337
+
338
+ # Search for matching failures with successful fixes
339
+ local matches
340
+ matches=$(jq -r --arg pat "$error_pattern" '
341
+ [.failures[]
342
+ | select(.pattern != null and .pattern != "")
343
+ | select(.pattern | test($pat; "i") // false)
344
+ | select(.fix != null and .fix != "")
345
+ | select((.fix_effectiveness_rate // 0) > 30)
346
+ | {fix, fix_effectiveness_rate, seen_count, category, stage, pattern}]
347
+ | sort_by(-.fix_effectiveness_rate)
348
+ | .[0] // null
349
+ ' "$failures_file" 2>/dev/null) || true
350
+
351
+ if [[ -n "$matches" && "$matches" != "null" ]]; then
352
+ echo "$matches"
353
+ fi
354
+ }
355
+
356
+ # memory_closed_loop_inject <error_sig>
357
+ # Combines error → memory → fix into injectable text for build retries.
358
+ # Returns a one-line summary suitable for goal augmentation.
359
+ memory_closed_loop_inject() {
360
+ local error_sig="$1"
361
+ [[ -z "$error_sig" ]] && return 0
362
+
363
+ local fix_json
364
+ fix_json=$(memory_query_fix_for_error "$error_sig") || true
365
+ [[ -z "$fix_json" || "$fix_json" == "null" ]] && return 0
366
+
367
+ local fix_text success_rate category
368
+ fix_text=$(echo "$fix_json" | jq -r '.fix // ""')
369
+ success_rate=$(echo "$fix_json" | jq -r '.fix_effectiveness_rate // 0')
370
+ category=$(echo "$fix_json" | jq -r '.category // "unknown"')
371
+
372
+ [[ -z "$fix_text" ]] && return 0
373
+
374
+ echo "[$category, ${success_rate}% success rate] $fix_text"
375
+ }
376
+
377
+ memory_capture_failure_from_log() {
378
+ local artifacts_dir="${1:-}"
379
+ local error_log="${artifacts_dir}/error-log.jsonl"
380
+ [[ ! -f "$error_log" ]] && return 0
381
+
382
+ ensure_memory_dir
383
+ local mem_dir
384
+ mem_dir="$(repo_memory_dir)"
385
+ local failures_file="$mem_dir/failures.json"
386
+
387
+ # Read last 50 entries
388
+ local entries
389
+ entries=$(tail -50 "$error_log" 2>/dev/null) || return 0
390
+ [[ -z "$entries" ]] && return 0
391
+
392
+ local captured=0
393
+ while IFS= read -r line; do
394
+ [[ -z "$line" ]] && continue
395
+
396
+ local err_type err_text
397
+ err_type=$(echo "$line" | jq -r '.type // "unknown"' 2>/dev/null) || continue
398
+ err_text=$(echo "$line" | jq -r '.error // ""' 2>/dev/null) || continue
399
+ [[ -z "$err_text" ]] && continue
400
+
401
+ # Deduplicate: skip if this exact pattern already exists in failures
402
+ local pattern_short
403
+ pattern_short=$(echo "$err_text" | head -1 | cut -c1-200)
404
+ local already_exists
405
+ already_exists=$(jq --arg pat "$pattern_short" \
406
+ '[.failures[] | select(.pattern == $pat)] | length' \
407
+ "$failures_file" 2>/dev/null || echo "0")
408
+ if [[ "${already_exists:-0}" -gt 0 ]]; then
409
+ continue
410
+ fi
411
+
412
+ # Feed into memory_capture_failure with the error type as stage
413
+ memory_capture_failure "$err_type" "$err_text" 2>/dev/null || true
414
+ captured=$((captured + 1))
415
+ done <<< "$entries"
416
+
417
+ if [[ "$captured" -gt 0 ]]; then
418
+ emit_event "memory.error_log_processed" "captured=$captured"
419
+ fi
420
+ }
421
+
422
+ # _memory_aggregate_global
423
+ # Promotes high-frequency failure patterns to global.json for cross-repo learning
424
+ _memory_aggregate_global() {
425
+ ensure_memory_dir
426
+ local mem_dir
427
+ mem_dir="$(repo_memory_dir)"
428
+ local failures_file="$mem_dir/failures.json"
429
+ [[ ! -f "$failures_file" ]] && return 0
430
+
431
+ local global_file="$GLOBAL_MEMORY"
432
+ [[ ! -f "$global_file" ]] && return 0
433
+
434
+ # Find patterns with seen_count >= 3
435
+ local frequent_patterns
436
+ frequent_patterns=$(jq -r '.failures[] | select(.seen_count >= 3) | .pattern' \
437
+ "$failures_file" 2>/dev/null) || return 0
438
+ [[ -z "$frequent_patterns" ]] && return 0
439
+
440
+ local promoted=0
441
+ while IFS= read -r pattern; do
442
+ [[ -z "$pattern" ]] && continue
443
+
444
+ # Check if already in global
445
+ local exists
446
+ exists=$(jq --arg p "$pattern" \
447
+ '[.common_patterns[] | select(.pattern == $p)] | length' \
448
+ "$global_file" 2>/dev/null || echo "0")
449
+ if [[ "${exists:-0}" -gt 0 ]]; then
450
+ continue
451
+ fi
452
+
453
+ # Add to global, cap at 100 entries
454
+ local tmp_global
455
+ tmp_global=$(mktemp "${global_file}.tmp.XXXXXX")
456
+ jq --arg p "$pattern" \
457
+ --arg ts "$(now_iso)" \
458
+ --arg cat "general" \
459
+ '.common_patterns += [{pattern: $p, promoted_at: $ts, category: $cat, source: "aggregate"}] |
460
+ .common_patterns = (.common_patterns | .[-100:])' \
461
+ "$global_file" > "$tmp_global" && mv "$tmp_global" "$global_file" || rm -f "$tmp_global"
462
+ promoted=$((promoted + 1))
463
+ done <<< "$frequent_patterns"
464
+
465
+ if [[ "$promoted" -gt 0 ]]; then
466
+ emit_event "memory.global_aggregated" "promoted=$promoted"
467
+ fi
468
+ }
469
+
470
+ # memory_finalize_pipeline <state_file> <artifacts_dir>
471
+ # Single call that closes multiple feedback loops at pipeline completion
472
+ memory_finalize_pipeline() {
473
+ local state_file="${1:-}"
474
+ local artifacts_dir="${2:-}"
475
+ [[ -z "$state_file" || ! -f "$state_file" ]] && return 0
476
+
477
+ # Step 1: Capture pipeline-level learnings
478
+ memory_capture_pipeline "$state_file" "$artifacts_dir" 2>/dev/null || true
479
+
480
+ # Step 2: Process error log into failures.json
481
+ memory_capture_failure_from_log "$artifacts_dir" 2>/dev/null || true
482
+
483
+ # Step 3: Aggregate high-frequency patterns to global memory
484
+ _memory_aggregate_global 2>/dev/null || true
485
+ }
486
+
315
487
  # memory_analyze_failure <log_file> <stage>
316
488
  # Uses Claude to analyze a pipeline failure and fill in root_cause/fix/category.
317
489
  memory_analyze_failure() {
@@ -793,6 +965,29 @@ memory_inject_context() {
793
965
  ;;
794
966
  esac
795
967
 
968
+ # ── Cross-repo memory injection (global learnings) ──
969
+ if [[ -f "$GLOBAL_MEMORY" ]]; then
970
+ local global_patterns
971
+ global_patterns=$(jq -r --arg stage "$stage_id" '
972
+ .common_patterns // [] | .[] |
973
+ select(.category == $stage or .category == "general" or .category == null) |
974
+ .summary // .description // empty
975
+ ' "$GLOBAL_MEMORY" 2>/dev/null | head -5 || true)
976
+
977
+ local cross_repo_learnings
978
+ cross_repo_learnings=$(jq -r '
979
+ .cross_repo_learnings // [] | .[-5:][] |
980
+ "- [\(.repo // "unknown")] \(.type // "learning"): bugs=\(.bugs // 0), warnings=\(.warnings // 0)"
981
+ ' "$GLOBAL_MEMORY" 2>/dev/null | head -5 || true)
982
+
983
+ if [[ -n "$global_patterns" || -n "$cross_repo_learnings" ]]; then
984
+ echo ""
985
+ echo "## Cross-Repo Learnings (Global)"
986
+ [[ -n "$global_patterns" ]] && echo "$global_patterns"
987
+ [[ -n "$cross_repo_learnings" ]] && echo "$cross_repo_learnings"
988
+ fi
989
+ fi
990
+
796
991
  echo ""
797
992
  emit_event "memory.inject" "stage=${stage_id}"
798
993
  }
@@ -6,7 +6,7 @@
6
6
  set -euo pipefail
7
7
  trap 'echo "ERROR: $BASH_SOURCE:$LINENO exited with status $?" >&2' ERR
8
8
 
9
- VERSION="1.9.0"
9
+ VERSION="1.10.0"
10
10
  SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
11
11
  REPO_DIR="$(cd "$SCRIPT_DIR/.." && pwd)"
12
12
 
@@ -114,7 +114,7 @@ composer_create_pipeline() {
114
114
  else
115
115
  repo_context="$ci_history"
116
116
  fi
117
- info "CI history: p90 duration=${p90_timeout}s — using for timeout tuning"
117
+ info "CI history: p90 duration=${p90_timeout}s — using for timeout tuning" >&2
118
118
  fi
119
119
 
120
120
  # Try intelligence-driven composition
@@ -122,7 +122,7 @@ composer_create_pipeline() {
122
122
  [[ -n "$issue_analysis" ]] && \
123
123
  type intelligence_compose_pipeline &>/dev/null; then
124
124
 
125
- info "Composing pipeline with intelligence engine..."
125
+ info "Composing pipeline with intelligence engine..." >&2
126
126
 
127
127
  local composed=""
128
128
  composed=$(intelligence_compose_pipeline "$issue_analysis" "$repo_context" "$budget_json" 2>/dev/null) || true
@@ -138,7 +138,7 @@ composer_create_pipeline() {
138
138
 
139
139
  local stage_count
140
140
  stage_count=$(echo "$composed" | jq '.stages | length')
141
- success "Composed pipeline: ${stage_count} stages"
141
+ success "Composed pipeline: ${stage_count} stages" >&2
142
142
 
143
143
  emit_event "composer.created" \
144
144
  "stages=${stage_count}" \
@@ -148,17 +148,17 @@ composer_create_pipeline() {
148
148
  echo "$output_file"
149
149
  return 0
150
150
  else
151
- warn "Intelligence pipeline failed validation, falling back to template"
151
+ warn "Intelligence pipeline failed validation, falling back to template" >&2
152
152
  fi
153
153
  else
154
- warn "Intelligence composition returned invalid JSON, falling back to template"
154
+ warn "Intelligence composition returned invalid JSON, falling back to template" >&2
155
155
  fi
156
156
  fi
157
157
 
158
158
  # Fallback: use static template
159
159
  local fallback_template="${TEMPLATES_DIR}/standard.json"
160
160
  if [[ -f "$fallback_template" ]]; then
161
- info "Using fallback template: standard"
161
+ info "Using fallback template: standard" >&2
162
162
  local tmp_file
163
163
  tmp_file=$(mktemp "${output_file}.XXXXXX")
164
164
  cp "$fallback_template" "$tmp_file"
@@ -173,7 +173,7 @@ composer_create_pipeline() {
173
173
  return 0
174
174
  fi
175
175
 
176
- error "No templates available for fallback"
176
+ error "No templates available for fallback" >&2
177
177
  return 1
178
178
  }
179
179