shipwright-cli 2.2.0 → 2.2.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +15 -16
- package/config/policy.schema.json +104 -29
- package/docs/AGI-PLATFORM-PLAN.md +11 -7
- package/docs/AGI-WHATS-NEXT.md +26 -20
- package/docs/README.md +2 -0
- package/package.json +1 -1
- package/scripts/check-version-consistency.sh +72 -0
- package/scripts/lib/daemon-adaptive.sh +610 -0
- package/scripts/lib/daemon-dispatch.sh +489 -0
- package/scripts/lib/daemon-failure.sh +387 -0
- package/scripts/lib/daemon-patrol.sh +1113 -0
- package/scripts/lib/daemon-poll.sh +1202 -0
- package/scripts/lib/daemon-state.sh +550 -0
- package/scripts/lib/daemon-triage.sh +490 -0
- package/scripts/lib/helpers.sh +81 -1
- package/scripts/lib/pipeline-detection.sh +278 -0
- package/scripts/lib/pipeline-github.sh +196 -0
- package/scripts/lib/pipeline-intelligence.sh +1706 -0
- package/scripts/lib/pipeline-quality-checks.sh +1054 -0
- package/scripts/lib/pipeline-quality.sh +11 -0
- package/scripts/lib/pipeline-stages.sh +2508 -0
- package/scripts/lib/pipeline-state.sh +529 -0
- package/scripts/sw +26 -4
- package/scripts/sw-activity.sh +1 -1
- package/scripts/sw-adaptive.sh +2 -2
- package/scripts/sw-adversarial.sh +1 -1
- package/scripts/sw-architecture-enforcer.sh +1 -1
- package/scripts/sw-auth.sh +1 -1
- package/scripts/sw-autonomous.sh +1 -1
- package/scripts/sw-changelog.sh +1 -1
- package/scripts/sw-checkpoint.sh +1 -1
- package/scripts/sw-ci.sh +1 -1
- package/scripts/sw-cleanup.sh +1 -1
- package/scripts/sw-code-review.sh +1 -1
- package/scripts/sw-connect.sh +1 -1
- package/scripts/sw-context.sh +1 -1
- package/scripts/sw-cost.sh +1 -1
- package/scripts/sw-daemon.sh +52 -4816
- package/scripts/sw-dashboard.sh +1 -1
- package/scripts/sw-db.sh +1 -1
- package/scripts/sw-decompose.sh +1 -1
- package/scripts/sw-deps.sh +1 -1
- package/scripts/sw-developer-simulation.sh +1 -1
- package/scripts/sw-discovery.sh +1 -1
- package/scripts/sw-doc-fleet.sh +1 -1
- package/scripts/sw-docs-agent.sh +1 -1
- package/scripts/sw-docs.sh +1 -1
- package/scripts/sw-doctor.sh +42 -1
- package/scripts/sw-dora.sh +1 -1
- package/scripts/sw-durable.sh +1 -1
- package/scripts/sw-e2e-orchestrator.sh +1 -1
- package/scripts/sw-eventbus.sh +1 -1
- package/scripts/sw-feedback.sh +1 -1
- package/scripts/sw-fix.sh +1 -1
- package/scripts/sw-fleet-discover.sh +1 -1
- package/scripts/sw-fleet-viz.sh +3 -3
- package/scripts/sw-fleet.sh +1 -1
- package/scripts/sw-github-app.sh +1 -1
- package/scripts/sw-github-checks.sh +1 -1
- package/scripts/sw-github-deploy.sh +1 -1
- package/scripts/sw-github-graphql.sh +1 -1
- package/scripts/sw-guild.sh +1 -1
- package/scripts/sw-heartbeat.sh +1 -1
- package/scripts/sw-hygiene.sh +1 -1
- package/scripts/sw-incident.sh +1 -1
- package/scripts/sw-init.sh +1 -1
- package/scripts/sw-instrument.sh +1 -1
- package/scripts/sw-intelligence.sh +1 -1
- package/scripts/sw-jira.sh +1 -1
- package/scripts/sw-launchd.sh +1 -1
- package/scripts/sw-linear.sh +1 -1
- package/scripts/sw-logs.sh +1 -1
- package/scripts/sw-loop.sh +1 -1
- package/scripts/sw-memory.sh +1 -1
- package/scripts/sw-mission-control.sh +1 -1
- package/scripts/sw-model-router.sh +1 -1
- package/scripts/sw-otel.sh +4 -4
- package/scripts/sw-oversight.sh +1 -1
- package/scripts/sw-pipeline-composer.sh +1 -1
- package/scripts/sw-pipeline-vitals.sh +1 -1
- package/scripts/sw-pipeline.sh +23 -56
- package/scripts/sw-pipeline.sh.mock +7 -0
- package/scripts/sw-pm.sh +1 -1
- package/scripts/sw-pr-lifecycle.sh +1 -1
- package/scripts/sw-predictive.sh +1 -1
- package/scripts/sw-prep.sh +1 -1
- package/scripts/sw-ps.sh +1 -1
- package/scripts/sw-public-dashboard.sh +1 -1
- package/scripts/sw-quality.sh +1 -1
- package/scripts/sw-reaper.sh +1 -1
- package/scripts/sw-recruit.sh +9 -1
- package/scripts/sw-regression.sh +1 -1
- package/scripts/sw-release-manager.sh +1 -1
- package/scripts/sw-release.sh +1 -1
- package/scripts/sw-remote.sh +1 -1
- package/scripts/sw-replay.sh +1 -1
- package/scripts/sw-retro.sh +1 -1
- package/scripts/sw-scale.sh +8 -5
- package/scripts/sw-security-audit.sh +1 -1
- package/scripts/sw-self-optimize.sh +158 -7
- package/scripts/sw-session.sh +1 -1
- package/scripts/sw-setup.sh +1 -1
- package/scripts/sw-standup.sh +3 -3
- package/scripts/sw-status.sh +1 -1
- package/scripts/sw-strategic.sh +1 -1
- package/scripts/sw-stream.sh +8 -2
- package/scripts/sw-swarm.sh +7 -10
- package/scripts/sw-team-stages.sh +1 -1
- package/scripts/sw-templates.sh +1 -1
- package/scripts/sw-testgen.sh +1 -1
- package/scripts/sw-tmux-pipeline.sh +1 -1
- package/scripts/sw-tmux.sh +1 -1
- package/scripts/sw-trace.sh +1 -1
- package/scripts/sw-tracker.sh +24 -6
- package/scripts/sw-triage.sh +1 -1
- package/scripts/sw-upgrade.sh +1 -1
- package/scripts/sw-ux.sh +1 -1
- package/scripts/sw-webhook.sh +1 -1
- package/scripts/sw-widgets.sh +1 -1
- package/scripts/sw-worktree.sh +1 -1
|
@@ -0,0 +1,278 @@
|
|
|
1
|
+
# pipeline-detection.sh — Auto-detection (test cmd, lang, reviewers, task type) for sw-pipeline.sh
# Source from sw-pipeline.sh. Requires SCRIPT_DIR, REPO_DIR.
# Source guard: return immediately if this library was already loaded
# in the current shell, so repeated `source` calls are harmless.
[[ -n "${_PIPELINE_DETECTION_LOADED:-}" ]] && return 0
_PIPELINE_DETECTION_LOADED=1
|
|
5
|
+
|
|
6
|
+
# Best-effort detection of the project's test command.
# Probes well-known manifest/config files under $PROJECT_ROOT in
# priority order (Node, Python, Rust, Go, Ruby, Maven, Gradle,
# Makefile) and prints the command on stdout. Prints an empty string
# when nothing matches.
detect_test_cmd() {
  local project_dir="$PROJECT_ROOT"

  # Node.js: only trust package.json when a real "test" script exists
  # (npm's scaffolded placeholder contains "no test specified").
  if [[ -f "$project_dir/package.json" ]]; then
    local test_script
    test_script=$(jq -r '.scripts.test // ""' "$project_dir/package.json" 2>/dev/null)
    if [[ -n "$test_script" && "$test_script" != "null" && "$test_script" != *"no test specified"* ]]; then
      # Choose the package manager whose lockfile is present.
      if [[ -f "$project_dir/pnpm-lock.yaml" ]]; then
        printf '%s\n' "pnpm test"; return
      fi
      if [[ -f "$project_dir/yarn.lock" ]]; then
        printf '%s\n' "yarn test"; return
      fi
      if [[ -f "$project_dir/bun.lockb" ]]; then
        printf '%s\n' "bun test"; return
      fi
      printf '%s\n' "npm test"; return
    fi
  fi

  # Python: pytest when configured in pyproject.toml or a tests/ dir exists.
  if [[ -f "$project_dir/pytest.ini" || -f "$project_dir/pyproject.toml" || -f "$project_dir/setup.py" ]]; then
    if [[ -f "$project_dir/pyproject.toml" ]] && grep -q "pytest" "$project_dir/pyproject.toml" 2>/dev/null; then
      printf '%s\n' "pytest"; return
    elif [[ -d "$project_dir/tests" ]]; then
      printf '%s\n' "pytest"; return
    fi
  fi

  # Rust
  if [[ -f "$project_dir/Cargo.toml" ]]; then
    printf '%s\n' "cargo test"; return
  fi

  # Go
  if [[ -f "$project_dir/go.mod" ]]; then
    printf '%s\n' "go test ./..."; return
  fi

  # Ruby: prefer rspec when the Gemfile mentions it.
  if [[ -f "$project_dir/Gemfile" ]]; then
    if grep -q "rspec" "$project_dir/Gemfile" 2>/dev/null; then
      printf '%s\n' "bundle exec rspec"; return
    fi
    printf '%s\n' "bundle exec rake test"; return
  fi

  # Java/Kotlin (Maven)
  if [[ -f "$project_dir/pom.xml" ]]; then
    printf '%s\n' "mvn test"; return
  fi

  # Java/Kotlin (Gradle)
  if [[ -f "$project_dir/build.gradle" || -f "$project_dir/build.gradle.kts" ]]; then
    printf '%s\n' "./gradlew test"; return
  fi

  # Makefile with an explicit test target
  if [[ -f "$project_dir/Makefile" ]] && grep -q "^test:" "$project_dir/Makefile" 2>/dev/null; then
    printf '%s\n' "make test"; return
  fi

  # Fallback: nothing detected.
  printf '%s\n' ""
}
|
|
72
|
+
|
|
73
|
+
# Detect the project's primary language/framework.
# Fast path: probe well-known manifest files under $PROJECT_ROOT.
# Slow path (only when still unknown AND the intelligence layer plus the
# `claude` CLI are available): ask Claude to classify from the names of
# config files in the project root. Prints one lowercase token
# (e.g. typescript, nextjs, react, nodejs, rust, go, python, ruby, java)
# or "unknown".
detect_project_lang() {
  local root="$PROJECT_ROOT"
  local detected=""

  # Fast heuristic detection (grep-based)
  if [[ -f "$root/package.json" ]]; then
    # NOTE: "typescript" is checked first, so a TS Next/React app reports
    # "typescript" rather than the framework.
    if grep -q "typescript" "$root/package.json" 2>/dev/null; then
      detected="typescript"
    elif grep -q "\"next\"" "$root/package.json" 2>/dev/null; then
      detected="nextjs"
    elif grep -q "\"react\"" "$root/package.json" 2>/dev/null; then
      detected="react"
    else
      detected="nodejs"
    fi
  elif [[ -f "$root/Cargo.toml" ]]; then
    detected="rust"
  elif [[ -f "$root/go.mod" ]]; then
    detected="go"
  elif [[ -f "$root/pyproject.toml" || -f "$root/setup.py" || -f "$root/requirements.txt" ]]; then
    detected="python"
  elif [[ -f "$root/Gemfile" ]]; then
    detected="ruby"
  elif [[ -f "$root/pom.xml" || -f "$root/build.gradle" ]]; then
    detected="java"
  else
    detected="unknown"
  fi

  # Intelligence: holistic analysis for polyglot/monorepo detection
  if [[ "$detected" == "unknown" ]] && type intelligence_search_memory &>/dev/null && command -v claude &>/dev/null; then
    # List root config files via globbing instead of parsing `ls` output
    # (same alphabetical, non-dotfile view, without the ls anti-pattern).
    local config_files entry
    config_files=$(
      for entry in "$root"/*; do
        printf '%s\n' "${entry##*/}"
      done | grep -E '\.(json|toml|yaml|yml|xml|gradle|lock|mod)$' | head -15
    )
    if [[ -n "$config_files" ]]; then
      local ai_lang
      ai_lang=$(claude --print --output-format text -p "Based on these config files in a project root, what is the primary language/framework? Reply with ONE word (e.g., typescript, python, rust, go, java, ruby, nodejs):

Files: ${config_files}" --model haiku < /dev/null 2>/dev/null || true)
      ai_lang=$(echo "$ai_lang" | tr -d '[:space:]' | tr '[:upper:]' '[:lower:]')
      # Accept only a known token; anything else keeps "unknown".
      case "$ai_lang" in
        typescript|python|rust|go|java|ruby|nodejs|react|nextjs|kotlin|swift|elixir|scala)
          detected="$ai_lang" ;;
      esac
    fi
  fi

  echo "$detected"
}
|
|
122
|
+
|
|
123
|
+
# Detect likely PR reviewers for the repo.
# Prefers CODEOWNERS entries; falls back to frequent recent committers
# that use GitHub noreply addresses. Prints a comma-separated username
# list on stdout (up to 3 from CODEOWNERS, up to 2 from git history);
# may print an empty string when nothing is found.
detect_reviewers() {
  local root="$PROJECT_ROOT"

  # Check CODEOWNERS — common paths first, then broader search
  local codeowners=""
  for f in "$root/.github/CODEOWNERS" "$root/CODEOWNERS" "$root/docs/CODEOWNERS"; do
    if [[ -f "$f" ]]; then
      codeowners="$f"
      break
    fi
  done
  # Broader search if not found at common locations
  if [[ -z "$codeowners" ]]; then
    codeowners=$(find "$root" -maxdepth 3 -name "CODEOWNERS" -type f 2>/dev/null | head -1 || true)
  fi

  if [[ -n "$codeowners" ]]; then
    # Extract GitHub usernames from CODEOWNERS (lines like: * @user1 @user2)
    # NOTE: also matches @org/team handles' org part — acceptable noise.
    local owners
    owners=$(grep -oE '@[a-zA-Z0-9_-]+' "$codeowners" 2>/dev/null | sed 's/@//' | sort -u | head -3 | tr '\n' ',')
    owners="${owners%,}" # trim trailing comma
    if [[ -n "$owners" ]]; then
      echo "$owners"
      return
    fi
  fi

  # Fallback: try to extract GitHub usernames from recent commit emails
  # Format: user@users.noreply.github.com → user, or noreply+user@... → user
  local current_user
  # Current gh login, used to exclude self from the suggestions below.
  current_user=$(gh api user --jq '.login' 2>/dev/null || true)
  local contributors
  # Rank noreply committers by frequency over the last 100 commits,
  # strip the numeric "12345+" GitHub prefix, drop the current user
  # (the ___ sentinel keeps the grep valid when current_user is empty),
  # and keep the top two as a comma-separated list.
  contributors=$(git log --format='%aE' -100 2>/dev/null | \
    grep -oE '[a-zA-Z0-9_-]+@users\.noreply\.github\.com' | \
    sed 's/@users\.noreply\.github\.com//' | sed 's/^[0-9]*+//' | \
    sort | uniq -c | sort -rn | \
    awk '{print $NF}' | \
    grep -v "^${current_user:-___}$" 2>/dev/null | \
    head -2 | tr '\n' ',')
  contributors="${contributors%,}"
  echo "$contributors"
}
|
|
166
|
+
|
|
167
|
+
# Get branch prefix from task type — checks git history for conventions first.
# When more than 5 remote branches exist and >80% of them share one
# prefix, bug/feature tasks adopt the repo's own convention
# (fix|bug|hotfix / feat|feature). Otherwise a hardcoded mapping is
# used. Prints the prefix (no trailing slash) on stdout.
branch_prefix_for_type() {
  local task_type="$1"

  # Analyze recent branches for naming conventions.
  # NB: `git branch -r` indents every line with whitespace, so strip it
  # before anchoring on ^prefix/ — the previous pipeline anchored after
  # only removing "origin/" and therefore never matched anything.
  local branch_prefixes
  branch_prefixes=$(git branch -r 2>/dev/null | sed -E 's#^[[:space:]]*origin/##' | grep -oE '^[a-z]+/' | sort | uniq -c | sort -rn | head -5 || true)
  if [[ -n "$branch_prefixes" ]]; then
    local total_branches dominant_prefix dominant_count
    total_branches=$(echo "$branch_prefixes" | awk '{s+=$1} END {print s}' || echo "0")
    dominant_prefix=$(echo "$branch_prefixes" | head -1 | awk '{print $2}' | tr -d '/' || true)
    dominant_count=$(echo "$branch_prefixes" | head -1 | awk '{print $1}' || echo "0")
    # If >80% of branches use a pattern, adopt it for the matching type
    if [[ "$total_branches" -gt 5 ]] && [[ "$dominant_count" -gt 0 ]]; then
      local pct=$(( (dominant_count * 100) / total_branches ))
      if [[ "$pct" -gt 80 && -n "$dominant_prefix" ]]; then
        # Map task type to the repo's convention (only bug/feature have
        # recognized aliases; other types always use the fallback).
        local mapped=""
        case "$task_type" in
          bug) mapped=$(echo "$branch_prefixes" | awk '{print $2}' | tr -d '/' | grep -E '^(fix|bug|hotfix)$' | head -1 || true) ;;
          feature) mapped=$(echo "$branch_prefixes" | awk '{print $2}' | tr -d '/' | grep -E '^(feat|feature)$' | head -1 || true) ;;
        esac
        if [[ -n "$mapped" ]]; then
          echo "$mapped"
          return
        fi
      fi
    fi
  fi

  # Fallback: hardcoded mapping
  case "$task_type" in
    bug) echo "fix" ;;
    refactor) echo "refactor" ;;
    testing) echo "test" ;;
    security) echo "security" ;;
    docs) echo "docs" ;;
    devops) echo "ci" ;;
    migration) echo "migrate" ;;
    architecture) echo "arch" ;;
    *) echo "feat" ;;
  esac
}
|
|
210
|
+
|
|
211
|
+
# ─── State Management ──────────────────────────────────────────────────────

# Mutable per-run pipeline state, shared with the state helpers that are
# sourced alongside this file. Exact serialization formats are managed by
# that state library — TODO confirm against pipeline-state.sh.
PIPELINE_STATUS="pending"   # overall run status; starts as "pending"
CURRENT_STAGE=""            # id of the stage currently executing
STARTED_AT=""               # timestamp when the run began
UPDATED_AT=""               # timestamp of the most recent state update
STAGE_STATUSES=""           # accumulated per-stage status data
LOG_ENTRIES=""              # accumulated log entries for the run
|
|
219
|
+
|
|
220
|
+
# Classify a free-form goal string into a task type.
# Tries a Claude (haiku) classification when the intelligence layer is
# loaded and the CLI is installed, trusting the answer only for known
# categories at confidence >= 70. Otherwise falls back to keyword
# matching. Prints one of: bug, refactor, testing, security, docs,
# devops, migration, architecture, feature.
detect_task_type() {
  local goal="$1"

  # Intelligence: Claude classification with a confidence score
  if type intelligence_search_memory &>/dev/null && command -v claude &>/dev/null; then
    local reply
    reply=$(claude --print --output-format text -p "Classify this task into exactly ONE category. Reply in format: CATEGORY|CONFIDENCE (0-100)

Categories: bug, refactor, testing, security, docs, devops, migration, architecture, feature

Task: ${goal}" --model haiku < /dev/null 2>/dev/null || true)
    if [[ -n "$reply" ]]; then
      local category confidence
      category=$(echo "$reply" | head -1 | cut -d'|' -f1 | tr -d '[:space:]' | tr '[:upper:]' '[:lower:]')
      confidence=$(echo "$reply" | head -1 | cut -d'|' -f2 | grep -oE '[0-9]+' | head -1 || echo "0")
      # Only accept a known category at >= 70 confidence; the stderr
      # redirect guards the numeric test against a non-numeric reply.
      case "$category" in
        bug|refactor|testing|security|docs|devops|migration|architecture|feature)
          if [[ "${confidence:-0}" -ge 70 ]] 2>/dev/null; then
            printf '%s\n' "$category"
            return
          fi
          ;;
      esac
    fi
  fi

  # Fallback: substring keyword matching on the lowercased goal
  local goal_lc
  goal_lc=$(echo "$goal" | tr '[:upper:]' '[:lower:]')
  case "$goal_lc" in
    *fix*|*bug*|*broken*|*error*|*crash*) printf '%s\n' "bug" ;;
    *refactor*|*clean*|*reorganize*|*extract*) printf '%s\n' "refactor" ;;
    *test*|*coverage*|*spec*) printf '%s\n' "testing" ;;
    *security*|*audit*|*vuln*|*cve*) printf '%s\n' "security" ;;
    *doc*|*readme*|*guide*) printf '%s\n' "docs" ;;
    *deploy*|*ci*|*pipeline*|*docker*|*infra*) printf '%s\n' "devops" ;;
    *migrate*|*migration*|*schema*) printf '%s\n' "migration" ;;
    *architect*|*design*|*rfc*|*adr*) printf '%s\n' "architecture" ;;
    *) printf '%s\n' "feature" ;;
  esac
}
|
|
262
|
+
|
|
263
|
+
# Map a detected task type onto its pipeline template name.
# Unrecognized (or empty) types fall through to "feature-dev".
template_for_type() {
  local task_type="${1:-}"
  case "$task_type" in
    architecture) printf '%s\n' "architecture" ;;
    bug)          printf '%s\n' "bug-fix" ;;
    devops)       printf '%s\n' "devops" ;;
    docs)         printf '%s\n' "documentation" ;;
    migration)    printf '%s\n' "migration" ;;
    refactor)     printf '%s\n' "refactor" ;;
    security)     printf '%s\n' "security-audit" ;;
    testing)      printf '%s\n' "testing" ;;
    *)            printf '%s\n' "feature-dev" ;;
  esac
}
|
|
276
|
+
|
|
277
|
+
# โโโ Stage Preview โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
|
|
278
|
+
|
|
@@ -0,0 +1,196 @@
|
|
|
1
|
+
# pipeline-github.sh — GitHub API helpers for pipeline (for sw-pipeline.sh)
# Source from sw-pipeline.sh. Requires get_stage_status, get_stage_timing, get_stage_description, format_duration, now_iso from state/helpers.
# Source guard: return immediately if this library was already loaded
# in the current shell, so repeated `source` calls are harmless.
[[ -n "${_PIPELINE_GITHUB_LOADED:-}" ]] && return 0
_PIPELINE_GITHUB_LOADED=1
|
|
5
|
+
|
|
6
|
+
# Initialize GitHub integration for the pipeline.
# Sets GH_AVAILABLE=true/false and, when usable, REPO_OWNER / REPO_NAME
# parsed from the origin remote URL. Honors NO_GITHUB=true as an
# explicit kill switch. Emits warn/info diagnostics (helpers sourced
# elsewhere) but never fails the caller.
gh_init() {
  # Explicit opt-out takes precedence over any detection.
  if [[ "$NO_GITHUB" == "true" ]]; then
    GH_AVAILABLE=false
    return
  fi

  if ! command -v gh &>/dev/null; then
    GH_AVAILABLE=false
    warn "gh CLI not found — GitHub integration disabled"
    return
  fi

  # Check if authenticated
  if ! gh auth status &>/dev/null 2>&1; then
    GH_AVAILABLE=false
    warn "gh not authenticated — GitHub integration disabled"
    return
  fi

  # Detect repo owner/name from git remote
  local remote_url
  remote_url=$(git remote get-url origin 2>/dev/null || true)
  if [[ -n "$remote_url" ]]; then
    # Handle SSH: git@github.com:owner/repo.git
    # Handle HTTPS: https://github.com/owner/repo.git
    # First sed captures the path segment right after "github.com:"
    # or "github.com/"; second captures the last segment and strips a
    # trailing ".git" in a follow-up pass (the greedy [^/]+ swallows it).
    REPO_OWNER=$(echo "$remote_url" | sed -E 's#(.*github\.com[:/])([^/]+)/.*#\2#')
    REPO_NAME=$(echo "$remote_url" | sed -E 's#.*/([^/]+)(\.git)?$#\1#' | sed 's/\.git$//')
  fi

  if [[ -n "$REPO_OWNER" && -n "$REPO_NAME" ]]; then
    GH_AVAILABLE=true
    info "GitHub: ${DIM}${REPO_OWNER}/${REPO_NAME}${RESET}"
  else
    GH_AVAILABLE=false
    warn "Could not detect GitHub repo — GitHub integration disabled"
  fi
}
|
|
43
|
+
|
|
44
|
+
# Post a comment on a GitHub issue. No-op when GitHub integration is
# disabled; failures are swallowed (best-effort notification).
# Usage: gh_comment_issue <issue_number> <body>
gh_comment_issue() {
  if [[ "$GH_AVAILABLE" != "true" ]]; then
    return 0
  fi
  local issue="$1"
  local text="$2"
  gh issue comment "$issue" --body "$text" 2>/dev/null || true
}
|
|
51
|
+
|
|
52
|
+
# Post a progress-tracking comment and remember its ID for later edits.
# Usage: gh_post_progress <issue_number> <body>
# Side effect: sets the global PROGRESS_COMMENT_ID on success; leaves it
# untouched when the API call fails or returns no ID.
gh_post_progress() {
  if [[ "$GH_AVAILABLE" != "true" ]]; then
    return 0
  fi
  local issue="$1"
  local text="$2"
  local comment_id
  comment_id=$(gh api "repos/${REPO_OWNER}/${REPO_NAME}/issues/${issue}/comments" \
    -f body="$text" --jq '.id' 2>/dev/null) || true
  if [[ -n "$comment_id" && "$comment_id" != "null" ]]; then
    PROGRESS_COMMENT_ID="$comment_id"
  fi
}
|
|
64
|
+
|
|
65
|
+
# Rewrite the body of the previously posted progress comment.
# Usage: gh_update_progress <body>
# No-op when GitHub is disabled or no comment ID was recorded by
# gh_post_progress; API failures are swallowed.
gh_update_progress() {
  if [[ "$GH_AVAILABLE" != "true" || -z "$PROGRESS_COMMENT_ID" ]]; then
    return 0
  fi
  local text="$1"
  gh api "repos/${REPO_OWNER}/${REPO_NAME}/issues/comments/${PROGRESS_COMMENT_ID}" \
    -X PATCH -f body="$text" 2>/dev/null || true
}
|
|
73
|
+
|
|
74
|
+
# Add labels to an issue or PR (best-effort, no-op when disabled).
# Usage: gh_add_labels <issue_number> <label1,label2,...>
gh_add_labels() {
  if [[ "$GH_AVAILABLE" != "true" ]]; then
    return 0
  fi
  local issue="$1"
  local label_list="$2"
  if [[ -z "$label_list" ]]; then
    return 0
  fi
  gh issue edit "$issue" --add-label "$label_list" 2>/dev/null || true
}
|
|
82
|
+
|
|
83
|
+
# Remove a single label from an issue (best-effort, no-op when disabled).
# Usage: gh_remove_label <issue_number> <label>
gh_remove_label() {
  if [[ "$GH_AVAILABLE" != "true" ]]; then
    return 0
  fi
  local issue="$1"
  local label="$2"
  gh issue edit "$issue" --remove-label "$label" 2>/dev/null || true
}
|
|
90
|
+
|
|
91
|
+
# Assign the authenticated user to an issue (best-effort).
# Usage: gh_assign_self <issue_number>
gh_assign_self() {
  if [[ "$GH_AVAILABLE" != "true" ]]; then
    return 0
  fi
  local issue="$1"
  gh issue edit "$issue" --add-assignee "@me" 2>/dev/null || true
}
|
|
98
|
+
|
|
99
|
+
# Print an issue's metadata as JSON on stdout (empty on failure).
# Fields: title, body, labels, milestone, assignees, comments, number, state.
# Usage: gh_get_issue_meta <issue_number>
gh_get_issue_meta() {
  if [[ "$GH_AVAILABLE" != "true" ]]; then
    return 0
  fi
  local issue="$1"
  gh issue view "$issue" \
    --json title,body,labels,milestone,assignees,comments,number,state \
    2>/dev/null || true
}
|
|
106
|
+
|
|
107
|
+
# Build a progress table for GitHub comment
# Usage: gh_build_progress_body
# Prints a Markdown body on stdout. Reads globals: PIPELINE_NAME, GOAL,
# PIPELINE_CONFIG, GIT_BRANCH, GITHUB_ISSUE, PIPELINE_START_EPOCH,
# ARTIFACTS_DIR, PR_NUMBER. Calls get_stage_status, get_stage_timing,
# get_stage_description, format_duration, now_epoch, now_iso (helpers
# sourced elsewhere — contract assumed, verify against state lib).
gh_build_progress_body() {
  local body="## 🤖 Pipeline Progress — \`${PIPELINE_NAME}\`

**Delivering:** ${GOAL}

| Stage | Status | Duration | |
|-------|--------|----------|-|"

  local stages
  stages=$(jq -c '.stages[]' "$PIPELINE_CONFIG" 2>/dev/null)
  # Read stage objects on fd 3 so stdin stays free for the loop body.
  while IFS= read -r -u 3 stage; do
    local id enabled
    id=$(echo "$stage" | jq -r '.id')
    enabled=$(echo "$stage" | jq -r '.enabled')

    # Disabled stages are rendered as skipped and contribute no timing.
    if [[ "$enabled" != "true" ]]; then
      body="${body}
| ${id} | ⏭️ skipped | — | |"
      continue
    fi

    local sstatus
    sstatus=$(get_stage_status "$id")
    local duration
    duration=$(get_stage_timing "$id")

    # Icon per status; the detail column shows the stage description
    # only for stages that are still running or pending.
    local icon detail_col
    case "$sstatus" in
      complete) icon="✅"; detail_col="" ;;
      running) icon="🔄"; detail_col=$(get_stage_description "$id") ;;
      failed) icon="❌"; detail_col="" ;;
      *) icon="⬜"; detail_col=$(get_stage_description "$id") ;;
    esac

    body="${body}
| ${id} | ${icon} ${sstatus:-pending} | ${duration:-—} | ${detail_col} |"
  done 3<<< "$stages"

  body="${body}

**Branch:** \`${GIT_BRANCH}\`"

  [[ -n "${GITHUB_ISSUE:-}" ]] && body="${body}
**Issue:** ${GITHUB_ISSUE}"

  # Total elapsed time since pipeline start, when known.
  local total_dur=""
  if [[ -n "$PIPELINE_START_EPOCH" ]]; then
    total_dur=$(format_duration $(( $(now_epoch) - PIPELINE_START_EPOCH )))
    body="${body}
**Elapsed:** ${total_dur}"
  fi

  # Artifacts section
  local artifacts=""
  [[ -f "$ARTIFACTS_DIR/plan.md" ]] && artifacts="${artifacts}[Plan](.claude/pipeline-artifacts/plan.md)"
  [[ -f "$ARTIFACTS_DIR/design.md" ]] && { [[ -n "$artifacts" ]] && artifacts="${artifacts} · "; artifacts="${artifacts}[Design](.claude/pipeline-artifacts/design.md)"; }
  [[ -n "${PR_NUMBER:-}" ]] && { [[ -n "$artifacts" ]] && artifacts="${artifacts} · "; artifacts="${artifacts}PR #${PR_NUMBER}"; }
  [[ -n "$artifacts" ]] && body="${body}

📎 **Artifacts:** ${artifacts}"

  body="${body}

---
_Updated: $(now_iso) · shipwright pipeline_"
  echo "$body"
}
|
|
176
|
+
|
|
177
|
+
# Push a page to the GitHub wiki
# Usage: gh_wiki_page <title> <content>
# Clones the wiki repo into $ARTIFACTS_DIR/wiki on first use; silently
# skips when GitHub integration is off or the wiki is not initialized.
# Commit/push failures are swallowed (best-effort documentation sync).
gh_wiki_page() {
  local title="$1" content="$2"
  # Explicit string guards: the old `$GH_AVAILABLE || return 0` and
  # `$NO_GITHUB && return 0` executed the variables' VALUES as commands,
  # so an empty NO_GITHUB expanded to an empty (successful) command and
  # the function always bailed out.
  [[ "$GH_AVAILABLE" != "true" ]] && return 0
  [[ "$NO_GITHUB" == "true" ]] && return 0
  local wiki_dir="$ARTIFACTS_DIR/wiki"
  if [[ ! -d "$wiki_dir" ]]; then
    git clone "https://github.com/${REPO_OWNER}/${REPO_NAME}.wiki.git" "$wiki_dir" 2>/dev/null || {
      info "Wiki not initialized — skipping wiki update"
      return 0
    }
  fi
  # printf instead of echo: content starting with "-n"/"-e" or holding
  # backslashes must be written verbatim.
  printf '%s\n' "$content" > "$wiki_dir/${title}.md"
  ( cd "$wiki_dir" && git add -A && git commit -m "Pipeline: update $title" && git push ) 2>/dev/null || true
}
|
|
193
|
+
|
|
194
|
+
# โโโ Auto-Detection โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
|
|
195
|
+
|
|
196
|
+
# Detect the test command from project files
|