@humanu/orchestra 0.5.28 → 0.5.30
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/install.js +27 -3
- package/package.json +1 -1
- package/resources/prebuilt/linux-x64/orchestra +0 -0
- package/resources/prebuilt/macos-arm64/orchestra +0 -0
- package/resources/prebuilt/macos-intel/orchestra +0 -0
- package/resources/scripts/gw-bridge.sh +50 -1024
- package/resources/scripts/gw.sh +25 -19
- package/resources/scripts/gwr.sh +41 -139
- package/resources/scripts/shell/bridge/ai.sh +175 -0
- package/resources/scripts/shell/bridge/copy_env.sh +84 -0
- package/resources/scripts/shell/bridge/tmux.sh +162 -0
- package/resources/scripts/shell/bridge/utils.sh +449 -0
- package/resources/scripts/shell/build/build_bridge.sh +17 -0
- package/resources/scripts/shell/build/build_dependencies.sh +23 -0
- package/resources/scripts/shell/build/build_install.sh +7 -0
- package/resources/scripts/shell/build/build_load.sh +10 -0
- package/resources/scripts/shell/build/build_logging.sh +6 -0
- package/resources/scripts/shell/build/build_rust.sh +18 -0
- package/resources/scripts/shell/build/build_usage.sh +27 -0
- package/resources/scripts/shell/env/copy_env_command.sh +27 -0
- package/resources/scripts/shell/env/copy_env_constants.sh +3 -0
- package/resources/scripts/shell/env/copy_env_core.sh +171 -0
- package/resources/scripts/shell/env/copy_env_debug_parse.sh +14 -0
- package/resources/scripts/shell/env/copy_env_load.sh +9 -0
- package/resources/scripts/shell/env/copy_env_locations.sh +34 -0
- package/resources/scripts/shell/env/copy_env_logging.sh +17 -0
- package/resources/scripts/shell/env/copy_env_state.sh +5 -0
- package/resources/scripts/shell/fix_hanging_colors.sh +9 -0
- package/resources/scripts/shell/fix_hanging_load.sh +9 -0
- package/resources/scripts/shell/fix_hanging_logging.sh +6 -0
- package/resources/scripts/shell/fix_hanging_script.sh +19 -0
- package/resources/scripts/shell/fix_hanging_steps.sh +81 -0
- package/resources/scripts/shell/git/bridge_check_branch.sh +53 -0
- package/resources/scripts/shell/git/bridge_create_worktree.sh +35 -0
- package/resources/scripts/shell/git/bridge_create_worktree_from_existing.sh +32 -0
- package/resources/scripts/shell/git/bridge_create_worktree_from_remote.sh +28 -0
- package/resources/scripts/shell/git/bridge_delete_branch_only.sh +25 -0
- package/resources/scripts/shell/git/bridge_delete_worktree.sh +26 -0
- package/resources/scripts/shell/git/bridge_delete_worktree_only.sh +28 -0
- package/resources/scripts/shell/git/bridge_enhanced_git_status.sh +106 -0
- package/resources/scripts/shell/git/bridge_git_status.sh +13 -0
- package/resources/scripts/shell/git/bridge_list_worktrees.sh +12 -0
- package/resources/scripts/shell/git/bridge_merge.sh +12 -0
- package/resources/scripts/shell/git/bridge_merge_from_primary.sh +112 -0
- package/resources/scripts/shell/git/bridge_merge_into_primary.sh +116 -0
- package/resources/scripts/shell/git/bridge_primary_branch.sh +15 -0
- package/resources/scripts/shell/git/bridge_rebase_from_primary.sh +72 -0
- package/resources/scripts/shell/git/bridge_repo.sh +12 -0
- package/resources/scripts/shell/git/bridge_repo_info.sh +23 -0
- package/resources/scripts/shell/git/bridge_squash_into_primary.sh +144 -0
- package/resources/scripts/shell/git/bridge_switch_worktree.sh +23 -0
- package/resources/scripts/shell/git/bridge_worktree.sh +17 -0
- package/resources/scripts/shell/git/checkout_worktree.sh +68 -0
- package/resources/scripts/shell/git/create_worktree.sh +23 -0
- package/resources/scripts/shell/git/delete_worktree.sh +27 -0
- package/resources/scripts/shell/git/gwr_worktree_title.sh +29 -0
- package/resources/scripts/shell/git/list_worktrees.sh +22 -0
- package/resources/scripts/shell/git/merge.sh +12 -0
- package/resources/scripts/shell/git/repo.sh +12 -0
- package/resources/scripts/shell/git/status.sh +21 -0
- package/resources/scripts/shell/git/worktree.sh +17 -0
- package/resources/scripts/shell/gw_debug.sh +3 -0
- package/resources/scripts/shell/gw_err.sh +3 -0
- package/resources/scripts/shell/gw_have_cmd.sh +3 -0
- package/resources/scripts/shell/gw_info.sh +3 -0
- package/resources/scripts/shell/gw_legacy_wrappers.sh +7 -0
- package/resources/scripts/shell/gw_load.sh +9 -0
- package/resources/scripts/shell/gwr_binary.sh +25 -0
- package/resources/scripts/shell/gwr_bridge.sh +10 -0
- package/resources/scripts/shell/gwr_colors.sh +7 -0
- package/resources/scripts/shell/gwr_git.sh +8 -0
- package/resources/scripts/shell/gwr_load.sh +12 -0
- package/resources/scripts/shell/gwr_logging.sh +6 -0
- package/resources/scripts/shell/gwr_terminal.sh +13 -0
- package/resources/scripts/shell/gwr_usage.sh +49 -0
- package/resources/scripts/shell/orchestra-command-hook.sh +109 -0
- package/resources/scripts/shell/orchestra-local.sh +87 -0
- package/resources/scripts/commands.sh +0 -208
|
@@ -0,0 +1,449 @@
|
|
|
1
|
+
#!/bin/bash
|
|
2
|
+
|
|
3
|
+
# shellcheck shell=bash
|
|
4
|
+
|
|
5
|
+
# Initialise JSON backend (jq preferred, python3/node fallbacks)
|
|
6
|
+
# Pick a JSON backend once and cache the choice in BRIDGE_JSON_BACKEND.
# Preference order: jq, then python3, then node. Exits when none exists.
bridge_init_json_backend() {
  # Already resolved (or forced by the caller) — nothing to do.
  [[ -n "${BRIDGE_JSON_BACKEND:-}" ]] && return 0

  if have_cmd jq; then
    BRIDGE_JSON_BACKEND="jq"
    return 0
  fi
  if command -v python3 >/dev/null 2>&1; then
    BRIDGE_JSON_BACKEND="python"
    return 0
  fi
  if command -v node >/dev/null 2>&1; then
    BRIDGE_JSON_BACKEND="node"
    return 0
  fi

  err "jq (or python3/node fallback) required for JSON processing"
  exit 1
}
|
|
22
|
+
|
|
23
|
+
# Internal helper to emit a JSON object.
|
|
24
|
+
# Arguments use the format `key[:type]=value` where type defaults to `s` (string).
|
|
25
|
+
# Supported types: s (string), n (numeric), b (boolean), j (raw JSON), null (null literal).
|
|
26
|
+
# Emit a JSON object built from `key[:type]=value` argument specs.
# Type defaults to `s` (string); supported types:
#   s (string), n (number), b (boolean), j (raw JSON), null (null literal).
# Empty specs are skipped. Output goes to stdout.
json_object() {
  bridge_init_json_backend
  case "$BRIDGE_JSON_BACKEND" in
    jq)
      local -a jq_args=()
      local -a jq_expr_parts=()
      local idx=0
      local spec key rest type value var_name
      for spec in "$@"; do
        [[ -z "$spec" ]] && continue
        key="${spec%%:*}"
        rest="${spec#*:}"
        if [[ "$key" == "$rest" ]]; then
          rest="s"
        fi
        if [[ "$rest" == *=* ]]; then
          type="${rest%%=*}"
          value="${rest#*=}"
        else
          # BUG FIX: "key:type" with no "=" must be parsed as a type with an
          # empty value (matching the python/node backends), not as a string
          # whose value is the type name.
          type="$rest"
          value=""
        fi
        var_name="__jq_arg_$idx"
        idx=$((idx + 1))
        case "$type" in
          n)
            jq_args+=(--argjson "$var_name" "${value:-0}")
            jq_expr_parts+=("\"$key\": \$$var_name")
            ;;
          b)
            jq_args+=(--argjson "$var_name" "$(printf '%s' "$value" | tr '[:upper:]' '[:lower:]')")
            jq_expr_parts+=("\"$key\": \$$var_name")
            ;;
          j)
            jq_args+=(--argjson "$var_name" "${value:-null}")
            jq_expr_parts+=("\"$key\": \$$var_name")
            ;;
          null)
            jq_expr_parts+=("\"$key\": null")
            ;;
          *)
            # `s` and any unknown type fall back to plain string handling.
            jq_args+=(--arg "$var_name" "$value")
            jq_expr_parts+=("\"$key\": \$$var_name")
            ;;
        esac
      done
      # BUG FIX: object entries must be joined with "," — the original used
      # ";", which is not valid jq object-construction syntax — and variable
      # references need a literal "$" (the original "$$var_name" expanded
      # "$$" to the shell PID).
      local expr="{"
      local first=1
      local part
      for part in "${jq_expr_parts[@]}"; do
        if [[ $first -eq 1 ]]; then
          expr+="$part"
          first=0
        else
          expr+=", $part"
        fi
      done
      expr+="}"
      jq -n "${jq_args[@]}" "$expr"
      ;;
    python)
      python3 - "$@" <<'PY'
import json
import sys

def coerce(spec):
    if not spec:
        return None
    if ':' in spec:
        key, rest = spec.split(':', 1)
    else:
        key, rest = spec, 's'
    if '=' in rest:
        typ, value = rest.split('=', 1)
    else:
        typ, value = rest, ''
    if typ == 's':
        return key, value
    if typ == 'n':
        if value.strip() == '':
            return key, 0
        if any(c in value for c in '.eE'):
            return key, float(value)
        return key, int(value)
    if typ == 'b':
        return key, value.lower() in ('true', '1', 'yes', 'on')
    if typ == 'j':
        return key, json.loads(value) if value else None
    if typ == 'null':
        return key, None
    return key, value

obj = {}
for spec in sys.argv[1:]:
    item = coerce(spec)
    if item:
        key, value = item
        obj[key] = value

print(json.dumps(obj))
PY
      ;;
    node)
      node - "$@" <<'NODE'
const specs = process.argv.slice(2);
const obj = {};

for (const spec of specs) {
  if (!spec) continue;
  // BUG FIX: String.prototype.split's limit argument TRUNCATES the result
  // (it does not keep the remainder like Python's maxsplit), so values
  // containing ':' or '=' were silently corrupted. Split on the first
  // separator manually instead.
  let key, rest;
  const colon = spec.indexOf(':');
  if (colon >= 0) {
    key = spec.slice(0, colon);
    rest = spec.slice(colon + 1);
  } else {
    key = spec;
    rest = 's';
  }
  let type, value;
  const eq = rest.indexOf('=');
  if (eq >= 0) {
    type = rest.slice(0, eq);
    value = rest.slice(eq + 1);
  } else {
    type = rest;
    value = '';
  }
  switch (type) {
    case 's':
      obj[key] = value;
      break;
    case 'n':
      if (!value.trim()) {
        obj[key] = 0;
      } else if (/\.|e|E/.test(value)) {
        obj[key] = Number.parseFloat(value);
      } else {
        obj[key] = Number.parseInt(value, 10);
      }
      break;
    case 'b':
      obj[key] = ['true', '1', 'yes', 'on'].includes(value.toLowerCase());
      break;
    case 'j':
      obj[key] = value ? JSON.parse(value) : null;
      break;
    case 'null':
      obj[key] = null;
      break;
    default:
      obj[key] = value;
  }
}

process.stdout.write(JSON.stringify(obj));
NODE
      ;;
  esac
}
|
|
185
|
+
|
|
186
|
+
# Helper to convert newline-delimited text into a JSON array (dropping trailing blank)
|
|
187
|
+
# Convert newline-delimited text into a JSON array of strings, dropping the
# trailing empty element produced by a final newline.
json_array_from_lines() {
  bridge_init_json_backend
  local input="$1"
  case "$BRIDGE_JSON_BACKEND" in
    jq)
      # BUG FIX: only drop the last element when it is the empty string left
      # by a trailing newline; the original [:-1] dropped a real line when
      # the input had no trailing newline.
      printf '%s' "$input" | jq -R -s 'split("\n") | if length > 0 and .[-1] == "" then .[:-1] else . end'
      ;;
    python)
      # BUG FIX: pass the input via argv. The original piped the text while
      # also attaching a heredoc; the heredoc overrides stdin, so the piped
      # data was discarded and the function always printed [].
      python3 - "$input" <<'PY'
import json
import sys
lines = sys.argv[1].split('\n')
if lines and lines[-1] == '':
    lines = lines[:-1]
print(json.dumps(lines))
PY
      ;;
    node)
      node - "$input" <<'NODE'
let lines = (process.argv[2] ?? '').split('\n');
if (lines.length && lines[lines.length - 1] === '') {
  lines = lines.slice(0, -1);
}
process.stdout.write(JSON.stringify(lines));
NODE
      ;;
  esac
}
|
|
218
|
+
|
|
219
|
+
# Helper to encode arbitrary text as a JSON string
|
|
220
|
+
# Encode arbitrary text as a single JSON string literal on stdout.
json_stringify() {
  bridge_init_json_backend
  local text="$1"
  case "$BRIDGE_JSON_BACKEND" in
    jq)
      printf '%s' "$text" | jq -R -s .
      ;;
    python)
      # Script comes from the heredoc; the text travels as argv[1].
      python3 - <<'PY' "$text"
import json, sys
print(json.dumps(sys.argv[1]))
PY
      ;;
    node)
      node - <<'NODE' "$text"
process.stdout.write(JSON.stringify(process.argv[2] ?? ''));
NODE
      ;;
  esac
}
|
|
242
|
+
|
|
243
|
+
# Helper to convert newline-separated JSON objects into an array
|
|
244
|
+
# Combine newline-separated JSON documents (one per line, blanks ignored)
# into a single JSON array.
json_array_from_json_lines() {
  bridge_init_json_backend
  local input="$1"
  case "$BRIDGE_JSON_BACKEND" in
    jq)
      printf '%s' "$input" | jq -s .
      ;;
    python)
      # BUG FIX: pass input via argv — the original pipe-into-heredoc form
      # let the heredoc clobber stdin, so this always printed [].
      python3 - "$input" <<'PY'
import json
import sys
items = [json.loads(line) for line in sys.argv[1].splitlines() if line.strip()]
print(json.dumps(items))
PY
      ;;
    node)
      node - "$input" <<'NODE'
const items = [];
for (const line of (process.argv[2] ?? '').split(/\r?\n/)) {
  if (!line.trim()) continue;
  items.push(JSON.parse(line));
}
process.stdout.write(JSON.stringify(items));
NODE
      ;;
  esac
}
|
|
278
|
+
|
|
279
|
+
# Helper function to output JSON safely
|
|
280
|
+
# Emit the arguments as a JSON array of strings; prints "null" when called
# with no arguments.
json_output() {
  [[ $# -eq 0 ]] && { printf 'null\n'; return; }
  local joined
  joined="$(printf '%s\n' "$@")"
  json_array_from_lines "$joined"
}
|
|
289
|
+
|
|
290
|
+
# Helper function to output error
|
|
291
|
+
# Emit a JSON error object: {"error": "<message>"}.
json_error() {
  json_object "error:s=$1"
}
|
|
295
|
+
|
|
296
|
+
# Extract a concise summary line from git output for error dialogs
|
|
297
|
+
# Pick a single representative line from git output for error dialogs.
# Preference: the last line that looks like a real error (fatal:/error:/
# CONFLICT/merge failed/could not apply); otherwise the last non-hint line;
# otherwise the first hint line seen. hint:/Hint: lines never become the
# error candidate themselves.
git_error_summary() {
  local text="${1-}"
  local candidate="" fallback=""
  local raw stripped
  while IFS= read -r raw; do
    raw="${raw%$'\r'}"                      # tolerate CRLF-terminated output
    stripped="${raw#${raw%%[![:space:]]*}}" # drop leading whitespace
    [[ -z "${stripped//[[:space:]]/}" ]] && continue

    if [[ $stripped == hint:* || $stripped == Hint:* ]]; then
      # A hint only serves as a fallback when nothing better was seen yet.
      [[ -z "$fallback" ]] && fallback="$stripped"
      continue
    fi

    fallback="$stripped"
    case "$stripped" in
      fatal:*|Fatal:*|error:*|Error:*|CONFLICT*|Conflict*|\
      *merge\ failed*|*Merge\ failed*|*could\ not\ apply*)
        candidate="$stripped"
        ;;
    esac
  done <<<"$text"

  printf '%s' "${candidate:-$fallback}"
}
|
|
333
|
+
|
|
334
|
+
# Helper function to output structured worktree data
|
|
335
|
+
# Emit the worktrees reported by git_list_worktrees (TAB-separated fields:
# path, branch, sha) as a JSON array of {path, branch, sha} objects.
json_worktrees() {
  bridge_init_json_backend
  case "$BRIDGE_JSON_BACKEND" in
    jq)
      git_list_worktrees | jq -R -s '
        split("\n")[:-1] |
        map(split("\t")) |
        map({
          path: .[0],
          branch: .[1],
          sha: .[2]
        })'
      ;;
    python)
      # BUG FIX: feed the listing via argv — the previous pipe-into-heredoc
      # form let the heredoc clobber stdin, discarding the piped data, so
      # this branch always printed [].
      python3 - "$(git_list_worktrees)" <<'PY'
import json
import sys
items = []
for line in sys.argv[1].splitlines():
    if not line:
        continue
    parts = line.split('\t')
    while len(parts) < 3:
        parts.append('')
    items.append({"path": parts[0], "branch": parts[1], "sha": parts[2]})
print(json.dumps(items))
PY
      ;;
    node)
      node - "$(git_list_worktrees)" <<'NODE'
const items = [];
for (const line of (process.argv[2] ?? '').split(/\r?\n/)) {
  if (!line) continue;
  const parts = line.split('\t');
  while (parts.length < 3) parts.push('');
  items.push({ path: parts[0], branch: parts[1], sha: parts[2] });
}
process.stdout.write(JSON.stringify(items));
NODE
      ;;
  esac
}
|
|
384
|
+
|
|
385
|
+
# Helper function to load Anthropic API key from config or .env file
|
|
386
|
+
# Resolve ANTHROPIC_API_KEY, trying in order:
#   1. ~/.orchestra/config.json (read with jq, or python3/node as fallback),
#   2. .env in the current directory,
#   3. .env at the repository root (only if the key is still unset).
# Exports the key when found in the config file; .env files are simply
# sourced and expected to set the variable themselves.
load_anthropic_api_key() {
  local config_file="$HOME/.orchestra/config.json"

  if [[ -f "$config_file" ]]; then
    local api_key=""
    if have_cmd jq; then
      api_key="$(jq -r '.anthropic_api_key // empty' "$config_file" 2>/dev/null)"
      if [[ -n "$api_key" && "$api_key" != "null" ]]; then
        export ANTHROPIC_API_KEY="$api_key"
        return 0
      fi
    else
      bridge_init_json_backend
      case "$BRIDGE_JSON_BACKEND" in
        python)
          api_key="$(python3 - "$config_file" <<'PY'
import json
import sys
try:
    data = json.load(open(sys.argv[1], 'r'))
except Exception:
    data = {}
print(data.get('anthropic_api_key') or '')
PY
)"
          ;;
        node)
          api_key="$(node - "$config_file" <<'NODE'
const fs = require('fs');
let value = '';
try {
  const data = JSON.parse(fs.readFileSync(process.argv[2], 'utf8'));
  value = data.anthropic_api_key || '';
} catch (err) {
  value = '';
}
process.stdout.write(value);
NODE
)"
          ;;
      esac
      if [[ -n "${api_key:-}" ]]; then
        export ANTHROPIC_API_KEY="$api_key"
        return 0
      fi
    fi
  fi

  # Best-effort fallback: a .env in the current working directory.
  if [[ -f ".env" ]]; then
    source .env
  fi

  # Final fallback: a .env at the repository root, if the key is still unset.
  if [[ -z "${ANTHROPIC_API_KEY-}" ]]; then
    local repo_root
    repo_root="$(git_repo_root 2>/dev/null)"
    if [[ -n "$repo_root" && -f "$repo_root/.env" ]]; then
      source "$repo_root/.env"
    fi
  fi
}
|
|
@@ -0,0 +1,17 @@
|
|
|
1
|
+
# shellcheck shell=bash
|
|
2
|
+
|
|
3
|
+
# Smoke-test gw-bridge.sh: it must be executable and respond to `help`.
# Exits the build script on failure.
build_test_bridge() {
  local bridge="$SCRIPT_DIR/gw-bridge.sh"

  build_info "Testing gw-bridge.sh..."

  [[ -x "$bridge" ]] || {
    build_error "gw-bridge.sh not found or not executable"
    exit 1
  }

  if ! "$bridge" help >/dev/null 2>&1; then
    build_error "gw-bridge.sh test failed"
    exit 1
  fi
  build_success "gw-bridge.sh basic test passed"
}
|
|
@@ -0,0 +1,23 @@
|
|
|
1
|
+
# shellcheck shell=bash
|
|
2
|
+
|
|
3
|
+
# Verify build-time dependencies. Cargo is mandatory (exits when missing);
# jq and tty are optional and only produce warnings.
build_check_dependencies() {
  build_info "Checking dependencies..."

  command -v cargo >/dev/null 2>&1 || {
    build_error "Rust/Cargo not found. Please install Rust: https://rustup.rs/"
    exit 1
  }

  if ! command -v jq >/dev/null 2>&1; then
    build_warn "jq not found. Installing via package manager is recommended."
    build_warn "  macOS: brew install jq"
    build_warn "  Ubuntu/Debian: sudo apt install jq"
    build_warn "  RHEL/CentOS: sudo yum install jq"
  fi

  if ! command -v tty >/dev/null 2>&1; then
    build_warn "tty command not found. Terminal title updates will be skipped."
  fi

  build_success "Dependencies check passed"
}
|
|
@@ -0,0 +1,10 @@
|
|
|
1
|
+
# shellcheck shell=bash
|
|
2
|
+
|
|
3
|
+
# Source every build_* helper module, in dependency order, from SCRIPT_DIR.
build_load() {
  local module
  for module in build_logging build_dependencies build_rust build_bridge \
                build_install build_usage; do
    source "$SCRIPT_DIR/shell/build/$module.sh"
  done
}
|
|
@@ -0,0 +1,6 @@
|
|
|
1
|
+
# shellcheck shell=bash
|
|
2
|
+
|
|
3
|
+
# Leveled build logging. Color variables (BLUE/GREEN/YELLOW/RED/NC) may be
# unset. BUG FIX (SC2059): variables were expanded inside the printf FORMAT
# string; a color value containing '%' would corrupt output. Pass them as
# %s arguments instead.
build_info()    { printf '%s[INFO]%s %s\n'    "${BLUE:-}"   "${NC:-}" "$*"; }
build_success() { printf '%s[SUCCESS]%s %s\n' "${GREEN:-}"  "${NC:-}" "$*"; }
build_warn()    { printf '%s[WARN]%s %s\n'    "${YELLOW:-}" "${NC:-}" "$*"; }
build_error()   { printf '%s[ERROR]%s %s\n'   "${RED:-}"    "${NC:-}" "$*"; }
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
# shellcheck shell=bash
|
|
2
|
+
|
|
3
|
+
# Build the gw-tui Rust binary in release (default) or debug mode.
# BUG FIX: the original ignored cargo's exit status and reported success
# even when the build failed; it also did not check the `cd`. Failures now
# propagate as a non-zero return from the subshell.
build_rust_tui() {
  local build_type="${1:-release}"

  build_info "Building gw-tui (${build_type})..."

  (
    cd "$SCRIPT_DIR/gw-tui" || exit 1
    if [[ "$build_type" == "release" ]]; then
      cargo build --release || exit 1
      build_success "Built gw-tui in release mode: ./gw-tui/target/release/gw-tui"
    else
      cargo build || exit 1
      build_success "Built gw-tui in debug mode: ./gw-tui/target/debug/gw-tui"
    fi
  )
}
|
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
# shellcheck shell=bash
|
|
2
|
+
|
|
3
|
+
# Print the build script's usage/help text to stdout. $0 is expanded so the
# examples show the actual invocation name.
build_usage() {
  cat <<EOF
Usage: $0 [options]

Build script for gw.sh and gw-tui components.

Options:
  -d, --debug       Build in debug mode (default: release)
  -i, --install     Install gw-tui globally after building
  -t, --test-only   Only run tests, don't build
  -h, --help        Show this help

Examples:
  $0                # Build release version
  $0 --debug        # Build debug version
  $0 --install      # Build and install globally
  $0 --test-only    # Just run tests

The built binary will be available at:
- Release: ./gw-tui/target/release/gw-tui
- Debug: ./gw-tui/target/debug/gw-tui

The gw.sh script will automatically detect and use the built binary.
EOF
}
|
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
# shellcheck shell=bash
|
|
2
|
+
|
|
3
|
+
# `gw copy-env <worktreename>`: copy env files from the current repository
# root into the worktree whose branch matches the given name.
cmd_copy_env() {
  local target_branch="${1-}"

  if [[ -z "$target_branch" ]]; then
    err "Target worktree name required"
    echo "Usage: gw copy-env <worktreename>"
    return 1
  fi

  git_require_repo || return 1

  # Resolve the branch name to the absolute path of its worktree.
  local target_path
  target_path="$(git_branch_to_worktree_path "$target_branch")"
  if [[ -z "$target_path" ]]; then
    err "Worktree for branch '$target_branch' not found"
    return 1
  fi

  local source_root shared_root
  source_root="$(git_repo_root)"
  shared_root="$(git_shared_root 2>/dev/null)"
  shared_root="${shared_root:-$source_root}"

  info "🔧 Copying env files from '$source_root' -> '$target_path'"
  copy_env_files "$source_root" "$target_path" "$shared_root"
  info "✅ Env files synced to '$target_branch'"
}
|