@orchagent/cli 0.3.91 → 0.3.92
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/commands/diff-format.js +300 -0
- package/dist/commands/diff.js +2 -129
- package/dist/commands/health.js +90 -7
- package/dist/commands/index.js +4 -0
- package/dist/commands/init-wizard.js +129 -64
- package/dist/commands/init.js +71 -1
- package/dist/commands/publish.js +74 -66
- package/dist/commands/run.js +107 -29
- package/dist/commands/scaffold.js +213 -0
- package/dist/commands/schedule.js +40 -3
- package/dist/commands/templates/cron-job.js +259 -0
- package/dist/commands/update.js +46 -9
- package/dist/commands/validate.js +264 -0
- package/dist/lib/scaffold-orchestration.js +237 -0
- package/dist/lib/validate.js +478 -0
- package/package.json +1 -1
package/dist/commands/run.js
CHANGED
|
@@ -37,6 +37,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
|
37
37
|
};
|
|
38
38
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
39
39
|
exports.localCommandForEntrypoint = localCommandForEntrypoint;
|
|
40
|
+
exports.inferFileField = inferFileField;
|
|
40
41
|
exports.validateInputSchema = validateInputSchema;
|
|
41
42
|
exports.tryParseJsonObject = tryParseJsonObject;
|
|
42
43
|
exports.isKeyedFileArg = isKeyedFileArg;
|
|
@@ -156,6 +157,13 @@ function warnIfLocalPathReference(jsonBody) {
|
|
|
156
157
|
// If parsing fails, skip the warning
|
|
157
158
|
}
|
|
158
159
|
}
|
|
160
|
+
/**
|
|
161
|
+
* Infer which schema field should receive file content.
|
|
162
|
+
*
|
|
163
|
+
* Returns the detected field name, or null when the schema has properties
|
|
164
|
+
* but none of the heuristics can determine the right field.
|
|
165
|
+
* Returns 'content' (default) only when there is no schema to check against.
|
|
166
|
+
*/
|
|
159
167
|
function inferFileField(inputSchema) {
|
|
160
168
|
if (!inputSchema || typeof inputSchema !== 'object')
|
|
161
169
|
return 'content';
|
|
@@ -163,6 +171,7 @@ function inferFileField(inputSchema) {
|
|
|
163
171
|
if (!props || typeof props !== 'object')
|
|
164
172
|
return 'content';
|
|
165
173
|
const properties = props;
|
|
174
|
+
// 1. Check well-known content field names
|
|
166
175
|
for (const field of CONTENT_FIELD_NAMES) {
|
|
167
176
|
if (properties[field] && properties[field].type === 'string')
|
|
168
177
|
return field;
|
|
@@ -171,12 +180,41 @@ function inferFileField(inputSchema) {
|
|
|
171
180
|
const stringProps = Object.entries(properties)
|
|
172
181
|
.filter(([, v]) => v.type === 'string')
|
|
173
182
|
.map(([k]) => k);
|
|
183
|
+
// 2. Only one string property in the schema — use it
|
|
174
184
|
if (stringProps.length === 1)
|
|
175
185
|
return stringProps[0];
|
|
186
|
+
// 3. Only one required string property — use it
|
|
176
187
|
const requiredStrings = stringProps.filter(k => required.includes(k));
|
|
177
188
|
if (requiredStrings.length === 1)
|
|
178
189
|
return requiredStrings[0];
|
|
179
|
-
return
|
|
190
|
+
// 4. Schema exists but detection is ambiguous — return null so callers
|
|
191
|
+
// can surface a clear error instead of silently using the wrong field
|
|
192
|
+
return null;
|
|
193
|
+
}
|
|
194
|
+
/**
|
|
195
|
+
* Resolve the file field name, throwing a clear error when auto-detection fails.
|
|
196
|
+
* Used at call sites where a file is being injected into a JSON payload.
|
|
197
|
+
*/
|
|
198
|
+
function resolveFileField(fileFieldOption, inputSchema) {
|
|
199
|
+
if (fileFieldOption)
|
|
200
|
+
return fileFieldOption;
|
|
201
|
+
const detected = inferFileField(inputSchema);
|
|
202
|
+
if (detected !== null)
|
|
203
|
+
return detected;
|
|
204
|
+
// Detection failed — build a helpful error message
|
|
205
|
+
const props = inputSchema?.properties;
|
|
206
|
+
const stringFields = props
|
|
207
|
+
? Object.entries(props).filter(([, v]) => v.type === 'string').map(([k]) => k)
|
|
208
|
+
: [];
|
|
209
|
+
const fieldList = stringFields.length > 0
|
|
210
|
+
? `String fields in schema: ${stringFields.map(f => `"${f}"`).join(', ')}`
|
|
211
|
+
: 'No string fields found in schema';
|
|
212
|
+
throw new errors_1.CliError(`Could not determine which input field to use for file content.\n\n` +
|
|
213
|
+
`${fieldList}\n\n` +
|
|
214
|
+
`Specify the field explicitly:\n` +
|
|
215
|
+
` orch run <agent> --file-field <field> input.json\n` +
|
|
216
|
+
` orch run <agent> --data @input.json\n` +
|
|
217
|
+
` orch run <agent> --file <field>=input.json`);
|
|
180
218
|
}
|
|
181
219
|
function applySchemaDefaults(body, schema) {
|
|
182
220
|
if (!schema)
|
|
@@ -2037,7 +2075,7 @@ async function executeCloud(agentRef, file, options) {
|
|
|
2037
2075
|
const resolvedBody = await resolveJsonBody(options.data);
|
|
2038
2076
|
const bodyObj = JSON.parse(resolvedBody);
|
|
2039
2077
|
if (cloudEngine !== 'code_runtime') {
|
|
2040
|
-
const fieldName = options.fileField
|
|
2078
|
+
const fieldName = resolveFileField(options.fileField, agentMeta.input_schema);
|
|
2041
2079
|
if (filePaths.length === 1) {
|
|
2042
2080
|
await validateFilePath(filePaths[0]);
|
|
2043
2081
|
bodyObj[fieldName] = await promises_1.default.readFile(filePaths[0], 'utf-8');
|
|
@@ -2093,7 +2131,7 @@ async function executeCloud(agentRef, file, options) {
|
|
|
2093
2131
|
headers['Content-Type'] = 'application/json';
|
|
2094
2132
|
}
|
|
2095
2133
|
else if ((filePaths.length > 0 || options.metadata) && cloudEngine !== 'code_runtime') {
|
|
2096
|
-
const fieldName = options.fileField
|
|
2134
|
+
const fieldName = resolveFileField(options.fileField, agentMeta.input_schema);
|
|
2097
2135
|
let bodyObj = {};
|
|
2098
2136
|
if (options.metadata) {
|
|
2099
2137
|
try {
|
|
@@ -2204,8 +2242,11 @@ async function executeCloud(agentRef, file, options) {
|
|
|
2204
2242
|
? { spinner: null, dispose: () => { } }
|
|
2205
2243
|
: (0, spinner_1.createElapsedSpinner)(`Running ${org}/${parsed.agent}@${parsed.version}...`);
|
|
2206
2244
|
spinner?.start();
|
|
2207
|
-
// Streamed sandbox runs can take longer; use 10 min timeout.
|
|
2208
|
-
const
|
|
2245
|
+
// Streamed sandbox runs can take longer; use 10 min timeout (or --wait-timeout).
|
|
2246
|
+
const waitTimeoutSec = options.waitTimeout ? parseInt(options.waitTimeout, 10) : undefined;
|
|
2247
|
+
const timeoutMs = wantStream
|
|
2248
|
+
? (waitTimeoutSec && waitTimeoutSec > 0 ? waitTimeoutSec * 1000 : 600000)
|
|
2249
|
+
: undefined;
|
|
2209
2250
|
let response;
|
|
2210
2251
|
try {
|
|
2211
2252
|
response = await (0, api_1.safeFetchWithRetryForCalls)(url, {
|
|
@@ -2375,38 +2416,74 @@ async function executeCloud(agentRef, file, options) {
|
|
|
2375
2416
|
process.stderr.write(chalk_1.default.gray(`\nStreaming ${org}/${parsed.agent}@${parsed.version}:\n`));
|
|
2376
2417
|
}
|
|
2377
2418
|
let progressErrorShown = false;
|
|
2378
|
-
|
|
2379
|
-
|
|
2380
|
-
|
|
2381
|
-
|
|
2382
|
-
|
|
2383
|
-
|
|
2384
|
-
|
|
2419
|
+
let streamTimedOut = false;
|
|
2420
|
+
try {
|
|
2421
|
+
for await (const { event, data } of parseSSE(response.body)) {
|
|
2422
|
+
if (event === 'progress') {
|
|
2423
|
+
try {
|
|
2424
|
+
const parsed = JSON.parse(data);
|
|
2425
|
+
renderProgress(parsed, !!options.verbose);
|
|
2426
|
+
if (parsed.type === 'error') {
|
|
2427
|
+
progressErrorShown = true;
|
|
2428
|
+
}
|
|
2429
|
+
}
|
|
2430
|
+
catch {
|
|
2431
|
+
// ignore malformed progress events
|
|
2385
2432
|
}
|
|
2386
2433
|
}
|
|
2387
|
-
|
|
2388
|
-
|
|
2389
|
-
|
|
2390
|
-
|
|
2391
|
-
|
|
2392
|
-
|
|
2393
|
-
|
|
2434
|
+
else if (event === 'result') {
|
|
2435
|
+
try {
|
|
2436
|
+
finalPayload = JSON.parse(data);
|
|
2437
|
+
}
|
|
2438
|
+
catch {
|
|
2439
|
+
finalPayload = data;
|
|
2440
|
+
}
|
|
2394
2441
|
}
|
|
2395
|
-
|
|
2396
|
-
|
|
2442
|
+
else if (event === 'error') {
|
|
2443
|
+
hadError = true;
|
|
2444
|
+
try {
|
|
2445
|
+
finalPayload = JSON.parse(data);
|
|
2446
|
+
}
|
|
2447
|
+
catch {
|
|
2448
|
+
finalPayload = data;
|
|
2449
|
+
}
|
|
2397
2450
|
}
|
|
2398
2451
|
}
|
|
2399
|
-
|
|
2400
|
-
|
|
2401
|
-
|
|
2402
|
-
|
|
2403
|
-
|
|
2404
|
-
|
|
2405
|
-
|
|
2406
|
-
|
|
2452
|
+
}
|
|
2453
|
+
catch (streamErr) {
|
|
2454
|
+
// BUG-6: Detect timeout/abort errors — the server-side job may still be running.
|
|
2455
|
+
const errName = streamErr instanceof DOMException ? streamErr.name
|
|
2456
|
+
: streamErr instanceof Error ? streamErr.name
|
|
2457
|
+
: '';
|
|
2458
|
+
if (errName === 'TimeoutError' || errName === 'AbortError') {
|
|
2459
|
+
streamTimedOut = true;
|
|
2460
|
+
}
|
|
2461
|
+
else {
|
|
2462
|
+
throw streamErr;
|
|
2407
2463
|
}
|
|
2408
2464
|
}
|
|
2409
2465
|
process.stderr.write('\n');
|
|
2466
|
+
// BUG-6: When the stream timed out, the run is likely still in progress on server.
|
|
2467
|
+
if (streamTimedOut) {
|
|
2468
|
+
const runId = response.headers?.get?.('x-run-id');
|
|
2469
|
+
process.stderr.write(chalk_1.default.yellow('\nRun still in progress on server — the CLI stopped waiting.\n') +
|
|
2470
|
+
(runId
|
|
2471
|
+
? chalk_1.default.yellow(`Check status with: orch logs ${runId}\n`)
|
|
2472
|
+
: chalk_1.default.yellow('Check recent runs with: orch runs\n')) +
|
|
2473
|
+
(options.waitTimeout
|
|
2474
|
+
? ''
|
|
2475
|
+
: chalk_1.default.gray('Tip: Use --wait-timeout <seconds> to wait longer.\n')));
|
|
2476
|
+
await (0, analytics_1.track)('cli_run', {
|
|
2477
|
+
agent: `${org}/${parsed.agent}@${parsed.version}`,
|
|
2478
|
+
input_type: hasInjection ? 'file_injection' : unkeyedFileArgs.length > 0 ? 'file' : options.data ? 'json' : 'empty',
|
|
2479
|
+
mode: 'cloud',
|
|
2480
|
+
streamed: true,
|
|
2481
|
+
timed_out: true,
|
|
2482
|
+
});
|
|
2483
|
+
const err = new errors_1.CliError('CLI wait timeout — run still in progress on server', errors_1.ExitCodes.TIMEOUT);
|
|
2484
|
+
err.displayed = true;
|
|
2485
|
+
throw err;
|
|
2486
|
+
}
|
|
2410
2487
|
await (0, analytics_1.track)('cli_run', {
|
|
2411
2488
|
agent: `${org}/${parsed.agent}@${parsed.version}`,
|
|
2412
2489
|
input_type: hasInjection ? 'file_injection' : unkeyedFileArgs.length > 0 ? 'file' : options.data ? 'json' : 'empty',
|
|
@@ -2794,6 +2871,7 @@ function registerRunCommand(program) {
|
|
|
2794
2871
|
.option('--skills-only <skills>', 'Use only these skills')
|
|
2795
2872
|
.option('--no-skills', 'Ignore default skills')
|
|
2796
2873
|
.option('--no-stream', 'Disable real-time streaming for stream-capable sandbox runs')
|
|
2874
|
+
.option('--wait-timeout <seconds>', 'Max seconds to wait for streaming result (default: 600)')
|
|
2797
2875
|
// Cloud-only options
|
|
2798
2876
|
.option('--endpoint <endpoint>', 'Override agent endpoint (cloud only)')
|
|
2799
2877
|
.option('--tenant <tenant>', 'Tenant identifier for multi-tenant callers (cloud only)')
|
|
@@ -0,0 +1,213 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
exports.registerScaffoldCommand = registerScaffoldCommand;
|
|
7
|
+
const promises_1 = __importDefault(require("fs/promises"));
|
|
8
|
+
const path_1 = __importDefault(require("path"));
|
|
9
|
+
const config_1 = require("../lib/config");
|
|
10
|
+
const api_1 = require("../lib/api");
|
|
11
|
+
const agent_ref_1 = require("../lib/agent-ref");
|
|
12
|
+
const errors_1 = require("../lib/errors");
|
|
13
|
+
const analytics_1 = require("../lib/analytics");
|
|
14
|
+
const output_1 = require("../lib/output");
|
|
15
|
+
const scaffold_orchestration_1 = require("../lib/scaffold-orchestration");
|
|
16
|
+
/**
 * Check whether a path exists (any file type), without throwing.
 * Uses fs.promises.access: resolves true on success, false on any error.
 */
async function fileExists(filePath) {
    return promises_1.default.access(filePath).then(() => true, () => false);
}
|
|
25
|
+
/**
 * Normalize an agent "type" value to its canonical form.
 * Falsy input defaults to 'agent'; 'agentic' → 'agent', 'code' → 'tool';
 * anything else is returned trimmed and lower-cased as-is.
 */
function asCanonicalType(value) {
    const lowered = String(value || 'agent').trim().toLowerCase();
    const aliases = { agentic: 'agent', code: 'tool' };
    return Object.hasOwn(aliases, lowered) ? aliases[lowered] : lowered;
}
|
|
33
|
+
/**
 * Return the value if it is a non-null, non-array object; otherwise undefined.
 */
function asObject(value) {
    const isPlainObject = Boolean(value) && typeof value === 'object' && !Array.isArray(value);
    return isPlainObject ? value : undefined;
}
|
|
38
|
+
/** Build the not-found error message for a dependency agent ref. */
function formatDependencyNotFound(org, agent, version) {
    const ref = `${org}/${agent}@${version}`;
    return `Dependency agent not found: ${ref}`;
}
|
|
41
|
+
/**
 * Translate a dependency-lookup failure into a user-facing CliError.
 * ApiError statuses get tailored messages (404 not-found, 401 auth,
 * otherwise a generic HTTP failure); non-ApiError values are rethrown as-is.
 * Always throws.
 */
function throwDependencyResolutionError(err, org, agent, version) {
    if (!(err instanceof api_1.ApiError)) {
        throw err;
    }
    switch (err.status) {
        case 404:
            throw new errors_1.CliError(formatDependencyNotFound(org, agent, version));
        case 401:
            throw new errors_1.CliError(`Authentication required to resolve ${org}/${agent}@${version}. Run \`orch login\` first.`);
        default:
            throw new errors_1.CliError(`Failed to resolve ${org}/${agent}@${version}: ${err.message} (HTTP ${err.status})`);
    }
}
|
|
53
|
+
/**
 * Reject dependency agents that cannot be orchestrated: skills, and agents
 * explicitly marked callable: false. Returns nothing on success.
 * @throws {errors_1.CliError} when the dependency is not usable
 */
function assertDependencyIsCallable(agent, org, name, version) {
    const ref = `${org}/${name}@${version}`;
    if (asCanonicalType(agent.type) === 'skill') {
        throw new errors_1.CliError(`Dependency ${ref} is a skill. Skills are not callable agents.`);
    }
    // Only an explicit `callable: false` blocks orchestration; absent/true is fine.
    if (agent.callable === false) {
        throw new errors_1.CliError(`Dependency ${ref} has callable: false and cannot be used for orchestration.`);
    }
}
|
|
62
|
+
/**
 * Resolve raw dependency refs (e.g. "org/agent@version") into pinned
 * dependency descriptors by querying the registry API.
 *
 * Throws CliError when a ref has no org and no default org is configured,
 * when an API lookup fails (via throwDependencyResolutionError), when a
 * dependency is not callable, or when the same dependency id appears with
 * conflicting versions after dedupe.
 *
 * @returns {{dependencies: Array, duplicates: Array}} deduped dependency
 *   descriptors plus the duplicate refs that were removed.
 */
async function resolveDependencies(config, rawRefs) {
    // Memoize the workspace-id promise per org so concurrent refs for the
    // same org share one lookup instead of issuing duplicate calls.
    const workspaceByOrg = new Map();
    const workspaceForOrg = async (org) => {
        if (!workspaceByOrg.has(org)) {
            // Best-effort: a failed workspace lookup resolves to undefined so
            // getAgentWithFallback can proceed without a workspace id.
            workspaceByOrg.set(org, (0, api_1.resolveWorkspaceIdForOrg)(config, org).catch(() => undefined));
        }
        return workspaceByOrg.get(org);
    };
    // Resolve every ref in parallel; any failure rejects the whole batch.
    const resolved = await Promise.all(rawRefs.map(async (rawRef) => {
        const parsed = (0, agent_ref_1.parseAgentRef)(rawRef);
        const org = parsed.org || config.defaultOrg;
        if (!org) {
            throw new errors_1.CliError(`Missing org in '${rawRef}'. Use org/agent[@version] format or set default org with \`orch config set default_org <org>\`.`);
        }
        const workspaceId = await workspaceForOrg(org);
        let depAgent;
        try {
            depAgent = await (0, api_1.getAgentWithFallback)(config, org, parsed.agent, parsed.version, workspaceId);
        }
        catch (err) {
            // Always throws a CliError (or rethrows), so depAgent is set below.
            throwDependencyResolutionError(err, org, parsed.agent, parsed.version);
        }
        // Pin to the version the API actually resolved, falling back to the
        // version from the ref when the API response omits one.
        const pinnedVersion = depAgent.version || parsed.version;
        assertDependencyIsCallable(depAgent, org, parsed.agent, pinnedVersion);
        return {
            org,
            name: parsed.agent,
            version: pinnedVersion,
            description: typeof depAgent.description === 'string' ? depAgent.description : undefined,
            inputSchema: asObject(depAgent.input_schema),
        };
    }));
    // Collapse exact duplicates; same id at different versions is a conflict.
    const deduped = (0, scaffold_orchestration_1.dedupeOrchestrationDependencies)(resolved);
    if (deduped.conflicts.length > 0) {
        const details = deduped.conflicts
            .map((conflict) => `- ${conflict.id}: ${conflict.versions.join(', ')}`)
            .join('\n');
        throw new errors_1.CliError(`Conflicting dependency versions provided:\n${details}\nUse a single version per dependency.`);
    }
    return {
        dependencies: deduped.dependencies,
        duplicates: deduped.duplicates,
    };
}
|
|
106
|
+
/**
 * Ensure the scaffold output directory exists.
 * Creates it (recursively) when missing; when the path already exists it
 * must be a directory, otherwise a CliError is thrown.
 */
async function ensureOutputDirectory(outputDir) {
    if (await fileExists(outputDir)) {
        const info = await promises_1.default.stat(outputDir);
        if (!info.isDirectory()) {
            throw new errors_1.CliError(`Output path exists but is not a directory: ${outputDir}`);
        }
        return;
    }
    await promises_1.default.mkdir(outputDir, { recursive: true });
}
|
|
117
|
+
/**
 * Register the `orch scaffold` command group and its
 * `scaffold orchestration <agents...>` subcommand, which generates a
 * manifest (orchagent.json), prompt (prompt.md) and schema (schema.json)
 * from published dependency agents.
 */
function registerScaffoldCommand(program) {
    // Bare `orch scaffold` just prints the group help.
    const scaffold = program
        .command('scaffold')
        .description('Scaffold projects from existing agents')
        .action(() => { scaffold.help(); });
    scaffold
        .command('orchestration <agents...>')
        .description('Generate a managed-loop orchestrator scaffold from published dependency agents')
        .option('--profile <name>', 'Use API key from named profile')
        .option('--name <name>', 'Orchestrator name (default: output directory name)')
        .option('--output <dir>', 'Output directory (default: current directory)')
        .option('--force', 'Overwrite existing scaffold files')
        .option('--json', 'Print scaffold summary as JSON')
        .action(async (agents, options) => {
            const config = await (0, config_1.getResolvedConfig)({}, options.profile);
            // Output dir defaults to CWD; created on demand below.
            const outputDir = path_1.default.resolve(options.output || process.cwd());
            await ensureOutputDirectory(outputDir);
            // Orchestrator name defaults to the output directory's basename.
            const manifestName = (options.name || path_1.default.basename(outputDir)).trim();
            const nameErrors = (0, scaffold_orchestration_1.validateScaffoldAgentName)(manifestName);
            if (nameErrors.length > 0) {
                throw new errors_1.CliError(nameErrors.join('\n'));
            }
            const manifestPath = path_1.default.join(outputDir, 'orchagent.json');
            const promptPath = path_1.default.join(outputDir, 'prompt.md');
            const schemaPath = path_1.default.join(outputDir, 'schema.json');
            const targetFiles = [manifestPath, promptPath, schemaPath];
            // Without --force, refuse to clobber any pre-existing target file.
            if (!options.force) {
                const existing = (await Promise.all(targetFiles.map(async (filePath) => (await fileExists(filePath) ? filePath : null)))).filter((filePath) => Boolean(filePath));
                if (existing.length > 0) {
                    const rel = existing.map((f) => path_1.default.relative(outputDir, f));
                    throw new errors_1.CliError(`Refusing to overwrite existing files in ${outputDir}: ${rel.join(', ')}. Re-run with --force to overwrite.`);
                }
            }
            // Resolve and pin dependency agents from the registry.
            const { dependencies, duplicates } = await resolveDependencies(config, agents);
            if (dependencies.length === 0) {
                throw new errors_1.CliError('No dependency agents were resolved.');
            }
            // Build the scaffold artifacts from the resolved dependencies.
            const customTools = (0, scaffold_orchestration_1.buildOrchestrationCustomTools)(dependencies);
            const manifest = (0, scaffold_orchestration_1.buildOrchestrationManifest)({
                name: manifestName,
                dependencies,
                customTools,
            });
            const prompt = (0, scaffold_orchestration_1.buildOrchestrationPrompt)({
                name: manifestName,
                dependencies,
                customTools,
            });
            const schema = (0, scaffold_orchestration_1.buildOrchestrationSchema)();
            await promises_1.default.writeFile(manifestPath, JSON.stringify(manifest, null, 2) + '\n');
            await promises_1.default.writeFile(promptPath, prompt);
            await promises_1.default.writeFile(schemaPath, JSON.stringify(schema, null, 2) + '\n');
            await (0, analytics_1.track)('cli_scaffold_orchestration', {
                dependency_count: dependencies.length,
                duplicates_removed: duplicates.length,
                output_custom: Boolean(options.output),
                force: Boolean(options.force),
                json: Boolean(options.json),
            });
            // --json: machine-readable summary instead of the human report.
            if (options.json) {
                (0, output_1.printJson)({
                    name: manifestName,
                    output_dir: outputDir,
                    dependencies: dependencies.map((dep) => (0, scaffold_orchestration_1.dependencyRef)(dep)),
                    custom_tools: customTools.map((tool) => tool.name),
                    files: ['orchagent.json', 'prompt.md', 'schema.json'],
                    duplicates_removed: duplicates,
                });
                return;
            }
            // Human-readable summary of what was written.
            process.stdout.write(`Scaffolded orchestrator "${manifestName}" in ${outputDir}\n`);
            process.stdout.write('\nDependencies:\n');
            for (const dep of dependencies) {
                process.stdout.write(` - ${(0, scaffold_orchestration_1.dependencyRef)(dep)}\n`);
            }
            if (duplicates.length > 0) {
                process.stdout.write('\nRemoved duplicate dependency refs:\n');
                for (const dup of duplicates) {
                    process.stdout.write(` - ${dup}\n`);
                }
            }
            process.stdout.write('\nFiles written:\n');
            process.stdout.write(' - orchagent.json\n');
            process.stdout.write(' - prompt.md\n');
            process.stdout.write(' - schema.json\n');
            // Next-steps list includes a `cd` step only when writing elsewhere.
            process.stdout.write('\nNext steps:\n');
            if (path_1.default.resolve(outputDir) !== path_1.default.resolve(process.cwd())) {
                process.stdout.write(` 1. cd ${outputDir}\n`);
                process.stdout.write(' 2. Review prompt.md and schema.json\n');
                process.stdout.write(' 3. Publish: orch publish\n');
            }
            else {
                process.stdout.write(' 1. Review prompt.md and schema.json\n');
                process.stdout.write(' 2. Publish: orch publish\n');
            }
        });
}
|
|
@@ -230,7 +230,7 @@ function registerScheduleCommand(program) {
|
|
|
230
230
|
}
|
|
231
231
|
else {
|
|
232
232
|
if (s.webhook_url) {
|
|
233
|
-
process.stdout.write(`\n ${chalk_1.default.bold('Webhook URL')} (save this —
|
|
233
|
+
process.stdout.write(`\n ${chalk_1.default.bold('Webhook URL')} (save this — retrieve later with ${chalk_1.default.cyan('orch schedule info --reveal')}):\n`);
|
|
234
234
|
process.stdout.write(` ${s.webhook_url}\n`);
|
|
235
235
|
}
|
|
236
236
|
}
|
|
@@ -444,8 +444,11 @@ function registerScheduleCommand(program) {
|
|
|
444
444
|
if (s.webhook_url) {
|
|
445
445
|
process.stdout.write(` Webhook: ${s.webhook_url}\n`);
|
|
446
446
|
}
|
|
447
|
-
else if (s.schedule_type === 'webhook' &&
|
|
448
|
-
process.stdout.write(` Webhook: ${chalk_1.default.
|
|
447
|
+
else if (s.schedule_type === 'webhook' && options.reveal) {
|
|
448
|
+
process.stdout.write(` Webhook: ${chalk_1.default.red('Failed to reveal — you may need owner permissions')}\n`);
|
|
449
|
+
}
|
|
450
|
+
else if (s.schedule_type === 'webhook') {
|
|
451
|
+
process.stdout.write(` Webhook: ${chalk_1.default.gray('(redacted — use --reveal to show, or regenerate-webhook if lost)')}\n`);
|
|
449
452
|
}
|
|
450
453
|
process.stdout.write(` Enabled: ${s.enabled ? chalk_1.default.green('yes') : chalk_1.default.red('no')}\n`);
|
|
451
454
|
process.stdout.write(` Auto-update: ${s.auto_update === false ? chalk_1.default.yellow('pinned') : chalk_1.default.green('yes')}\n`);
|
|
@@ -562,4 +565,38 @@ function registerScheduleCommand(program) {
|
|
|
562
565
|
process.stdout.write(chalk_1.default.red('\u2717') + ' Test alert delivery failed\n');
|
|
563
566
|
}
|
|
564
567
|
});
|
|
568
|
+
// orch schedule regenerate-webhook <schedule-id>
|
|
569
|
+
schedule
|
|
570
|
+
.command('regenerate-webhook <schedule-id>')
|
|
571
|
+
.description('Regenerate the webhook secret (invalidates old URL)')
|
|
572
|
+
.option('--workspace <slug>', 'Workspace slug (default: current workspace)')
|
|
573
|
+
.option('-y, --yes', 'Skip confirmation prompt')
|
|
574
|
+
.action(async (partialScheduleId, options) => {
|
|
575
|
+
const config = await (0, config_1.getResolvedConfig)();
|
|
576
|
+
if (!config.apiKey) {
|
|
577
|
+
throw new errors_1.CliError('Missing API key. Run `orch login` first.');
|
|
578
|
+
}
|
|
579
|
+
const workspaceId = await resolveWorkspaceId(config, options.workspace);
|
|
580
|
+
const scheduleId = await resolveScheduleId(config, partialScheduleId, workspaceId);
|
|
581
|
+
if (!options.yes) {
|
|
582
|
+
const rl = promises_1.default.createInterface({
|
|
583
|
+
input: process.stdin,
|
|
584
|
+
output: process.stdout,
|
|
585
|
+
});
|
|
586
|
+
const answer = await rl.question(chalk_1.default.yellow('Warning: This will invalidate the current webhook URL.\n') +
|
|
587
|
+
'Any integrations using the old URL will stop working.\n' +
|
|
588
|
+
'Regenerate webhook secret? (y/N): ');
|
|
589
|
+
rl.close();
|
|
590
|
+
if (answer.trim().toLowerCase() !== 'y' && answer.trim().toLowerCase() !== 'yes') {
|
|
591
|
+
process.stdout.write('Cancelled.\n');
|
|
592
|
+
return;
|
|
593
|
+
}
|
|
594
|
+
}
|
|
595
|
+
const result = await (0, api_1.request)(config, 'POST', `/workspaces/${workspaceId}/schedules/${scheduleId}/regenerate-webhook`);
|
|
596
|
+
process.stdout.write(chalk_1.default.green('\u2713') + ' Webhook secret regenerated\n\n');
|
|
597
|
+
process.stdout.write(` ${chalk_1.default.bold('New Webhook URL')} (save this — retrieve later with ${chalk_1.default.cyan('orch schedule info --reveal')}):\n`);
|
|
598
|
+
process.stdout.write(` ${result.webhook_url}\n\n`);
|
|
599
|
+
process.stdout.write(chalk_1.default.yellow(' The old webhook URL no longer works.\n'));
|
|
600
|
+
process.stdout.write('\n');
|
|
601
|
+
});
|
|
565
602
|
}
|