@zibby/cli 0.1.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,101 @@
1
+ import { spawn } from 'child_process';
2
+ import { resolve, join, dirname } from 'path';
3
+ import { fileURLToPath } from 'url';
4
+ import chalk from 'chalk';
5
+
6
+ const __filename = fileURLToPath(import.meta.url);
7
+ const __dirname = dirname(__filename);
8
+
9
/**
 * Runs a bash script from the core package's scripts/ directory, passing
 * feature toggles to it through environment variables.
 *
 * @param {string} scriptName - File name of the script under core/scripts/.
 * @param {string} description - Human-readable label; currently unused but
 *   kept so call sites stay self-documenting.
 * @param {boolean} [headed=false] - Run headed; when false, ZIBBY_HEADLESS=1 is exported.
 * @param {boolean} [cloudSync=false] - When true, exports ZIBBY_CLOUD_SYNC=1.
 * @param {string} [video='on'] - Video recording mode, exported as ZIBBY_VIDEO.
 * @param {{width?: number, height?: number}|null} [viewport=null] - Optional
 *   viewport size; missing dimensions fall back to 1280x720.
 * @returns {Promise<{success: true}>} Resolves when the script exits with code 0.
 * @throws {Error} Rejects when the script exits non-zero, is killed by a
 *   signal, or fails to spawn at all.
 */
async function runScript(scriptName, description, headed = false, cloudSync = false, video = 'on', viewport = null) {
  const corePackagePath = resolve(__dirname, '../../../core');
  const scriptPath = join(corePackagePath, 'scripts', scriptName);

  const env = { ...process.env };
  // Default is headed mode, so only set env var for headless
  if (!headed) {
    env.ZIBBY_HEADLESS = '1';
  }
  if (cloudSync) {
    env.ZIBBY_CLOUD_SYNC = '1';
  }
  if (video) {
    env.ZIBBY_VIDEO = video;
  }
  if (viewport) {
    env.ZIBBY_VIEWPORT_WIDTH = String(viewport.width || 1280);
    env.ZIBBY_VIEWPORT_HEIGHT = String(viewport.height || 720);
  }

  // new Promise is the correct adapter here: spawn is an event/callback API.
  return new Promise((resolveFn, reject) => {
    const script = spawn('bash', [scriptPath], {
      cwd: process.cwd(),
      stdio: 'inherit',
      env,
    });

    script.on('close', (code, signal) => {
      if (code === 0) {
        resolveFn({ success: true });
      } else if (signal) {
        // `code` is null when the child is killed by a signal; report the
        // signal instead of the misleading "exited with code null".
        reject(new Error(`Script terminated by signal ${signal}`));
      } else {
        reject(new Error(`Script exited with code ${code}`));
      }
    });

    script.on('error', (error) => {
      reject(error);
    });
  });
}
49
+
50
/**
 * Sets up the official Playwright MCP by delegating to the core setup script.
 * Errors propagate to the caller (e.g. init.js wraps this in a try/catch).
 */
export async function setupPlaywrightMcpCommand(options = {}) {
  const { headed, cloudSync, video, viewport } = options;
  await runScript(
    'setup-official-playwright-mcp.sh',
    'Setting up official Playwright MCP',
    headed,
    cloudSync,
    video,
    viewport,
  );
}
54
+
55
/**
 * Runs the complete CI/CD setup script, reporting success on stdout.
 * Exits the process with code 1 when the script fails.
 */
export async function setupCiCommand(_options) {
  try {
    await runScript('setup-ci.sh', 'Complete CI/CD setup');
    console.log(chalk.green('\n✅ CI/CD setup complete!\n'));
  } catch (err) {
    // Any spawn/exit failure from runScript lands here.
    console.log(chalk.red(`\n❌ Error: ${err.message}\n`));
    process.exit(1);
  }
}
64
+
65
/**
 * Runs Playwright tests through the core test-with-video.sh script so every
 * test is recorded on video. Script output streams straight to the console.
 *
 * @param {string} [testFile] - Test file or directory to run; defaults to 'tests/'.
 * @param {{headed?: boolean}} options - When `headed` is set, 'headed' is
 *   appended as a script argument.
 */
export async function testWithVideoCommand(testFile, options) {
  try {
    const corePackagePath = resolve(__dirname, '../../../core');
    const scriptPath = join(corePackagePath, 'scripts', 'test-with-video.sh');

    const args = [scriptPath, testFile || 'tests/'];
    if (options.headed) {
      args.push('headed');
    }

    console.log(chalk.cyan('\n🎥 Running tests with video recording...\n'));
    console.log(chalk.gray('━'.repeat(50)));

    const script = spawn('bash', args, {
      cwd: process.cwd(),
      stdio: 'inherit',
    });

    script.on('close', (code) => {
      if (code === 0) {
        console.log(chalk.green('\n✅ Tests complete!\n'));
      } else {
        console.log(chalk.red(`\n❌ Tests failed with code ${code}\n`));
        // `code` is null when the child is killed by a signal; fall back to 1
        // so a signal-terminated run still exits as a failure (exit(null)
        // would exit 0 and mask it).
        process.exit(code ?? 1);
      }
    });

    script.on('error', (error) => {
      // Spawn failure (e.g. bash not found) — the script never ran.
      console.log(chalk.red(`\n❌ Error: ${error.message}\n`));
      process.exit(1);
    });
  } catch (error) {
    console.log(chalk.red(`\n❌ Error: ${error.message}\n`));
    process.exit(1);
  }
}
101
+
@@ -0,0 +1,163 @@
1
+ import { readFileSync, existsSync, statSync } from 'fs';
2
+ import { join, basename } from 'path';
3
+ import { glob } from 'glob';
4
+ import chalk from 'chalk';
5
+ import ora from 'ora';
6
+ import dotenv from 'dotenv';
7
+ import fetch from 'node-fetch';
8
+ import { getApiUrl, getCurrentEnvironment, getFrontendUrl } from '../config/environments.js';
9
+ import { getSessionToken, getUserInfo } from '../config/config.js';
10
+
11
+ dotenv.config();
12
+
13
/**
 * Maps a spec file path to its generated Playwright test path: the specs
 * directory segment becomes the generated-tests directory, and the .txt
 * extension becomes .spec.js.
 *
 * @param {string} specPath - Path to the source spec (e.g. 'test-specs/login.txt').
 * @param {object} config - Optional config with paths.specs / paths.generated overrides.
 * @returns {string} The corresponding generated test path.
 */
function getGeneratedTestPath(specPath, config) {
  const fromDir = config?.paths?.specs || 'test-specs';
  const toDir = config?.paths?.generated || 'tests';

  const relocated = specPath.replace(fromDir, toDir);
  return relocated.replace(/\.txt$/, '.spec.js');
}
21
+
22
/**
 * Uploads the most recent local test run (video, events, generated test code,
 * optional title) to Zibby Cloud for the given spec.
 *
 * Auth model: a project API key (via --project or ZIBBY_API_KEY) plus a user
 * session token (`zibby login` or ZIBBY_USER_TOKEN). Exits the process with
 * code 1 on any missing credential, missing artifacts, or failed upload.
 *
 * @param {string} specPath - Path to the spec the run was generated from.
 * @param {object} options - CLI options: project, agent, collection, folder.
 */
export async function uploadCommand(specPath, options) {
  const env = getCurrentEnvironment();

  console.log(chalk.bold.cyan('\n📤 Zibby Cloud Upload\n'));
  console.log(chalk.gray('━'.repeat(50)));
  console.log(chalk.white(`Environment: ${chalk.cyan(env.name)}\n`));

  const cwd = process.cwd();

  // NOTE(review): --project carries the project API *key* here, not a project
  // id — confirm the flag naming is intentional.
  const apiKey = options.project || process.env.ZIBBY_API_KEY;

  if (!apiKey) {
    console.log(chalk.red('❌ Error: Project API key required\n'));
    console.log(chalk.white('Provide via:'));
    console.log(chalk.gray(' --project <token>'));
    console.log(chalk.gray(' or ZIBBY_API_KEY in .env\n'));
    process.exit(1);
  }

  // ZIBBY_USER_TOKEN (CI/CD personal access token) takes precedence over the
  // locally stored session token.
  const sessionToken = process.env.ZIBBY_USER_TOKEN || getSessionToken();
  const userInfo = getUserInfo();

  if (!sessionToken) {
    console.log(chalk.red('\n❌ Error: User authentication required\n'));
    console.log(chalk.cyan('Option 1 (Local):'));
    console.log(chalk.gray(' zibby login\n'));
    console.log(chalk.cyan('Option 2 (CI/CD):'));
    console.log(chalk.gray(' export ZIBBY_USER_TOKEN=zby_pat_xxxxx\n'));
    process.exit(1);
  }

  if (userInfo) {
    console.log(chalk.gray(`Authenticated: ${userInfo.email}\n`));
  } else {
    console.log(chalk.gray('Authenticated: Using Personal Access Token\n'));
  }

  // Collect session directories, excluding any whose name contains
  // "session_", newest (by mtime) first.
  const sessionFolders = glob.sync('test-results/sessions/*', { cwd })
    .filter(f => {
      const stat = statSync(join(cwd, f));
      return stat.isDirectory() && !/session_/.test(f);
    })
    .sort((a, b) => {
      const statA = statSync(join(cwd, a));
      const statB = statSync(join(cwd, b));
      // Descending mtime: index 0 is the most recent session.
      return statB.mtimeMs - statA.mtimeMs;
    });

  if (sessionFolders.length === 0) {
    console.log(chalk.red('❌ No test results found\n'));
    console.log(chalk.white('Run a test first: zibby run <spec-path>\n'));
    process.exit(1);
  }

  const latestSession = sessionFolders[0];
  const executeLiveDir = join(cwd, latestSession, 'execute_live');

  if (!existsSync(executeLiveDir)) {
    console.log(chalk.red(`❌ No execution data found in ${latestSession}\n`));
    process.exit(1);
  }

  // Expected artifact locations within the latest session.
  const videoPath = join(executeLiveDir, 'recording.webm');
  const eventsPath = join(executeLiveDir, 'events.json');
  const titlePath = join(executeLiveDir, 'title.txt');
  // Empty config: always uses the default test-specs -> tests mapping here.
  const generatedTestPath = getGeneratedTestPath(specPath, {});

  console.log(chalk.white('Found artifacts:'));
  console.log(chalk.gray(` Session: ${basename(latestSession)}`));
  console.log(chalk.gray(` Video: ${existsSync(videoPath) ? '✓' : '✗'}`));
  console.log(chalk.gray(` Events: ${existsSync(eventsPath) ? '✓' : '✗'}`));
  console.log(chalk.gray(` Test: ${existsSync(generatedTestPath) ? '✓' : '✗'}\n`));

  const uploadSpinner = ora(`Uploading to ${env.name}...`).start();

  try {
    // NOTE(review): global FormData/Blob (Node 18+) used with the imported
    // node-fetch — verify this node-fetch version accepts a WHATWG FormData body.
    const formData = new FormData();

    formData.append('specPath', specPath);
    formData.append('agent', options.agent || 'cursor');
    formData.append('agentType', options.agent || 'cursor');

    if (options.collection) {
      formData.append('collectionIdOrName', options.collection);
    }
    if (options.folder) {
      formData.append('folder', options.folder);
    }

    // Each artifact is optional: attach only what exists on disk.
    if (existsSync(videoPath)) {
      const videoBlob = new Blob([readFileSync(videoPath)]);
      formData.append('video', videoBlob, 'recording.webm');
    }

    if (existsSync(eventsPath)) {
      const events = readFileSync(eventsPath, 'utf-8');
      formData.append('events', events);
    }

    if (existsSync(generatedTestPath)) {
      const testCode = readFileSync(generatedTestPath, 'utf-8');
      formData.append('testCode', testCode);
    }

    if (existsSync(titlePath)) {
      const title = readFileSync(titlePath, 'utf-8').trim();
      formData.append('title', title);
    }

    const apiUrl = getApiUrl();
    const response = await fetch(`${apiUrl}/executions/upload`, {
      method: 'POST',
      headers: {
        'Authorization': `Bearer ${apiKey}`,
        'X-User-Token': sessionToken,
      },
      body: formData,
    });

    if (!response.ok) {
      const body = await response.text();
      uploadSpinner.fail(`Upload failed: ${response.status}`);
      console.log(chalk.red(`${body}\n`));
      process.exit(1);
    }

    const result = await response.json();
    uploadSpinner.succeed('Upload complete!');

    console.log(chalk.green(`\n✓ Test uploaded successfully`));
    console.log(chalk.gray(` Execution ID: ${result.executionId}`));

    // Build the web UI link for the uploaded run.
    const frontendUrl = getFrontendUrl();
    const resultsUrl = `${frontendUrl}/projects/${result.projectId}/runs/${result.executionId}`;
    console.log(chalk.cyan(`\n View results: ${resultsUrl}\n`));

  } catch (err) {
    uploadSpinner.fail('Upload failed');
    console.log(chalk.red(`\n${err.message}\n`));
    process.exit(1);
  }
}
@@ -0,0 +1,30 @@
1
+ import { organizeVideos } from '@zibby/core';
2
+ import { resolve } from 'path';
3
+ import chalk from 'chalk';
4
+ import ora from 'ora';
5
+
6
/**
 * Moves recorded test videos so each one sits next to its test file.
 * The actual file moves are delegated to organizeVideos() from @zibby/core.
 * Exits with code 1 only when organizeVideos throws.
 */
export async function videoCommand(_options) {
  const spinner = ora('Organizing test videos...').start();

  try {
    const outcome = await organizeVideos({
      projectRoot: resolve(process.cwd()),
      verbose: false,
    });

    if (!outcome.success) {
      // A reported (non-thrown) failure: show it but do not change exit code.
      spinner.fail('Failed to organize videos');
      if (outcome.error) {
        console.log(chalk.red(`Error: ${outcome.error}\n`));
      }
      return;
    }

    spinner.succeed(`Organized ${outcome.movedCount} video(s)`);
    console.log(chalk.gray('\n📂 Videos are now next to their test files in tests/\n'));
  } catch (error) {
    spinner.fail('Failed to organize videos');
    console.log(chalk.red(`\n❌ Error: ${error.message}\n`));
    process.exit(1);
  }
}
30
+
@@ -0,0 +1,369 @@
1
+ import { readFileSync, writeFileSync, existsSync, mkdirSync } from 'fs';
2
+ import { resolve, join } from 'path';
3
+ import chalk from 'chalk';
4
+ import ora from 'ora';
5
+ import dotenv from 'dotenv';
6
+ import fetch from 'node-fetch';
7
+ import { getApiUrl, getCurrentEnvironment } from '../config/environments.js';
8
+ import { validateGraphConfig } from '@zibby/core/framework/graph-compiler.js';
9
+ import { generateWorkflowCode, generateNodeConfigsJson } from '@zibby/core/framework/code-generator.js';
10
+ import '@zibby/core/templates/register-nodes.js';
11
+
12
+ dotenv.config();
13
+
14
/**
 * Resolves the API key and project id from CLI options, falling back to
 * environment variables. Prints usage help and exits with code 1 when
 * either value is missing.
 *
 * @param {{apiKey?: string, project?: string}} options
 * @returns {{apiKey: string, projectId: string}}
 */
function resolveAuth(options) {
  const apiKey = options.apiKey || process.env.ZIBBY_API_KEY;
  const projectId = options.project || process.env.ZIBBY_PROJECT_ID;

  if (!apiKey) {
    console.log(chalk.red('\n ZIBBY_API_KEY not set'));
    console.log(chalk.gray(' Add to .env: ZIBBY_API_KEY=zby_xxx\n'));
    process.exit(1);
  }

  if (!projectId) {
    console.log(chalk.red('\n --project or ZIBBY_PROJECT_ID is required'));
    console.log(chalk.gray(' Example: zibby workflow download --project <id> --type analysis\n'));
    process.exit(1);
  }

  return { apiKey, projectId };
}
31
+
32
// Workflow kinds the cloud API recognizes; used for --type validation and listing.
const VALID_TYPES = ['analysis', 'implementation', 'run_test'];
33
+
34
/**
 * Validates the --type option against VALID_TYPES. Prints help and exits
 * with code 1 when the type is missing or unrecognized.
 *
 * @param {{type?: string}} options
 * @returns {string} The validated workflow type.
 */
function resolveWorkflowType({ type }) {
  if (!type) {
    console.log(chalk.red('\n --type is required'));
    console.log(chalk.gray(` Valid types: ${VALID_TYPES.join(', ')}\n`));
    process.exit(1);
  }

  if (!VALID_TYPES.includes(type)) {
    console.log(chalk.red(`\n Invalid workflow type: "${type}"`));
    console.log(chalk.gray(` Valid types: ${VALID_TYPES.join(', ')}\n`));
    process.exit(1);
  }

  return type;
}
48
+
49
/**
 * Downloads a project workflow graph from the cloud and writes it to local
 * files via writeWorkflowFile(). When the project has no custom graph, exits
 * cleanly unless --include-default is set, in which case the built-in default
 * graph for the type is materialized instead.
 *
 * Exits with code 1 on API errors or when no default graph exists; exits 0
 * when there is nothing to download and --include-default is absent.
 *
 * @param {object} options - CLI options: apiKey/project (see resolveAuth),
 *   type (see resolveWorkflowType), includeDefault, output.
 */
export async function workflowDownloadCommand(options) {
  const env = getCurrentEnvironment();
  const { apiKey, projectId } = resolveAuth(options);
  const workflowType = resolveWorkflowType(options);

  console.log(chalk.bold.cyan('\n Zibby Workflow Download\n'));
  console.log(chalk.gray(' '.padEnd(52, '-')));
  console.log(chalk.white(` Environment: ${chalk.cyan(env.name)}`));
  console.log(chalk.white(` Project: ${chalk.cyan(projectId)}`));
  console.log(chalk.white(` Type: ${chalk.cyan(workflowType)}`));
  console.log(chalk.gray(' '.padEnd(52, '-')));

  const spinner = ora(' Fetching workflow from cloud...').start();

  try {
    const apiUrl = getApiUrl();
    const response = await fetch(`${apiUrl}/projects/${projectId}/workflows/${workflowType}`, {
      method: 'GET',
      headers: {
        'Content-Type': 'application/json',
        'Authorization': `Bearer ${apiKey}`,
      },
    });

    if (!response.ok) {
      const body = await response.text();
      spinner.fail(` API error: ${response.status}`);
      console.log(chalk.red(` ${body}\n`));
      process.exit(1);
    }

    const data = await response.json();

    // NOTE(review): when data.graph is absent but isDefault is also false,
    // this still reports "Fetched workflow" before falling into the no-graph
    // branch below — confirm the API can return that combination.
    if (!data.graph && data.isDefault) {
      spinner.info(' No custom workflow saved -- downloading default graph');
    } else {
      spinner.succeed(` Fetched workflow (v${data.version || 0})`);
    }

    const graphConfig = data.graph || null;
    if (!graphConfig) {
      console.log(chalk.yellow('\n No graph config available for this workflow.'));
      console.log(chalk.gray(' The project is using the built-in default graph.'));
      console.log(chalk.gray(' Edit the graph in the UI first, or use --include-default to download the default.\n'));

      // Without --include-default there is nothing to write; this is not an error.
      if (!options.includeDefault) {
        process.exit(0);
      }

      // Lazy import keeps the default-graph templates off the hot path.
      spinner.start(' Fetching default graph...');
      const { getDefaultGraph } = await import('@zibby/core/templates/graphs/index.js');
      const defaultGraph = getDefaultGraph(workflowType);
      if (!defaultGraph) {
        spinner.fail(` No default graph found for type "${workflowType}"`);
        process.exit(1);
      }
      // Default graphs are written as version 0 / isDefault so an upload
      // later is clearly distinguishable from a customized graph.
      return writeWorkflowFile(workflowType, {
        graph: defaultGraph,
        version: 0,
        isDefault: true,
        projectId,
        workflowType,
      }, options);
    }

    return writeWorkflowFile(workflowType, {
      graph: graphConfig,
      version: data.version || 0,
      isDefault: data.isDefault || false,
      projectId,
      workflowType,
    }, options);

  } catch (err) {
    spinner.fail(' Download failed');
    console.log(chalk.red(`\n ${err.message}\n`));
    process.exit(1);
  }
}
128
+
129
/**
 * Materializes a downloaded workflow as three files in the output directory
 * (default <cwd>/.zibby):
 *   - workflow-<type>.js          executable graph code (generateWorkflowCode)
 *   - workflow-<type>.config.json runtime node configs (generateNodeConfigsJson)
 *   - workflow-<type>.json        raw graph JSON + _meta, for re-upload
 * Then prints a summary and usage hints.
 *
 * @param {string} workflowType - Validated workflow type (file name suffix).
 * @param {{graph: object, version: number, isDefault: boolean,
 *          projectId: string, workflowType: string}} payload
 * @param {{output?: string}} options - Optional output directory override.
 */
function writeWorkflowFile(workflowType, payload, options) {
  const cwd = process.cwd();
  const outDir = options.output || join(cwd, '.zibby');

  if (!existsSync(outDir)) {
    mkdirSync(outDir, { recursive: true });
  }

  // Metadata embedded in the generated JS and in the raw JSON's _meta key.
  const meta = {
    projectId: payload.projectId,
    workflowType: payload.workflowType,
    version: payload.version,
    isDefault: payload.isDefault,
  };

  // 1) Executable JS module generated from the graph.
  const jsFilename = `workflow-${workflowType}.js`;
  const jsPath = join(outDir, jsFilename);
  const jsCode = generateWorkflowCode(payload.graph, meta);
  writeFileSync(jsPath, jsCode, 'utf-8');

  // 2) Runtime node configs as pretty-printed JSON with trailing newline.
  const nodeConfigs = payload.graph.nodeConfigs || {};
  const runtimeConfigs = generateNodeConfigsJson(nodeConfigs);
  const configFilename = `workflow-${workflowType}.config.json`;
  const configPath = join(outDir, configFilename);
  writeFileSync(configPath, `${JSON.stringify(runtimeConfigs, null, 2) }\n`, 'utf-8');

  // 3) Raw graph JSON (with _meta) so the graph can be uploaded back as-is.
  const jsonFilename = `workflow-${workflowType}.json`;
  const jsonPath = join(outDir, jsonFilename);
  const jsonContent = {
    _meta: { ...meta, downloadedAt: new Date().toISOString() },
    ...payload.graph,
  };
  writeFileSync(jsonPath, `${JSON.stringify(jsonContent, null, 2) }\n`, 'utf-8');

  console.log(chalk.green(`\n Generated workflow files:`));
  console.log(chalk.white(` ${chalk.bold(jsPath)}`));
  console.log(chalk.gray(` Executable graph with inline tool bindings`));
  console.log(chalk.white(` ${chalk.bold(configPath)}`));
  console.log(chalk.gray(` Extra prompt instructions & runtime config`));
  console.log(chalk.white(` ${chalk.bold(jsonPath)}`));
  console.log(chalk.gray(` Raw JSON config (for upload back to cloud)`));
  console.log('');
  console.log(chalk.gray(` Version: ${payload.version}`));
  console.log(chalk.gray(` Nodes: ${payload.graph.nodes?.length || 0}`));
  console.log(chalk.gray(` Edges: ${payload.graph.edges?.length || 0}\n`));

  console.log(chalk.white(' To run locally:'));
  console.log(chalk.cyan(` zibby analyze --workflow ${jsPath}\n`));
  console.log(chalk.white(' To upload changes back:'));
  console.log(chalk.cyan(` zibby workflow upload --project ${payload.projectId} --type ${workflowType}\n`));
}
180
+
181
/**
 * Uploads a local workflow graph back to the cloud. The source may be the
 * generated JS module (preferred when present; loaded dynamically, built via
 * buildGraph() and serialized) or the raw JSON file. The graph is validated
 * locally with validateGraphConfig before the PUT request.
 *
 * Exits with code 1 on a missing/unparseable file, validation failure, API
 * error, or network failure.
 *
 * @param {object} options - CLI options: apiKey/project (see resolveAuth),
 *   type (see resolveWorkflowType), file (explicit path override).
 */
export async function workflowUploadCommand(options) {
  const env = getCurrentEnvironment();
  const { apiKey, projectId } = resolveAuth(options);
  const workflowType = resolveWorkflowType(options);

  console.log(chalk.bold.cyan('\n Zibby Workflow Upload\n'));
  console.log(chalk.gray(' '.padEnd(52, '-')));
  console.log(chalk.white(` Environment: ${chalk.cyan(env.name)}`));
  console.log(chalk.white(` Project: ${chalk.cyan(projectId)}`));
  console.log(chalk.white(` Type: ${chalk.cyan(workflowType)}`));
  console.log(chalk.gray(' '.padEnd(52, '-')));

  // Default source: prefer the generated JS module, fall back to raw JSON.
  const cwd = process.cwd();
  const defaultJson = join(cwd, '.zibby', `workflow-${workflowType}.json`);
  const defaultJs = join(cwd, '.zibby', `workflow-${workflowType}.js`);
  const filePath = options.file || (existsSync(defaultJs) ? defaultJs : defaultJson);

  if (!existsSync(filePath)) {
    console.log(chalk.red(`\n File not found: ${filePath}`));
    console.log(chalk.gray(' Download a workflow first: zibby workflow download --project <id> --type <type>\n'));
    process.exit(1);
  }

  const isJsFile = filePath.endsWith('.js') || filePath.endsWith('.mjs');
  let graphConfig;

  if (isJsFile) {
    const loadSpinner = ora(' Loading JS workflow module...').start();
    try {
      // Dynamic import needs a file:// URL for absolute paths to work cross-platform.
      const { pathToFileURL } = await import('url');
      const mod = await import(pathToFileURL(resolve(filePath)).href);
      const graph = mod.buildGraph();
      graphConfig = graph.serialize();

      // Merge module-level nodeConfigs, letting serialized per-node configs win.
      // NOTE(review): assumes graph.serialize() always yields a nodeConfigs
      // object — if it can be undefined, the assignment below throws; confirm.
      const jsNodeConfigs = mod.nodeConfigs || {};
      if (Object.keys(jsNodeConfigs).length > 0) {
        for (const [nodeId, conf] of Object.entries(jsNodeConfigs)) {
          graphConfig.nodeConfigs[nodeId] = { ...conf, ...graphConfig.nodeConfigs[nodeId] };
        }
      }

      loadSpinner.succeed(` Loaded JS module (${graph.nodes.size} nodes)`);
    } catch (err) {
      loadSpinner.fail(` Failed to load JS module`);
      console.log(chalk.red(`\n ${err.message}\n`));
      process.exit(1);
    }
  } else {
    let fileContent;
    try {
      fileContent = JSON.parse(readFileSync(filePath, 'utf-8'));
    } catch (err) {
      console.log(chalk.red(`\n Failed to parse ${filePath}: ${err.message}\n`));
      process.exit(1);
    }

    // Strip the _meta block writeWorkflowFile added; everything else is the graph.
    const { _meta, ...config } = fileContent;
    graphConfig = config;
  }

  if (!graphConfig.nodes || !graphConfig.edges) {
    console.log(chalk.red('\n Invalid workflow file: missing nodes or edges'));
    console.log(chalk.gray(' The file should contain { nodes: [...], edges: [...], nodeConfigs: {...} }\n'));
    process.exit(1);
  }

  console.log(chalk.gray(`\n File: ${filePath}`));
  console.log(chalk.gray(` Format: ${isJsFile ? 'JavaScript (serialized via graph.serialize())' : 'JSON'}`));
  console.log(chalk.gray(` Nodes: ${graphConfig.nodes.length}`));
  console.log(chalk.gray(` Edges: ${graphConfig.edges.length}`));

  // Validate locally before hitting the API so errors are actionable.
  const validateSpinner = ora(' Validating graph...').start();
  const validation = validateGraphConfig(graphConfig);

  if (!validation.valid) {
    validateSpinner.fail(' Graph validation failed');
    console.log('');
    for (const err of validation.errors) {
      console.log(chalk.red(` ${err}`));
    }
    console.log(chalk.gray('\n Fix the errors above and try again.\n'));
    process.exit(1);
  }
  validateSpinner.succeed(' Graph is valid');

  const uploadSpinner = ora(' Uploading to cloud...').start();

  try {
    const apiUrl = getApiUrl();
    const response = await fetch(`${apiUrl}/projects/${projectId}/workflows/${workflowType}`, {
      method: 'PUT',
      headers: {
        'Content-Type': 'application/json',
        'Authorization': `Bearer ${apiKey}`,
      },
      body: JSON.stringify({ graph: graphConfig }),
    });

    if (!response.ok) {
      const body = await response.text();
      uploadSpinner.fail(` API error: ${response.status}`);
      console.log(chalk.red(` ${body}\n`));
      process.exit(1);
    }

    const result = await response.json();
    uploadSpinner.succeed(` Uploaded successfully (v${result.version})`);

    console.log(chalk.green(`\n Workflow "${workflowType}" updated to version ${result.version}`));
    console.log(chalk.gray(` Project: ${projectId}\n`));

  } catch (err) {
    uploadSpinner.fail(' Upload failed');
    console.log(chalk.red(`\n ${err.message}\n`));
    process.exit(1);
  }
}
298
+
299
/**
 * Lists every workflow type for a project in a fixed-width table: version,
 * node count, custom/default status, and last-update date. Types the API
 * does not return are silently skipped. Exits with code 1 when any request
 * throws.
 *
 * @param {object} options - CLI options: apiKey/project (see resolveAuth).
 */
export async function workflowListCommand(options) {
  const _env = getCurrentEnvironment();
  const { apiKey, projectId } = resolveAuth(options);

  console.log(chalk.bold.cyan('\n Zibby Workflows\n'));

  const spinner = ora(' Fetching workflows...').start();

  try {
    const apiUrl = getApiUrl();

    // The per-type requests are independent, so issue them in parallel;
    // map() keeps the results in VALID_TYPES order.
    const fetched = await Promise.all(
      VALID_TYPES.map(async (type) => {
        const response = await fetch(`${apiUrl}/projects/${projectId}/workflows/${type}`, {
          method: 'GET',
          headers: {
            'Content-Type': 'application/json',
            'Authorization': `Bearer ${apiKey}`,
          },
        });

        // Non-OK responses are skipped (matches previous behavior).
        if (!response.ok) {
          return null;
        }

        const data = await response.json();
        return {
          type,
          version: data.version || 0,
          isDefault: data.isDefault !== false && !data.graph,
          nodes: data.graph?.nodes?.length || 0,
          updatedAt: data.updatedAt || null,
        };
      })
    );
    const results = fetched.filter((wf) => wf !== null);

    spinner.succeed(' Fetched workflows\n');

    console.log(chalk.gray(' '.padEnd(70, '-')));
    console.log(
      chalk.white(' Type'.padEnd(20)) +
      chalk.white('Version'.padEnd(10)) +
      chalk.white('Nodes'.padEnd(10)) +
      chalk.white('Status'.padEnd(15)) +
      chalk.white('Updated')
    );
    console.log(chalk.gray(' '.padEnd(70, '-')));

    for (const wf of results) {
      // Pad BEFORE colorizing: chalk wraps text in ANSI escape sequences and
      // padEnd() counts those invisible characters, which skews the columns.
      const status = wf.isDefault
        ? chalk.gray('default'.padEnd(15))
        : chalk.green('custom'.padEnd(15));
      const updated = wf.updatedAt
        ? new Date(wf.updatedAt).toLocaleDateString()
        : chalk.gray('-');

      console.log(
        ` ${chalk.cyan(wf.type.padEnd(18))}` +
        `${String(wf.version).padEnd(10)}` +
        `${String(wf.nodes).padEnd(10)}` +
        `${status}` +
        `${updated}`
      );
    }

    console.log(chalk.gray(' '.padEnd(70, '-')));
    console.log('');

  } catch (err) {
    spinner.fail(' Failed to fetch workflows');
    console.log(chalk.red(`\n ${err.message}\n`));
    process.exit(1);
  }
}