@zibby/cli 0.1.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +926 -0
- package/bin/zibby.js +266 -0
- package/package.json +65 -0
- package/src/auth/cli-login.js +406 -0
- package/src/commands/analyze-graph.js +334 -0
- package/src/commands/ci-setup.js +65 -0
- package/src/commands/implement.js +664 -0
- package/src/commands/init.js +736 -0
- package/src/commands/list-projects.js +78 -0
- package/src/commands/memory.js +171 -0
- package/src/commands/run.js +926 -0
- package/src/commands/setup-scripts.js +101 -0
- package/src/commands/upload.js +163 -0
- package/src/commands/video.js +30 -0
- package/src/commands/workflow.js +369 -0
- package/src/config/config.js +117 -0
- package/src/config/environments.js +145 -0
- package/src/utils/execution-context.js +25 -0
- package/src/utils/progress-reporter.js +155 -0
|
@@ -0,0 +1,117 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Configuration management for Zibby CLI
|
|
3
|
+
* Stores user session and settings in ~/.zibby/config.json
|
|
4
|
+
*/
|
|
5
|
+
|
|
6
|
+
import { existsSync, mkdirSync, readFileSync, writeFileSync } from 'fs';
|
|
7
|
+
import { homedir } from 'os';
|
|
8
|
+
import { join } from 'path';
|
|
9
|
+
|
|
10
|
+
/**
 * Resolve the directory that holds Zibby CLI state.
 * Honors the ZIBBY_CONFIG_DIR override (useful for tests/CI), defaulting
 * to ~/.zibby.
 *
 * @returns {string} Absolute path of the config directory.
 */
function getConfigDir() {
  const override = process.env.ZIBBY_CONFIG_DIR;
  return override || join(homedir(), '.zibby');
}

/**
 * Path of the JSON config file inside the config directory.
 *
 * @returns {string} Absolute path of config.json.
 */
function getConfigFile() {
  const dir = getConfigDir();
  return join(dir, 'config.json');
}
|
|
17
|
+
|
|
18
|
+
// Legacy exported constants kept for callers that import them directly.
// Fixed to honor ZIBBY_CONFIG_DIR like getConfigDir()/getConfigFile() do;
// previously they always pointed at ~/.zibby even when the override was set.
// NOTE: unlike the functions, these are resolved once at module load.
const CONFIG_DIR = process.env.ZIBBY_CONFIG_DIR || join(homedir(), '.zibby');
const CONFIG_FILE = join(CONFIG_DIR, 'config.json');
|
|
20
|
+
|
|
21
|
+
/**
 * Create the config directory (including parents) if it does not exist yet.
 */
function ensureConfigDir() {
  const configDir = getConfigDir();
  if (existsSync(configDir)) {
    return;
  }
  mkdirSync(configDir, { recursive: true });
}
|
|
30
|
+
|
|
31
|
+
/**
 * Read and parse the config file.
 *
 * @returns {object} Parsed configuration, or an empty object when the file
 *   is missing, unreadable, or contains invalid JSON.
 */
export function loadConfig() {
  try {
    const file = getConfigFile();
    if (!existsSync(file)) {
      return {};
    }
    const raw = readFileSync(file, 'utf-8');
    return JSON.parse(raw);
  } catch {
    // Corrupt or unreadable config — treat as empty rather than crash the CLI.
    return {};
  }
}
|
|
46
|
+
|
|
47
|
+
/**
 * Persist the given configuration object as pretty-printed JSON,
 * creating the config directory first if needed.
 *
 * @param {object} config - Full configuration; replaces the file contents.
 */
export function saveConfig(config) {
  ensureConfigDir();
  const serialized = JSON.stringify(config, null, 2);
  writeFileSync(getConfigFile(), serialized);
}
|
|
54
|
+
|
|
55
|
+
/**
 * @returns {string|null} The saved session token, or null when none is stored.
 */
export function getSessionToken() {
  const { sessionToken } = loadConfig();
  return sessionToken || null;
}
|
|
62
|
+
|
|
63
|
+
/**
 * Store the session token, preserving all other saved settings.
 *
 * @param {string} token - Session token returned by the login flow.
 */
export function saveSessionToken(token) {
  saveConfig({ ...loadConfig(), sessionToken: token });
}
|
|
71
|
+
|
|
72
|
+
/**
 * @returns {object|null} Saved user info, or null when none is stored.
 */
export function getUserInfo() {
  const { user } = loadConfig();
  return user || null;
}
|
|
79
|
+
|
|
80
|
+
/**
 * Store the user info object, preserving all other saved settings.
 *
 * @param {object} user - User record to persist.
 */
export function saveUserInfo(user) {
  saveConfig({ ...loadConfig(), user });
}
|
|
88
|
+
|
|
89
|
+
/**
 * @returns {boolean} True when a session token is currently stored.
 */
export function isLoggedIn() {
  const token = getSessionToken();
  return token !== null;
}
|
|
95
|
+
|
|
96
|
+
/**
 * Log out: drop the session token and user info while keeping any other
 * saved settings intact.
 */
export function clearSession() {
  const { sessionToken: _token, user: _user, ...rest } = loadConfig();
  saveConfig(rest);
}
|
|
105
|
+
|
|
106
|
+
// Default export mirrors the named exports for consumers that use
// `import config from '.../config.js'` style imports.
export default {
  loadConfig,
  saveConfig,
  getSessionToken,
  saveSessionToken,
  getUserInfo,
  saveUserInfo,
  isLoggedIn,
  clearSession,
  CONFIG_DIR,
  CONFIG_FILE
};
|
|
@@ -0,0 +1,145 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Environment Configuration
|
|
3
|
+
* Determines which backend API to use
|
|
4
|
+
*/
|
|
5
|
+
|
|
6
|
+
// Known deployment targets, keyed by the value of ZIBBY_ENV.
const ENVIRONMENTS = {
  local: {
    name: 'Local Development',
    apiUrl: 'http://localhost:3001',
    accountApiUrl: 'http://localhost:3001',
    frontendUrl: 'http://localhost:3000',
    description: 'Local backend running on port 3001'
  },
  prod: {
    name: 'Production',
    // Prod endpoints can be overridden via env vars (e.g. to point the
    // 'prod' code path at a staging stack); values are read once at
    // module load.
    apiUrl: process.env.ZIBBY_PROD_API_URL || 'https://api-prod.zibby.app',
    accountApiUrl: process.env.ZIBBY_PROD_ACCOUNT_API_URL || 'https://account-api-prod.zibby.app',
    frontendUrl: process.env.ZIBBY_PROD_FRONTEND_URL || 'https://studio.zibby.app',
    description: 'Production environment'
  }
};
|
|
22
|
+
|
|
23
|
+
/**
 * Get API URL based on environment.
 * Priority: ZIBBY_API_URL env var > ZIBBY_ENV environment entry > prod default.
 * The chosen URL is validated; anything that is not http/https falls back
 * to the production API URL.
 *
 * @returns {string} A validated http(s) API base URL.
 */
function getApiUrl() {
  let url;

  // 1. Check explicit environment variable
  if (process.env.ZIBBY_API_URL) {
    url = process.env.ZIBBY_API_URL;
  } else {
    // 2. Check environment mode. Object.hasOwn avoids matching inherited
    //    object keys: previously ZIBBY_ENV=constructor (or toString, etc.)
    //    passed the truthy check and produced an undefined URL.
    // 3. Unknown environment names default to production.
    const env = process.env.ZIBBY_ENV || 'prod';
    url = Object.hasOwn(ENVIRONMENTS, env)
      ? ENVIRONMENTS[env].apiUrl
      : ENVIRONMENTS.prod.apiUrl;
  }

  // Validate URL format for security
  try {
    const parsed = new URL(url);

    if (parsed.protocol !== 'http:' && parsed.protocol !== 'https:') {
      console.error(`⚠️ Invalid API URL protocol: ${parsed.protocol} (only http/https allowed)`);
      return ENVIRONMENTS.prod.apiUrl;
    }

    return url;
  } catch (_error) {
    console.error(`⚠️ Invalid API URL: ${url}`);
    return ENVIRONMENTS.prod.apiUrl;
  }
}
|
|
60
|
+
|
|
61
|
+
/**
 * Get Account API URL (collections, billing, user tokens)
 * Separate API Gateway due to CloudFormation resource limits.
 * Priority: ZIBBY_ACCOUNT_API_URL env var > ZIBBY_ENV entry > prod default.
 *
 * @returns {string} Account API base URL.
 */
function getAccountApiUrl() {
  if (process.env.ZIBBY_ACCOUNT_API_URL) {
    return process.env.ZIBBY_ACCOUNT_API_URL;
  }

  const env = process.env.ZIBBY_ENV || 'prod';
  // Object.hasOwn: a ZIBBY_ENV value like "toString" matched an inherited
  // key before and made this function return undefined instead of the
  // production URL (and, unlike getApiUrl, there is no validation here
  // to catch that).
  const config = Object.hasOwn(ENVIRONMENTS, env) ? ENVIRONMENTS[env] : ENVIRONMENTS.prod;
  return config.accountApiUrl;
}
|
|
74
|
+
|
|
75
|
+
/**
 * Get current environment info.
 * Unknown or missing ZIBBY_ENV values resolve to the prod entry.
 *
 * @returns {object} One of the ENVIRONMENTS entries.
 */
function getCurrentEnvironment() {
  const env = process.env.ZIBBY_ENV || 'prod';
  // Object.hasOwn guards against inherited keys (e.g. ZIBBY_ENV=constructor)
  // being treated as valid environments.
  return Object.hasOwn(ENVIRONMENTS, env) ? ENVIRONMENTS[env] : ENVIRONMENTS.prod;
}
|
|
82
|
+
|
|
83
|
+
/**
 * Get frontend URL based on environment.
 * Priority: ZIBBY_FRONTEND_URL env var > ZIBBY_ENV entry > prod default.
 * In production the hostname is additionally restricted to known Zibby
 * domains (or localhost) so a compromised env var cannot redirect users
 * to an untrusted site.
 *
 * @returns {string} A validated http(s) frontend URL.
 */
function getFrontendUrl() {
  let url;

  // 1. Check explicit environment variable
  if (process.env.ZIBBY_FRONTEND_URL) {
    url = process.env.ZIBBY_FRONTEND_URL;
  } else {
    // 2. Check environment mode. Object.hasOwn prevents inherited keys
    //    (e.g. ZIBBY_ENV=constructor) from matching and yielding an
    //    undefined URL. 3. Unknown names default to production.
    const env = process.env.ZIBBY_ENV || 'prod';
    url = Object.hasOwn(ENVIRONMENTS, env)
      ? ENVIRONMENTS[env].frontendUrl
      : ENVIRONMENTS.prod.frontendUrl;
  }

  // Validate URL format for security
  try {
    const parsed = new URL(url);

    // Only allow http/https protocols
    if (parsed.protocol !== 'http:' && parsed.protocol !== 'https:') {
      console.error(`⚠️ Invalid frontend URL protocol: ${parsed.protocol} (only http/https allowed)`);
      // NOTE(review): falls back to the *local* frontend here while
      // getApiUrl() falls back to prod — confirm this asymmetry is intended.
      return ENVIRONMENTS.local.frontendUrl;
    }

    // In production, only allow known Zibby domains or localhost
    const isProd = process.env.NODE_ENV === 'production' || process.env.ZIBBY_ENV === 'prod';
    if (isProd) {
      const allowedDomains = ['zibby.app', 'studio.zibby.app', 'studio-staging.zibby.app'];
      const hostname = parsed.hostname;

      const isAllowed = allowedDomains.some(domain =>
        hostname === domain || hostname.endsWith(`.${domain}`)
      );

      if (!isAllowed && !hostname.includes('localhost') && hostname !== '127.0.0.1') {
        console.error(`⚠️ Untrusted frontend URL in production: ${hostname}`);
        return 'https://studio.zibby.app';
      }
    }

    return url;
  } catch (_error) {
    console.error(`⚠️ Invalid frontend URL: ${url}`);
    return ENVIRONMENTS.local.frontendUrl;
  }
}
|
|
137
|
+
|
|
138
|
+
// Public API of the environments module.
export {
  ENVIRONMENTS,
  getApiUrl,
  getAccountApiUrl,
  getCurrentEnvironment,
  getFrontendUrl
};
|
|
145
|
+
|
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
/**
 * Download the execution context document via the pre-signed URL supplied
 * in CONTEXT_PRESIGNED_URL. The _executionId/_projectId parameters are
 * currently unused but kept for interface stability.
 *
 * @returns {Promise<{ticketContext: object, nodeConfigs: object, graphConfig: (object|null), repos: Array}>}
 * @throws {Error} When CONTEXT_PRESIGNED_URL is unset or the download fails.
 */
export async function fetchExecutionContext(_executionId, _projectId) {
  const presignedUrl = process.env.CONTEXT_PRESIGNED_URL;
  if (!presignedUrl) {
    throw new Error('CONTEXT_PRESIGNED_URL env var is required');
  }

  console.log(`📦 Fetching execution context via pre-signed URL`);
  const response = await fetch(presignedUrl);
  if (!response.ok) {
    throw new Error(`Failed to fetch execution context: ${response.status}`);
  }

  const data = await response.json();
  const ticketContext = data.ticketContext || {};
  const nodeConfigs = data.nodeConfigs || {};

  console.log(` ✅ Got ticketContext (${JSON.stringify(ticketContext).length} chars)`);
  const configuredNodes = Object.keys(nodeConfigs).length;
  if (data.nodeConfigs && configuredNodes > 0) {
    console.log(` ✅ Got nodeConfigs (${configuredNodes} nodes configured)`);
  }

  return {
    ticketContext,
    nodeConfigs,
    graphConfig: data.graphConfig || null,
    repos: data.repos || []
  };
}
|
|
@@ -0,0 +1,155 @@
|
|
|
1
|
+
import { SQSClient, SendMessageCommand } from '@aws-sdk/client-sqs';
|
|
2
|
+
|
|
3
|
+
// Lazily-constructed, shared SQS client. Region comes from AWS_REGION,
// defaulting to ap-southeast-2.
let sqsClient = null;

function getSqsClient() {
  if (sqsClient === null) {
    const region = process.env.AWS_REGION || 'ap-southeast-2';
    sqsClient = new SQSClient({ region });
  }
  return sqsClient;
}
|
|
11
|
+
|
|
12
|
+
/**
 * Report a single step update for the current execution.
 * Prefers the HTTP progress endpoint, falling back to SQS when only the
 * queue URL is configured. A missing EXECUTION_ID makes this a no-op, and
 * transport failures are logged but never thrown — progress reporting is
 * best-effort.
 *
 * @param {string} stepName - Name of the pipeline step.
 * @param {string} status - Step status (e.g. 'running', 'success', 'failed').
 * @param {*} logs - Log payload attached to the step.
 * @param {object} state - Execution env/config bag (EXECUTION_ID, transports, tokens).
 */
export async function reportProgress(stepName, status, logs, state) {
  const { EXECUTION_ID, SQS_AUTH_TOKEN, PROGRESS_API_URL, PROGRESS_QUEUE_URL, PROJECT_API_TOKEN } = state;

  if (!EXECUTION_ID) return;

  const step = {
    name: stepName,
    status,
    logs,
    timestamp: new Date().toISOString()
  };
  if (status === 'success') {
    step.completedAt = new Date().toISOString();
  }

  const message = {
    executionId: EXECUTION_ID,
    ...(SQS_AUTH_TOKEN && { sqsAuthToken: SQS_AUTH_TOKEN }),
    step,
    // Overall execution status stays 'running' unless this step failed.
    status: status === 'failed' ? 'failed' : 'running'
  };

  try {
    if (PROGRESS_API_URL) {
      await reportViaHttp(PROGRESS_API_URL, EXECUTION_ID, message, PROJECT_API_TOKEN);
    } else if (PROGRESS_QUEUE_URL) {
      await reportViaSqs(PROGRESS_QUEUE_URL, EXECUTION_ID, message);
    }
  } catch (error) {
    console.error(`⚠️ Failed to send progress: ${error.message}`);
  }
}
|
|
40
|
+
|
|
41
|
+
/**
 * Send a single artifact (e.g. 'analysis', 'codeImplementation', 'report').
 * Called after each node finishes so artifacts persist even if later nodes
 * fail. No-op when EXECUTION_ID or the artifact value is missing; transport
 * errors are logged in detail but never thrown.
 *
 * @param {object} state - Execution env/config bag.
 * @param {string} artifactKey - Artifact name.
 * @param {*} artifactValue - Artifact payload (JSON-serializable).
 */
export async function reportArtifact(state, artifactKey, artifactValue) {
  const { EXECUTION_ID, SQS_AUTH_TOKEN, PROGRESS_API_URL, PROGRESS_QUEUE_URL, PROJECT_API_TOKEN } = state;
  if (!EXECUTION_ID || !artifactValue) return;

  const kb = (bytes) => (bytes / 1024).toFixed(1);
  const size = JSON.stringify(artifactValue).length;
  console.log(`📦 Sending artifact: ${artifactKey} (${kb(size)}KB)`);

  const message = {
    executionId: EXECUTION_ID,
    ...(SQS_AUTH_TOKEN && { sqsAuthToken: SQS_AUTH_TOKEN }),
    artifacts: { [artifactKey]: artifactValue },
    timestamp: new Date().toISOString()
  };

  const transport = PROGRESS_API_URL ? 'HTTP' : PROGRESS_QUEUE_URL ? 'SQS' : 'NONE';
  const msgBytes = JSON.stringify(message).length;
  try {
    if (PROGRESS_API_URL) {
      await reportViaHttp(PROGRESS_API_URL, EXECUTION_ID, message, PROJECT_API_TOKEN);
    } else if (PROGRESS_QUEUE_URL) {
      await reportViaSqs(PROGRESS_QUEUE_URL, EXECUTION_ID, message);
    } else {
      console.warn(`⚠️ No transport configured for artifact ${artifactKey} — neither PROGRESS_API_URL nor PROGRESS_QUEUE_URL set`);
      return;
    }
    console.log(`✅ Artifact ${artifactKey} sent via ${transport} (payload=${kb(msgBytes)}KB, value=${kb(size)}KB)`);
  } catch (err) {
    // Log enough detail to diagnose delivery failures without rethrowing.
    console.error(`❌ Failed to send artifact ${artifactKey} via ${transport}:`);
    console.error(` Payload size: ${kb(msgBytes)}KB, Value size: ${kb(size)}KB`);
    console.error(` Error: ${err.message}`);
    if (err.name) console.error(` Error type: ${err.name}`);
    if (err.code) console.error(` Error code: ${err.code}`);
    if (msgBytes > 256 * 1024) {
      console.error(` ⚠️ Message exceeds SQS 256KB limit! Consider splitting or compressing.`);
    }
  }
}
|
|
82
|
+
|
|
83
|
+
/**
 * Send the final execution status (completed/failed/blocked). No artifacts —
 * those are sent per-node. Terminal statuses are tagged as
 * 'execution_completed' when delivered over SQS. No-op without EXECUTION_ID;
 * transport failures are logged, never thrown.
 *
 * @param {object} state - Execution env/config bag.
 * @param {{status: string, error?: *}} result - Final status and optional error.
 */
export async function reportFinalStatus(state, { status, error }) {
  const { EXECUTION_ID, SQS_AUTH_TOKEN, PROGRESS_API_URL, PROGRESS_QUEUE_URL, PROJECT_API_TOKEN } = state;

  if (!EXECUTION_ID) return;

  const message = {
    executionId: EXECUTION_ID,
    ...(SQS_AUTH_TOKEN && { sqsAuthToken: SQS_AUTH_TOKEN }),
    status,
    ...(error && { error }),
    timestamp: new Date().toISOString()
  };

  const transport = PROGRESS_API_URL ? 'HTTP' : PROGRESS_QUEUE_URL ? 'SQS' : 'NONE';
  const msgBytes = JSON.stringify(message).length;
  console.log(`📊 Sending final status: ${status} via ${transport} (${(msgBytes / 1024).toFixed(1)}KB)`);

  const terminalStatuses = ['completed', 'failed', 'insufficient_context', 'blocked'];
  try {
    if (PROGRESS_API_URL) {
      await reportViaHttp(PROGRESS_API_URL, EXECUTION_ID, message, PROJECT_API_TOKEN);
    } else if (PROGRESS_QUEUE_URL) {
      const msgType = terminalStatuses.includes(status)
        ? 'execution_completed'
        : 'progress_update';
      await reportViaSqs(PROGRESS_QUEUE_URL, EXECUTION_ID, message, msgType);
    } else {
      console.warn(`⚠️ No transport configured for final status — neither PROGRESS_API_URL nor PROGRESS_QUEUE_URL set`);
      return;
    }
    console.log(`✅ Final status ${status} sent via ${transport}`);
  } catch (err) {
    console.error(`❌ Failed to send final status (${status}) via ${transport}:`);
    console.error(` Payload: ${(msgBytes / 1024).toFixed(1)}KB`);
    console.error(` Error: ${err.message}`);
    if (err.name) console.error(` Error type: ${err.name}`);
    if (err.code) console.error(` Error code: ${err.code}`);
  }
}
|
|
122
|
+
|
|
123
|
+
/**
 * POST a progress message to `${apiUrl}/${executionId}/progress`.
 * Adds a Bearer token header when one is provided.
 *
 * @throws {Error} On any non-2xx response, with status and body text.
 */
async function reportViaHttp(apiUrl, executionId, message, apiToken) {
  const headers = { 'Content-Type': 'application/json' };
  if (apiToken) {
    headers['Authorization'] = `Bearer ${apiToken}`;
  }

  const res = await fetch(`${apiUrl}/${executionId}/progress`, {
    method: 'POST',
    headers,
    body: JSON.stringify(message)
  });
  if (res.ok) {
    return;
  }
  const body = await res.text();
  throw new Error(`HTTP ${res.status}: ${body}`);
}
|
|
139
|
+
|
|
140
|
+
/**
 * Publish a progress message to the SQS queue. Oversized payloads (>256KB)
 * are logged but the send is still attempted so the caller sees the real
 * SQS error.
 * NOTE(review): MessageGroupId is only accepted by FIFO queues — confirm the
 * target queue is FIFO.
 *
 * @param {string} queueUrl - Destination queue URL.
 * @param {string} executionId - Used as MessageGroupId and message attribute.
 * @param {object} message - JSON-serializable payload.
 * @param {string} [messageType='progress_update'] - Message attribute tag.
 */
async function reportViaSqs(queueUrl, executionId, message, messageType = 'progress_update') {
  const body = JSON.stringify(message);
  if (body.length > 256 * 1024) {
    console.error(`❌ SQS message too large: ${(body.length / 1024).toFixed(1)}KB (limit 256KB) for ${executionId} [${messageType}]`);
  }

  const command = new SendMessageCommand({
    QueueUrl: queueUrl,
    MessageBody: body,
    MessageGroupId: executionId,
    MessageAttributes: {
      executionId: { DataType: 'String', StringValue: executionId },
      messageType: { DataType: 'String', StringValue: messageType }
    }
  });
  await getSqsClient().send(command);
}
|