@ryanfw/prompt-orchestration-pipeline 0.9.0 → 0.10.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +1 -1
- package/src/config/log-events.js +77 -0
- package/src/core/file-io.js +202 -7
- package/src/core/orchestrator.js +108 -4
- package/src/core/pipeline-runner.js +84 -6
- package/src/core/symlink-utils.js +196 -0
- package/src/core/task-runner.js +71 -8
- package/src/providers/zhipu.js +32 -13
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@ryanfw/prompt-orchestration-pipeline",
|
|
3
|
-
"version": "0.
|
|
3
|
+
"version": "0.10.0",
|
|
4
4
|
"description": "A Prompt-orchestration pipeline (POP) is a framework for building, running, and experimenting with complex chains of LLM tasks.",
|
|
5
5
|
"type": "module",
|
|
6
6
|
"main": "src/ui/server.js",
|
|
@@ -0,0 +1,77 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Canonical log event constants and file extensions for the prompt orchestration pipeline.
|
|
3
|
+
* This module serves as the single source of truth for all log-related naming conventions.
|
|
4
|
+
*/
|
|
5
|
+
|
|
6
|
+
// Log event types for different stages and events in the pipeline
|
|
7
|
+
export const LogEvent = Object.freeze({
|
|
8
|
+
START: "start",
|
|
9
|
+
COMPLETE: "complete",
|
|
10
|
+
ERROR: "error",
|
|
11
|
+
CONTEXT: "context",
|
|
12
|
+
DEBUG: "debug",
|
|
13
|
+
METRICS: "metrics",
|
|
14
|
+
PIPELINE_START: "pipeline-start",
|
|
15
|
+
PIPELINE_COMPLETE: "pipeline-complete",
|
|
16
|
+
PIPELINE_ERROR: "pipeline-error",
|
|
17
|
+
EXECUTION_LOGS: "execution-logs",
|
|
18
|
+
FAILURE_DETAILS: "failure-details",
|
|
19
|
+
});
|
|
20
|
+
|
|
21
|
+
// File extensions for different log types
|
|
22
|
+
export const LogFileExtension = Object.freeze({
|
|
23
|
+
TEXT: "log",
|
|
24
|
+
JSON: "json",
|
|
25
|
+
});
|
|
26
|
+
|
|
27
|
+
// Validation sets for ensuring consistency
|
|
28
|
+
export const VALID_LOG_EVENTS = new Set(Object.values(LogEvent));
|
|
29
|
+
export const VALID_LOG_FILE_EXTENSIONS = new Set(
|
|
30
|
+
Object.values(LogFileExtension)
|
|
31
|
+
);
|
|
32
|
+
|
|
33
|
+
/**
|
|
34
|
+
* Validates a log event string.
|
|
35
|
+
* @param {string} event - Log event to validate
|
|
36
|
+
* @returns {boolean} True if valid, false otherwise
|
|
37
|
+
*/
|
|
38
|
+
export function isValidLogEvent(event) {
|
|
39
|
+
return VALID_LOG_EVENTS.has(event);
|
|
40
|
+
}
|
|
41
|
+
|
|
42
|
+
/**
|
|
43
|
+
* Validates a log file extension string.
|
|
44
|
+
* @param {string} ext - File extension to validate
|
|
45
|
+
* @returns {boolean} True if valid, false otherwise
|
|
46
|
+
*/
|
|
47
|
+
export function isValidLogFileExtension(ext) {
|
|
48
|
+
return VALID_LOG_FILE_EXTENSIONS.has(ext);
|
|
49
|
+
}
|
|
50
|
+
|
|
51
|
+
/**
|
|
52
|
+
* Normalizes a log event string to canonical form.
|
|
53
|
+
* @param {string} event - Raw log event
|
|
54
|
+
* @returns {string|null} Canonical log event or null if invalid
|
|
55
|
+
*/
|
|
56
|
+
export function normalizeLogEvent(event) {
|
|
57
|
+
if (typeof event !== "string") {
|
|
58
|
+
return null;
|
|
59
|
+
}
|
|
60
|
+
|
|
61
|
+
const normalized = event.toLowerCase().trim();
|
|
62
|
+
return isValidLogEvent(normalized) ? normalized : null;
|
|
63
|
+
}
|
|
64
|
+
|
|
65
|
+
/**
|
|
66
|
+
* Normalizes a log file extension string to canonical form.
|
|
67
|
+
* @param {string} ext - Raw file extension
|
|
68
|
+
* @returns {string|null} Canonical file extension or null if invalid
|
|
69
|
+
*/
|
|
70
|
+
export function normalizeLogFileExtension(ext) {
|
|
71
|
+
if (typeof ext !== "string") {
|
|
72
|
+
return null;
|
|
73
|
+
}
|
|
74
|
+
|
|
75
|
+
const normalized = ext.toLowerCase().trim().replace(/^\./, "");
|
|
76
|
+
return isValidLogFileExtension(normalized) ? normalized : null;
|
|
77
|
+
}
|
package/src/core/file-io.js
CHANGED
|
@@ -1,6 +1,13 @@
|
|
|
1
1
|
import fs from "node:fs/promises";
|
|
2
|
+
import fsSync from "node:fs";
|
|
2
3
|
import path from "node:path";
|
|
3
4
|
import { writeJobStatus } from "./status-writer.js";
|
|
5
|
+
import {
|
|
6
|
+
LogEvent,
|
|
7
|
+
LogFileExtension,
|
|
8
|
+
isValidLogEvent,
|
|
9
|
+
isValidLogFileExtension,
|
|
10
|
+
} from "../config/log-events.js";
|
|
4
11
|
|
|
5
12
|
/**
|
|
6
13
|
* Creates a task-scoped file I/O interface that manages file operations
|
|
@@ -18,7 +25,17 @@ async function ensureDir(dir) {
|
|
|
18
25
|
await fs.mkdir(dir, { recursive: true });
|
|
19
26
|
}
|
|
20
27
|
|
|
21
|
-
|
|
28
|
+
function ensureDirSync(dir) {
|
|
29
|
+
fsSync.mkdir(dir, { recursive: true });
|
|
30
|
+
}
|
|
31
|
+
|
|
32
|
+
export function createTaskFileIO({
|
|
33
|
+
workDir,
|
|
34
|
+
taskName,
|
|
35
|
+
getStage,
|
|
36
|
+
statusPath,
|
|
37
|
+
trackTaskFiles = true,
|
|
38
|
+
}) {
|
|
22
39
|
const taskDir = path.join(workDir, "tasks", taskName);
|
|
23
40
|
|
|
24
41
|
// New directory structure: {workDir}/files/{type}
|
|
@@ -34,18 +51,21 @@ export function createTaskFileIO({ workDir, taskName, getStage, statusPath }) {
|
|
|
34
51
|
const jobDir = path.dirname(statusPath);
|
|
35
52
|
await writeJobStatus(jobDir, (snapshot) => {
|
|
36
53
|
snapshot.files ||= { artifacts: [], logs: [], tmp: [] };
|
|
37
|
-
snapshot.tasks ||= {};
|
|
38
|
-
snapshot.tasks[taskName] ||= {};
|
|
39
|
-
snapshot.tasks[taskName].files ||= { artifacts: [], logs: [], tmp: [] };
|
|
40
54
|
|
|
41
55
|
const jobArray = snapshot.files[fileType];
|
|
42
56
|
if (!jobArray.includes(fileName)) {
|
|
43
57
|
jobArray.push(fileName);
|
|
44
58
|
}
|
|
45
59
|
|
|
46
|
-
|
|
47
|
-
|
|
48
|
-
|
|
60
|
+
if (trackTaskFiles) {
|
|
61
|
+
snapshot.tasks ||= {};
|
|
62
|
+
snapshot.tasks[taskName] ||= {};
|
|
63
|
+
snapshot.tasks[taskName].files ||= { artifacts: [], logs: [], tmp: [] };
|
|
64
|
+
|
|
65
|
+
const taskArray = snapshot.tasks[taskName].files[fileType];
|
|
66
|
+
if (!taskArray.includes(fileName)) {
|
|
67
|
+
taskArray.push(fileName);
|
|
68
|
+
}
|
|
49
69
|
}
|
|
50
70
|
|
|
51
71
|
return snapshot;
|
|
@@ -61,6 +81,15 @@ export function createTaskFileIO({ workDir, taskName, getStage, statusPath }) {
|
|
|
61
81
|
await fs.rename(tmpPath, filePath);
|
|
62
82
|
}
|
|
63
83
|
|
|
84
|
+
/**
|
|
85
|
+
* Synchronous atomic write helper
|
|
86
|
+
*/
|
|
87
|
+
function atomicWriteSync(filePath, data) {
|
|
88
|
+
const tmpPath = filePath + ".tmp";
|
|
89
|
+
fsSync.writeFileSync(tmpPath, data);
|
|
90
|
+
fsSync.renameSync(tmpPath, filePath);
|
|
91
|
+
}
|
|
92
|
+
|
|
64
93
|
/**
|
|
65
94
|
* Generic write function that handles different modes
|
|
66
95
|
*/
|
|
@@ -85,6 +114,54 @@ export function createTaskFileIO({ workDir, taskName, getStage, statusPath }) {
|
|
|
85
114
|
return await fs.readFile(filePath, "utf8");
|
|
86
115
|
}
|
|
87
116
|
|
|
117
|
+
/**
|
|
118
|
+
* Synchronous status writer for critical paths
|
|
119
|
+
* @param {string} jobDir - Directory containing tasks-status.json
|
|
120
|
+
* @param {Function} updater - Function that mutates and returns the snapshot
|
|
121
|
+
*/
|
|
122
|
+
function writeJobStatusSync(jobDir, updater) {
|
|
123
|
+
const statusPath = path.join(jobDir, "tasks-status.json");
|
|
124
|
+
let snapshot;
|
|
125
|
+
try {
|
|
126
|
+
const raw = fsSync.readFileSync(statusPath, "utf8");
|
|
127
|
+
snapshot = JSON.parse(raw);
|
|
128
|
+
} catch {
|
|
129
|
+
snapshot = { files: { artifacts: [], logs: [], tmp: [] }, tasks: {} };
|
|
130
|
+
}
|
|
131
|
+
const updated = updater(snapshot);
|
|
132
|
+
fsSync.writeFileSync(statusPath, JSON.stringify(updated, null, 2));
|
|
133
|
+
}
|
|
134
|
+
|
|
135
|
+
/**
|
|
136
|
+
* Synchronous status update with file tracking and metadata
|
|
137
|
+
* @param {string} fileType - "logs", "artifacts", or "tmp"
|
|
138
|
+
* @param {string} fileName - Name of the file
|
|
139
|
+
*/
|
|
140
|
+
function updateStatusWithFilesSync(fileType, fileName) {
|
|
141
|
+
const jobDir = path.dirname(statusPath);
|
|
142
|
+
writeJobStatusSync(jobDir, (snapshot) => {
|
|
143
|
+
snapshot.files ||= { artifacts: [], logs: [], tmp: [] };
|
|
144
|
+
|
|
145
|
+
const jobArray = snapshot.files[fileType];
|
|
146
|
+
if (!jobArray.includes(fileName)) {
|
|
147
|
+
jobArray.push(fileName);
|
|
148
|
+
}
|
|
149
|
+
|
|
150
|
+
if (trackTaskFiles) {
|
|
151
|
+
snapshot.tasks ||= {};
|
|
152
|
+
snapshot.tasks[taskName] ||= {};
|
|
153
|
+
snapshot.tasks[taskName].files ||= { artifacts: [], logs: [], tmp: [] };
|
|
154
|
+
|
|
155
|
+
const taskArray = snapshot.tasks[taskName].files[fileType];
|
|
156
|
+
if (!taskArray.includes(fileName)) {
|
|
157
|
+
taskArray.push(fileName);
|
|
158
|
+
}
|
|
159
|
+
}
|
|
160
|
+
|
|
161
|
+
return snapshot;
|
|
162
|
+
});
|
|
163
|
+
}
|
|
164
|
+
|
|
88
165
|
// Return curried functions for each file type
|
|
89
166
|
return {
|
|
90
167
|
/**
|
|
@@ -113,6 +190,12 @@ export function createTaskFileIO({ workDir, taskName, getStage, statusPath }) {
|
|
|
113
190
|
* @param {string} options.mode - "append" (default) or "replace"
|
|
114
191
|
*/
|
|
115
192
|
async writeLog(name, content, options = {}) {
|
|
193
|
+
if (!validateLogName(name)) {
|
|
194
|
+
throw new Error(
|
|
195
|
+
`Invalid log filename "${name}". Must follow format {taskName}-{stage}-{event}.{ext}`
|
|
196
|
+
);
|
|
197
|
+
}
|
|
198
|
+
|
|
116
199
|
const filePath = await writeFile(
|
|
117
200
|
logsDir,
|
|
118
201
|
name,
|
|
@@ -176,6 +259,33 @@ export function createTaskFileIO({ workDir, taskName, getStage, statusPath }) {
|
|
|
176
259
|
return taskDir;
|
|
177
260
|
},
|
|
178
261
|
|
|
262
|
+
/**
|
|
263
|
+
* Write a log file synchronously (critical path only)
|
|
264
|
+
* @param {string} name - File name
|
|
265
|
+
* @param {string} content - Log content
|
|
266
|
+
* @param {Object} options - Options object
|
|
267
|
+
* @param {string} options.mode - "replace" (default) or "append"
|
|
268
|
+
*/
|
|
269
|
+
writeLogSync(name, content, options = {}) {
|
|
270
|
+
if (!validateLogName(name)) {
|
|
271
|
+
throw new Error(
|
|
272
|
+
`Invalid log filename "${name}". Must follow format {taskName}-{stage}-{event}.{ext}`
|
|
273
|
+
);
|
|
274
|
+
}
|
|
275
|
+
|
|
276
|
+
ensureDirSync(logsDir);
|
|
277
|
+
const filePath = path.join(logsDir, name);
|
|
278
|
+
|
|
279
|
+
if (options.mode === "append") {
|
|
280
|
+
fsSync.appendFileSync(filePath, content);
|
|
281
|
+
} else {
|
|
282
|
+
atomicWriteSync(filePath, content);
|
|
283
|
+
}
|
|
284
|
+
|
|
285
|
+
updateStatusWithFilesSync("logs", name);
|
|
286
|
+
return filePath;
|
|
287
|
+
},
|
|
288
|
+
|
|
179
289
|
/**
|
|
180
290
|
* Get the current stage name
|
|
181
291
|
* @returns {string} Current stage name
|
|
@@ -185,3 +295,88 @@ export function createTaskFileIO({ workDir, taskName, getStage, statusPath }) {
|
|
|
185
295
|
},
|
|
186
296
|
};
|
|
187
297
|
}
|
|
298
|
+
|
|
299
|
+
/**
|
|
300
|
+
* Generates a standardized log filename following the convention {taskName}-{stage}-{event}.{ext}
|
|
301
|
+
* @param {string} taskName - Name of the task
|
|
302
|
+
* @param {string} stage - Stage name or identifier
|
|
303
|
+
* @param {string} event - Event type from LogEvent constants
|
|
304
|
+
* @param {string} ext - File extension from LogFileExtension constants
|
|
305
|
+
* @returns {string} Formatted log filename
|
|
306
|
+
*/
|
|
307
|
+
export function generateLogName(
|
|
308
|
+
taskName,
|
|
309
|
+
stage,
|
|
310
|
+
event,
|
|
311
|
+
ext = LogFileExtension.TEXT
|
|
312
|
+
) {
|
|
313
|
+
if (!taskName || !stage || !event || !ext) {
|
|
314
|
+
throw new Error(
|
|
315
|
+
"All parameters (taskName, stage, event, ext) are required for generateLogName"
|
|
316
|
+
);
|
|
317
|
+
}
|
|
318
|
+
if (!isValidLogEvent(event)) {
|
|
319
|
+
throw new Error(
|
|
320
|
+
`Invalid log event "${event}". Use a value from LogEvent: ${Object.values(
|
|
321
|
+
LogEvent
|
|
322
|
+
).join(", ")}`
|
|
323
|
+
);
|
|
324
|
+
}
|
|
325
|
+
if (!isValidLogFileExtension(ext)) {
|
|
326
|
+
throw new Error(
|
|
327
|
+
`Invalid log file extension "${ext}". Use a value from LogFileExtension: ${Object.values(
|
|
328
|
+
LogFileExtension
|
|
329
|
+
).join(", ")}`
|
|
330
|
+
);
|
|
331
|
+
}
|
|
332
|
+
return `${taskName}-${stage}-${event}.${ext}`;
|
|
333
|
+
}
|
|
334
|
+
|
|
335
|
+
/**
|
|
336
|
+
* Parses a log filename to extract taskName, stage, event, and extension
|
|
337
|
+
* @param {string} fileName - Log filename to parse
|
|
338
|
+
* @returns {Object|null} Parsed components or null if invalid format
|
|
339
|
+
*/
|
|
340
|
+
export function parseLogName(fileName) {
|
|
341
|
+
if (typeof fileName !== "string") {
|
|
342
|
+
return null;
|
|
343
|
+
}
|
|
344
|
+
|
|
345
|
+
// Match pattern: taskName-stage-event.ext
|
|
346
|
+
// Split on first two hyphens: taskName-stage-event.ext
|
|
347
|
+
const match = fileName.match(
|
|
348
|
+
/^(?<taskName>[^-]+)-(?<stage>[^-]+)-(?<event>[^.]+)\.(?<ext>.+)$/
|
|
349
|
+
);
|
|
350
|
+
if (!match) {
|
|
351
|
+
return null;
|
|
352
|
+
}
|
|
353
|
+
|
|
354
|
+
const { taskName, stage, event, ext } = match.groups;
|
|
355
|
+
return { taskName, stage, event, ext };
|
|
356
|
+
}
|
|
357
|
+
|
|
358
|
+
/**
|
|
359
|
+
* Generates a glob pattern for matching log files with specific components
|
|
360
|
+
* @param {string} taskName - Task name (optional, use "*" for wildcard)
|
|
361
|
+
* @param {string} stage - Stage name (optional, use "*" for wildcard)
|
|
362
|
+
* @param {string} event - Event type (optional, use "*" for wildcard)
|
|
363
|
+
* @param {string} ext - File extension (optional, use "*" for wildcard)
|
|
364
|
+
* @returns {string} Glob pattern for file matching
|
|
365
|
+
*/
|
|
366
|
+
export function getLogPattern(
|
|
367
|
+
taskName = "*",
|
|
368
|
+
stage = "*",
|
|
369
|
+
event = "*",
|
|
370
|
+
ext = "*"
|
|
371
|
+
) {
|
|
372
|
+
return `${taskName}-${stage}-${event}.${ext}`;
|
|
373
|
+
}
|
|
374
|
+
|
|
375
|
+
/**
|
|
376
|
+
* Validates that a log filename follows the standardized naming convention
|
|
377
|
+
* @param {string} fileName - Log filename to validate
|
|
378
|
+
* @returns {boolean} True if valid, false otherwise
|
|
379
|
+
*/
|
|
380
|
+
export function validateLogName(fileName) {
|
|
381
|
+
return parseLogName(fileName) !== null;
|
|
382
|
+
}
|
package/src/core/orchestrator.js
CHANGED
|
@@ -5,6 +5,8 @@ import chokidar from "chokidar";
|
|
|
5
5
|
import { spawn as defaultSpawn } from "node:child_process";
|
|
6
6
|
import { getConfig, getPipelineConfig } from "./config.js";
|
|
7
7
|
import { createLogger } from "./logger.js";
|
|
8
|
+
import { createTaskFileIO, generateLogName } from "./file-io.js";
|
|
9
|
+
import { LogEvent } from "../config/log-events.js";
|
|
8
10
|
|
|
9
11
|
/**
|
|
10
12
|
* Resolve canonical pipeline directories for the given data root.
|
|
@@ -153,6 +155,35 @@ export async function startOrchestrator(opts) {
|
|
|
153
155
|
};
|
|
154
156
|
await fs.writeFile(statusPath, JSON.stringify(status, null, 2));
|
|
155
157
|
}
|
|
158
|
+
// Create fileIO for orchestrator-level logging
|
|
159
|
+
const fileIO = createTaskFileIO({
|
|
160
|
+
workDir,
|
|
161
|
+
taskName: jobId,
|
|
162
|
+
getStage: () => "orchestrator",
|
|
163
|
+
statusPath,
|
|
164
|
+
trackTaskFiles: false,
|
|
165
|
+
});
|
|
166
|
+
|
|
167
|
+
// Write job start log
|
|
168
|
+
await fileIO.writeLog(
|
|
169
|
+
generateLogName(jobId, "orchestrator", LogEvent.START),
|
|
170
|
+
JSON.stringify(
|
|
171
|
+
{
|
|
172
|
+
jobId,
|
|
173
|
+
pipeline: seed?.pipeline,
|
|
174
|
+
timestamp: new Date().toISOString(),
|
|
175
|
+
seedSummary: {
|
|
176
|
+
name: seed?.name,
|
|
177
|
+
pipeline: seed?.pipeline,
|
|
178
|
+
keys: Object.keys(seed || {}),
|
|
179
|
+
},
|
|
180
|
+
},
|
|
181
|
+
null,
|
|
182
|
+
2
|
|
183
|
+
),
|
|
184
|
+
{ mode: "replace" }
|
|
185
|
+
);
|
|
186
|
+
|
|
156
187
|
// Spawn runner for this job
|
|
157
188
|
const child = spawnRunner(
|
|
158
189
|
logger,
|
|
@@ -161,7 +192,8 @@ export async function startOrchestrator(opts) {
|
|
|
161
192
|
running,
|
|
162
193
|
spawn,
|
|
163
194
|
testMode,
|
|
164
|
-
seed
|
|
195
|
+
seed,
|
|
196
|
+
fileIO
|
|
165
197
|
);
|
|
166
198
|
// child registered inside spawnRunner
|
|
167
199
|
return child;
|
|
@@ -223,6 +255,12 @@ export async function startOrchestrator(opts) {
|
|
|
223
255
|
return { stop };
|
|
224
256
|
}
|
|
225
257
|
|
|
258
|
+
/**
|
|
259
|
+
* @typedef {Object} TaskFileIO
|
|
260
|
+
* @property {(name: string, content: string, options?: { mode?: 'append'|'replace' }) => Promise<string>} writeLog
|
|
261
|
+
* @property {(name: string, content: string, options?: { mode?: 'append'|'replace' }) => string} writeLogSync
|
|
262
|
+
*/
|
|
263
|
+
|
|
226
264
|
/**
|
|
227
265
|
* Spawn a pipeline runner. In testMode we still call spawn() so tests can assert,
|
|
228
266
|
* but we resolve immediately and let tests drive the lifecycle (emit 'exit', etc.).
|
|
@@ -234,8 +272,18 @@ export async function startOrchestrator(opts) {
|
|
|
234
272
|
* @param {typeof defaultSpawn} spawn
|
|
235
273
|
* @param {boolean} testMode
|
|
236
274
|
* @param {Object} seed - Seed data containing pipeline information
|
|
275
|
+
* @param {TaskFileIO} fileIO - Task-scoped file I/O interface for writing logs
|
|
237
276
|
*/
|
|
238
|
-
function spawnRunner(
|
|
277
|
+
function spawnRunner(
|
|
278
|
+
logger,
|
|
279
|
+
jobId,
|
|
280
|
+
dirs,
|
|
281
|
+
running,
|
|
282
|
+
spawn,
|
|
283
|
+
testMode,
|
|
284
|
+
seed,
|
|
285
|
+
fileIO
|
|
286
|
+
) {
|
|
239
287
|
// Use path relative to this file to avoid process.cwd() issues
|
|
240
288
|
const orchestratorDir = path.dirname(new URL(import.meta.url).pathname);
|
|
241
289
|
const runnerPath = path.join(orchestratorDir, "pipeline-runner.js");
|
|
@@ -316,11 +364,67 @@ function spawnRunner(logger, jobId, dirs, running, spawn, testMode, seed) {
|
|
|
316
364
|
|
|
317
365
|
running.set(jobId, child);
|
|
318
366
|
|
|
319
|
-
child.on("exit", () => {
|
|
367
|
+
child.on("exit", (code, signal) => {
|
|
320
368
|
running.delete(jobId);
|
|
369
|
+
|
|
370
|
+
// Write job completion log synchronously
|
|
371
|
+
if (fileIO) {
|
|
372
|
+
try {
|
|
373
|
+
fileIO.writeLogSync(
|
|
374
|
+
generateLogName(jobId, "orchestrator", LogEvent.COMPLETE),
|
|
375
|
+
JSON.stringify(
|
|
376
|
+
{
|
|
377
|
+
jobId,
|
|
378
|
+
exitCode: code,
|
|
379
|
+
signal: signal,
|
|
380
|
+
timestamp: new Date().toISOString(),
|
|
381
|
+
completionType: code === 0 ? "success" : "failure",
|
|
382
|
+
},
|
|
383
|
+
null,
|
|
384
|
+
2
|
|
385
|
+
),
|
|
386
|
+
{ mode: "replace" }
|
|
387
|
+
);
|
|
388
|
+
} catch (error) {
|
|
389
|
+
logger.error("Failed to write job completion log", {
|
|
390
|
+
jobId,
|
|
391
|
+
error: error.message,
|
|
392
|
+
});
|
|
393
|
+
}
|
|
394
|
+
}
|
|
321
395
|
});
|
|
322
|
-
|
|
396
|
+
|
|
397
|
+
child.on("error", (error) => {
|
|
323
398
|
running.delete(jobId);
|
|
399
|
+
|
|
400
|
+
// Write job error log synchronously
|
|
401
|
+
if (fileIO) {
|
|
402
|
+
try {
|
|
403
|
+
fileIO.writeLogSync(
|
|
404
|
+
generateLogName(jobId, "orchestrator", LogEvent.ERROR),
|
|
405
|
+
JSON.stringify(
|
|
406
|
+
{
|
|
407
|
+
jobId,
|
|
408
|
+
error: {
|
|
409
|
+
message: error.message,
|
|
410
|
+
name: error.name,
|
|
411
|
+
code: error.code,
|
|
412
|
+
},
|
|
413
|
+
timestamp: new Date().toISOString(),
|
|
414
|
+
completionType: "error",
|
|
415
|
+
},
|
|
416
|
+
null,
|
|
417
|
+
2
|
|
418
|
+
),
|
|
419
|
+
{ mode: "replace" }
|
|
420
|
+
);
|
|
421
|
+
} catch (logError) {
|
|
422
|
+
logger.error("Failed to write job error log", {
|
|
423
|
+
jobId,
|
|
424
|
+
error: logError.message,
|
|
425
|
+
});
|
|
426
|
+
}
|
|
427
|
+
}
|
|
324
428
|
});
|
|
325
429
|
|
|
326
430
|
// In test mode: return immediately; in real mode you might await readiness
|
|
@@ -7,9 +7,14 @@ import { getPipelineConfig } from "./config.js";
|
|
|
7
7
|
import { writeJobStatus } from "./status-writer.js";
|
|
8
8
|
import { TaskState } from "../config/statuses.js";
|
|
9
9
|
import { ensureTaskSymlinkBridge } from "./symlink-bridge.js";
|
|
10
|
-
import {
|
|
11
|
-
|
|
10
|
+
import {
|
|
11
|
+
cleanupTaskSymlinks,
|
|
12
|
+
validateTaskSymlinks,
|
|
13
|
+
repairTaskSymlinks,
|
|
14
|
+
} from "./symlink-utils.js";
|
|
15
|
+
import { createTaskFileIO, generateLogName } from "./file-io.js";
|
|
12
16
|
import { createJobLogger } from "./logger.js";
|
|
17
|
+
import { LogEvent, LogFileExtension } from "../config/log-events.js";
|
|
13
18
|
|
|
14
19
|
const ROOT = process.env.PO_ROOT || process.cwd();
|
|
15
20
|
const DATA_DIR = path.join(ROOT, process.env.PO_DATA_DIR || "pipeline-data");
|
|
@@ -132,8 +137,66 @@ for (const taskName of pipeline.tasks) {
|
|
|
132
137
|
? modulePath
|
|
133
138
|
: path.resolve(path.dirname(TASK_REGISTRY), modulePath);
|
|
134
139
|
|
|
135
|
-
//
|
|
140
|
+
// Validate symlinks before task execution to ensure restart reliability
|
|
136
141
|
const poRoot = process.env.PO_ROOT || process.cwd();
|
|
142
|
+
const expectedTargets = {
|
|
143
|
+
nodeModules: path.join(path.resolve(poRoot, ".."), "node_modules"),
|
|
144
|
+
taskRoot: path.dirname(absoluteModulePath),
|
|
145
|
+
};
|
|
146
|
+
|
|
147
|
+
const validationResult = await validateTaskSymlinks(
|
|
148
|
+
taskDir,
|
|
149
|
+
expectedTargets
|
|
150
|
+
);
|
|
151
|
+
|
|
152
|
+
if (!validationResult.isValid) {
|
|
153
|
+
logger.warn("Task symlinks validation failed, attempting repair", {
|
|
154
|
+
taskName,
|
|
155
|
+
taskDir,
|
|
156
|
+
errors: validationResult.errors,
|
|
157
|
+
validationDuration: validationResult.duration,
|
|
158
|
+
});
|
|
159
|
+
|
|
160
|
+
const repairResult = await repairTaskSymlinks(
|
|
161
|
+
taskDir,
|
|
162
|
+
poRoot,
|
|
163
|
+
absoluteModulePath
|
|
164
|
+
);
|
|
165
|
+
|
|
166
|
+
if (!repairResult.success) {
|
|
167
|
+
const errorMessage = `Failed to repair task symlinks for ${taskName}: ${repairResult.errors.join(", ")}`;
|
|
168
|
+
logger.error("Task symlink repair failed, aborting execution", {
|
|
169
|
+
taskName,
|
|
170
|
+
taskDir,
|
|
171
|
+
errors: repairResult.errors,
|
|
172
|
+
repairDuration: repairResult.duration,
|
|
173
|
+
});
|
|
174
|
+
|
|
175
|
+
await updateStatus(taskName, {
|
|
176
|
+
state: TaskState.FAILED,
|
|
177
|
+
endedAt: now(),
|
|
178
|
+
error: { message: errorMessage, type: "SymlinkRepairFailed" },
|
|
179
|
+
});
|
|
180
|
+
|
|
181
|
+
process.exitCode = 1;
|
|
182
|
+
process.exit(1);
|
|
183
|
+
}
|
|
184
|
+
|
|
185
|
+
logger.log("Task symlinks repaired successfully", {
|
|
186
|
+
taskName,
|
|
187
|
+
taskDir,
|
|
188
|
+
repairDuration: repairResult.duration,
|
|
189
|
+
relocatedEntry: repairResult.relocatedEntry,
|
|
190
|
+
});
|
|
191
|
+
} else {
|
|
192
|
+
logger.debug("Task symlinks validation passed", {
|
|
193
|
+
taskName,
|
|
194
|
+
taskDir,
|
|
195
|
+
validationDuration: validationResult.duration,
|
|
196
|
+
});
|
|
197
|
+
}
|
|
198
|
+
|
|
199
|
+
// Create symlink bridge for deterministic module resolution
|
|
137
200
|
const relocatedEntry = await ensureTaskSymlinkBridge({
|
|
138
201
|
taskDir,
|
|
139
202
|
poRoot,
|
|
@@ -162,7 +225,12 @@ for (const taskName of pipeline.tasks) {
|
|
|
162
225
|
// Persist execution-logs.json and failure-details.json on task failure via IO
|
|
163
226
|
if (result.logs) {
|
|
164
227
|
await fileIO.writeLog(
|
|
165
|
-
|
|
228
|
+
generateLogName(
|
|
229
|
+
taskName,
|
|
230
|
+
"pipeline",
|
|
231
|
+
LogEvent.EXECUTION_LOGS,
|
|
232
|
+
LogFileExtension.JSON
|
|
233
|
+
),
|
|
166
234
|
JSON.stringify(result.logs, null, 2),
|
|
167
235
|
{ mode: "replace" }
|
|
168
236
|
);
|
|
@@ -175,7 +243,12 @@ for (const taskName of pipeline.tasks) {
|
|
|
175
243
|
refinementAttempts: result.refinementAttempts || 0,
|
|
176
244
|
};
|
|
177
245
|
await fileIO.writeLog(
|
|
178
|
-
|
|
246
|
+
generateLogName(
|
|
247
|
+
taskName,
|
|
248
|
+
"pipeline",
|
|
249
|
+
LogEvent.FAILURE_DETAILS,
|
|
250
|
+
LogFileExtension.JSON
|
|
251
|
+
),
|
|
179
252
|
JSON.stringify(failureDetails, null, 2),
|
|
180
253
|
{ mode: "replace" }
|
|
181
254
|
);
|
|
@@ -218,7 +291,12 @@ for (const taskName of pipeline.tasks) {
|
|
|
218
291
|
|
|
219
292
|
if (result.logs) {
|
|
220
293
|
await fileIO.writeLog(
|
|
221
|
-
|
|
294
|
+
generateLogName(
|
|
295
|
+
taskName,
|
|
296
|
+
"pipeline",
|
|
297
|
+
LogEvent.EXECUTION_LOGS,
|
|
298
|
+
LogFileExtension.JSON
|
|
299
|
+
),
|
|
222
300
|
JSON.stringify(result.logs, null, 2),
|
|
223
301
|
{ mode: "replace" }
|
|
224
302
|
);
|
|
@@ -1,6 +1,7 @@
|
|
|
1
1
|
import fs from "node:fs/promises";
|
|
2
2
|
import path from "node:path";
|
|
3
3
|
import { createLogger } from "./logger.js";
|
|
4
|
+
import { ensureTaskSymlinkBridge } from "./symlink-bridge.js";
|
|
4
5
|
|
|
5
6
|
const logger = createLogger("SymlinkUtils");
|
|
6
7
|
|
|
@@ -49,6 +50,201 @@ export async function ensureSymlink(linkPath, targetPath, type) {
|
|
|
49
50
|
}
|
|
50
51
|
}
|
|
51
52
|
|
|
53
|
+
/**
|
|
54
|
+
* Validates that required task symlinks exist and point to accessible targets.
|
|
55
|
+
*
|
|
56
|
+
* @param {string} taskDir - The task directory containing symlinks
|
|
57
|
+
* @param {Object} expectedTargets - Expected symlink targets
|
|
58
|
+
* @param {string} expectedTargets.nodeModules - Expected target for node_modules symlink
|
|
59
|
+
* @param {string} expectedTargets.taskRoot - Expected target for _task_root symlink
|
|
60
|
+
* @returns {Object} Validation result with isValid flag and details
|
|
61
|
+
*/
|
|
62
|
+
export async function validateTaskSymlinks(taskDir, expectedTargets) {
|
|
63
|
+
const startTime = Date.now();
|
|
64
|
+
const validationErrors = [];
|
|
65
|
+
const validationDetails = {};
|
|
66
|
+
|
|
67
|
+
const symlinksToValidate = [
|
|
68
|
+
{ name: "node_modules", expectedTarget: expectedTargets.nodeModules },
|
|
69
|
+
{ name: "_task_root", expectedTarget: expectedTargets.taskRoot },
|
|
70
|
+
];
|
|
71
|
+
|
|
72
|
+
for (const { name, expectedTarget } of symlinksToValidate) {
|
|
73
|
+
const linkPath = path.join(taskDir, name);
|
|
74
|
+
|
|
75
|
+
try {
|
|
76
|
+
// Check if symlink exists
|
|
77
|
+
const stats = await fs.lstat(linkPath);
|
|
78
|
+
|
|
79
|
+
if (!stats.isSymbolicLink()) {
|
|
80
|
+
validationErrors.push(
|
|
81
|
+
`${name} exists but is not a symlink (type: ${stats.isFile() ? "file" : "directory"})`
|
|
82
|
+
);
|
|
83
|
+
validationDetails[name] = {
|
|
84
|
+
exists: true,
|
|
85
|
+
isSymlink: false,
|
|
86
|
+
targetAccessible: false,
|
|
87
|
+
};
|
|
88
|
+
continue;
|
|
89
|
+
}
|
|
90
|
+
|
|
91
|
+
// Read the symlink target
|
|
92
|
+
const actualTarget = await fs.readlink(linkPath);
|
|
93
|
+
|
|
94
|
+
// Check if target matches expected (normalize paths for comparison)
|
|
95
|
+
const normalizedActual = path.resolve(taskDir, actualTarget);
|
|
96
|
+
const normalizedExpected = path.resolve(expectedTarget);
|
|
97
|
+
|
|
98
|
+
if (normalizedActual !== normalizedExpected) {
|
|
99
|
+
validationErrors.push(
|
|
100
|
+
`${name} points to wrong target: expected ${expectedTarget}, got ${actualTarget}`
|
|
101
|
+
);
|
|
102
|
+
validationDetails[name] = {
|
|
103
|
+
exists: true,
|
|
104
|
+
isSymlink: true,
|
|
105
|
+
targetAccessible: false,
|
|
106
|
+
actualTarget,
|
|
107
|
+
expectedTarget,
|
|
108
|
+
};
|
|
109
|
+
continue;
|
|
110
|
+
}
|
|
111
|
+
|
|
112
|
+
// Check if target is accessible
|
|
113
|
+
const targetStats = await fs.stat(normalizedActual).catch(() => null);
|
|
114
|
+
if (!targetStats) {
|
|
115
|
+
validationErrors.push(
|
|
116
|
+
`${name} target is not accessible: ${actualTarget}`
|
|
117
|
+
);
|
|
118
|
+
validationDetails[name] = {
|
|
119
|
+
exists: true,
|
|
120
|
+
isSymlink: true,
|
|
121
|
+
targetAccessible: false,
|
|
122
|
+
actualTarget,
|
|
123
|
+
};
|
|
124
|
+
continue;
|
|
125
|
+
}
|
|
126
|
+
|
|
127
|
+
if (!targetStats.isDirectory()) {
|
|
128
|
+
validationErrors.push(
|
|
129
|
+
`${name} target is not a directory: ${actualTarget}`
|
|
130
|
+
);
|
|
131
|
+
validationDetails[name] = {
|
|
132
|
+
exists: true,
|
|
133
|
+
isSymlink: true,
|
|
134
|
+
targetAccessible: false,
|
|
135
|
+
actualTarget,
|
|
136
|
+
targetType: "file",
|
|
137
|
+
};
|
|
138
|
+
continue;
|
|
139
|
+
}
|
|
140
|
+
|
|
141
|
+
// Symlink is valid
|
|
142
|
+
validationDetails[name] = {
|
|
143
|
+
exists: true,
|
|
144
|
+
isSymlink: true,
|
|
145
|
+
targetAccessible: true,
|
|
146
|
+
actualTarget,
|
|
147
|
+
};
|
|
148
|
+
} catch (error) {
|
|
149
|
+
if (error.code === "ENOENT") {
|
|
150
|
+
validationErrors.push(`${name} symlink does not exist`);
|
|
151
|
+
validationDetails[name] = {
|
|
152
|
+
exists: false,
|
|
153
|
+
isSymlink: false,
|
|
154
|
+
targetAccessible: false,
|
|
155
|
+
};
|
|
156
|
+
} else {
|
|
157
|
+
validationErrors.push(`${name} validation failed: ${error.message}`);
|
|
158
|
+
validationDetails[name] = {
|
|
159
|
+
exists: false,
|
|
160
|
+
isSymlink: false,
|
|
161
|
+
targetAccessible: false,
|
|
162
|
+
error: error.message,
|
|
163
|
+
};
|
|
164
|
+
}
|
|
165
|
+
}
|
|
166
|
+
}
|
|
167
|
+
|
|
168
|
+
const isValid = validationErrors.length === 0;
|
|
169
|
+
const duration = Date.now() - startTime;
|
|
170
|
+
|
|
171
|
+
logger.debug("Symlink validation completed", {
|
|
172
|
+
taskDir,
|
|
173
|
+
isValid,
|
|
174
|
+
errorsCount: validationErrors.length,
|
|
175
|
+
duration,
|
|
176
|
+
details: validationDetails,
|
|
177
|
+
});
|
|
178
|
+
|
|
179
|
+
return {
|
|
180
|
+
isValid,
|
|
181
|
+
errors: validationErrors,
|
|
182
|
+
details: validationDetails,
|
|
183
|
+
duration,
|
|
184
|
+
};
|
|
185
|
+
}
|
|
186
|
+
|
|
187
|
+
/**
|
|
188
|
+
* Repairs task symlinks by recreating them using the existing symlink bridge.
|
|
189
|
+
*
|
|
190
|
+
* @param {string} taskDir - The task directory where symlinks should be created
|
|
191
|
+
* @param {string} poRoot - The repository root directory
|
|
192
|
+
* @param {string} taskModulePath - Absolute path to the original task module
|
|
193
|
+
* @returns {Object} Repair result with success flag and details
|
|
194
|
+
*/
|
|
195
|
+
export async function repairTaskSymlinks(taskDir, poRoot, taskModulePath) {
|
|
196
|
+
const startTime = Date.now();
|
|
197
|
+
|
|
198
|
+
try {
|
|
199
|
+
logger.log("Repairing task symlinks", {
|
|
200
|
+
taskDir,
|
|
201
|
+
poRoot,
|
|
202
|
+
taskModulePath,
|
|
203
|
+
});
|
|
204
|
+
|
|
205
|
+
// Use existing ensureTaskSymlinkBridge for repairs
|
|
206
|
+
const relocatedEntry = await ensureTaskSymlinkBridge({
|
|
207
|
+
taskDir,
|
|
208
|
+
poRoot,
|
|
209
|
+
taskModulePath,
|
|
210
|
+
});
|
|
211
|
+
|
|
212
|
+
const duration = Date.now() - startTime;
|
|
213
|
+
|
|
214
|
+
logger.log("Task symlinks repaired successfully", {
|
|
215
|
+
taskDir,
|
|
216
|
+
duration,
|
|
217
|
+
relocatedEntry,
|
|
218
|
+
});
|
|
219
|
+
|
|
220
|
+
return {
|
|
221
|
+
success: true,
|
|
222
|
+
relocatedEntry,
|
|
223
|
+
duration,
|
|
224
|
+
errors: [],
|
|
225
|
+
};
|
|
226
|
+
} catch (error) {
|
|
227
|
+
const duration = Date.now() - startTime;
|
|
228
|
+
const errorMessage = `Failed to repair task symlinks: ${error.message}`;
|
|
229
|
+
|
|
230
|
+
logger.error("Task symlink repair failed", {
|
|
231
|
+
taskDir,
|
|
232
|
+
poRoot,
|
|
233
|
+
taskModulePath,
|
|
234
|
+
duration,
|
|
235
|
+
error: error.message,
|
|
236
|
+
stack: error.stack,
|
|
237
|
+
});
|
|
238
|
+
|
|
239
|
+
return {
|
|
240
|
+
success: false,
|
|
241
|
+
relocatedEntry: null,
|
|
242
|
+
duration,
|
|
243
|
+
errors: [errorMessage],
|
|
244
|
+
};
|
|
245
|
+
}
|
|
246
|
+
}
|
|
247
|
+
|
|
52
248
|
/**
|
|
53
249
|
* Removes task symlinks from a completed job directory to avoid dangling links.
|
|
54
250
|
*
|
package/src/core/task-runner.js
CHANGED
|
@@ -4,12 +4,13 @@ import fs from "fs";
|
|
|
4
4
|
import { createLLM, getLLMEvents } from "../llm/index.js";
|
|
5
5
|
import { loadFreshModule } from "./module-loader.js";
|
|
6
6
|
import { loadEnvironment } from "./environment.js";
|
|
7
|
-
import { createTaskFileIO } from "./file-io.js";
|
|
7
|
+
import { createTaskFileIO, generateLogName } from "./file-io.js";
|
|
8
8
|
import { writeJobStatus } from "./status-writer.js";
|
|
9
9
|
import { computeDeterministicProgress } from "./progress.js";
|
|
10
10
|
import { TaskState } from "../config/statuses.js";
|
|
11
11
|
import { validateWithSchema } from "../api/validators/json.js";
|
|
12
12
|
import { createJobLogger } from "./logger.js";
|
|
13
|
+
import { LogEvent, LogFileExtension } from "../config/log-events.js";
|
|
13
14
|
|
|
14
15
|
/**
|
|
15
16
|
* Derives model key and token counts from LLM metric event.
|
|
@@ -67,13 +68,13 @@ function assertStageResult(stageName, result) {
|
|
|
67
68
|
);
|
|
68
69
|
}
|
|
69
70
|
|
|
70
|
-
if (!
|
|
71
|
+
if (!Object.prototype.hasOwnProperty.call(result, "output")) {
|
|
71
72
|
throw new Error(
|
|
72
73
|
`Stage "${stageName}" result missing required property: output`
|
|
73
74
|
);
|
|
74
75
|
}
|
|
75
76
|
|
|
76
|
-
if (!
|
|
77
|
+
if (!Object.prototype.hasOwnProperty.call(result, "flags")) {
|
|
77
78
|
throw new Error(
|
|
78
79
|
`Stage "${stageName}" result missing required property: flags`
|
|
79
80
|
);
|
|
@@ -507,7 +508,11 @@ export async function runPipeline(modulePath, initialContext = {}) {
|
|
|
507
508
|
}
|
|
508
509
|
|
|
509
510
|
// Add console output capture before stage execution using IO
|
|
510
|
-
const logName =
|
|
511
|
+
const logName = generateLogName(
|
|
512
|
+
context.meta.taskName,
|
|
513
|
+
stageName,
|
|
514
|
+
LogEvent.START
|
|
515
|
+
);
|
|
511
516
|
const logPath = path.join(context.meta.workDir, "files", "logs", logName);
|
|
512
517
|
console.debug("[task-runner] stage log path resolution via IO", {
|
|
513
518
|
stage: stageName,
|
|
@@ -593,7 +598,12 @@ export async function runPipeline(modulePath, initialContext = {}) {
|
|
|
593
598
|
},
|
|
594
599
|
};
|
|
595
600
|
await context.io.writeLog(
|
|
596
|
-
|
|
601
|
+
generateLogName(
|
|
602
|
+
context.meta.taskName,
|
|
603
|
+
stageName,
|
|
604
|
+
LogEvent.CONTEXT,
|
|
605
|
+
LogFileExtension.JSON
|
|
606
|
+
),
|
|
597
607
|
JSON.stringify(snapshot, null, 2),
|
|
598
608
|
{ mode: "replace" }
|
|
599
609
|
);
|
|
@@ -696,6 +706,18 @@ export async function runPipeline(modulePath, initialContext = {}) {
|
|
|
696
706
|
}
|
|
697
707
|
}
|
|
698
708
|
|
|
709
|
+
// Add explicit completion log after stage completion
|
|
710
|
+
const completeLogName = generateLogName(
|
|
711
|
+
context.meta.taskName,
|
|
712
|
+
stageName,
|
|
713
|
+
LogEvent.COMPLETE
|
|
714
|
+
);
|
|
715
|
+
await context.io.writeLog(
|
|
716
|
+
completeLogName,
|
|
717
|
+
`Stage ${stageName} completed at ${new Date().toISOString()}\n`,
|
|
718
|
+
{ mode: "replace" }
|
|
719
|
+
);
|
|
720
|
+
|
|
699
721
|
const ms = +(performance.now() - start).toFixed(2);
|
|
700
722
|
logger.log("Stage completed successfully", {
|
|
701
723
|
stage: stageName,
|
|
@@ -729,9 +751,17 @@ export async function runPipeline(modulePath, initialContext = {}) {
|
|
|
729
751
|
context.meta.workDir,
|
|
730
752
|
"files",
|
|
731
753
|
"logs",
|
|
732
|
-
|
|
754
|
+
generateLogName(context.meta.taskName, stageName, LogEvent.START)
|
|
755
|
+
),
|
|
756
|
+
snapshotPath: path.join(
|
|
757
|
+
logsDir,
|
|
758
|
+
generateLogName(
|
|
759
|
+
context.meta.taskName,
|
|
760
|
+
stageName,
|
|
761
|
+
LogEvent.CONTEXT,
|
|
762
|
+
LogFileExtension.JSON
|
|
763
|
+
)
|
|
733
764
|
),
|
|
734
|
-
snapshotPath: path.join(logsDir, `stage-${stageName}-context.json`),
|
|
735
765
|
dataHasSeed: !!context.data?.seed,
|
|
736
766
|
seedHasData: context.data?.seed?.data !== undefined,
|
|
737
767
|
flagsKeys: Object.keys(context.flags || {}),
|
|
@@ -855,8 +885,41 @@ function toAbsFileURL(p) {
|
|
|
855
885
|
}
|
|
856
886
|
|
|
857
887
|
/**
 * Normalizes any thrown value into a plain serializable object.
 *
 * - Error instances keep their name, message, and stack.
 * - Plain objects (e.g. HTTP error payloads) yield a best-effort message
 *   plus optional status / code / error context fields.
 * - Anything else is stringified into `{ message }`.
 *
 * @param {*} err - The thrown value to normalize.
 * @returns {Object} A plain object describing the error.
 */
function normalizeError(err) {
  if (err instanceof Error) {
    return { name: err.name, message: err.message, stack: err.stack };
  }

  // Non-object values (strings, numbers, null, undefined) — stringify.
  if (typeof err !== "object" || err === null) {
    return { message: String(err) };
  }

  // Plain object errors (like those from HTTP responses): pick the most
  // specific message available, in priority order.
  let message = "Unknown error";
  if (typeof err.message === "string") {
    message = err.message;
  } else if (typeof err.error?.message === "string") {
    message = err.error.message;
  } else if (typeof err.error === "string") {
    message = err.error;
  }

  const normalized = { message };

  // Carry over additional context when present.
  if (err.status) normalized.status = err.status;
  if (err.code) normalized.code = err.code;

  const inner = err.error;
  if (inner) {
    if (typeof inner === "string") {
      normalized.error = inner;
    } else if (typeof inner === "object") {
      // Prefer a nested message; otherwise serialize the whole object.
      normalized.error = inner.message ? inner.message : JSON.stringify(inner);
    } else {
      normalized.error = String(inner);
    }
  }

  return normalized;
}
|
|
862
925
|
|
package/src/providers/zhipu.js
CHANGED
|
@@ -65,20 +65,39 @@ export async function zhipuChat({
|
|
|
65
65
|
};
|
|
66
66
|
|
|
67
67
|
console.log("[Zhipu] Calling Zhipu API...");
|
|
68
|
-
const response = await fetch(
|
|
69
|
-
|
|
70
|
-
|
|
71
|
-
|
|
72
|
-
|
|
73
|
-
|
|
74
|
-
|
|
75
|
-
|
|
68
|
+
const response = await fetch(
|
|
69
|
+
"https://api.z.ai/api/paas/v4/chat/completions",
|
|
70
|
+
{
|
|
71
|
+
method: "POST",
|
|
72
|
+
headers: {
|
|
73
|
+
"Content-Type": "application/json",
|
|
74
|
+
Authorization: `Bearer ${process.env.ZHIPU_API_KEY}`,
|
|
75
|
+
},
|
|
76
|
+
body: JSON.stringify(requestBody),
|
|
77
|
+
}
|
|
78
|
+
);
|
|
76
79
|
|
|
77
80
|
if (!response.ok) {
|
|
78
|
-
|
|
79
|
-
|
|
80
|
-
|
|
81
|
-
|
|
81
|
+
let errorMessage;
|
|
82
|
+
try {
|
|
83
|
+
const errorData = await response.json();
|
|
84
|
+
errorMessage =
|
|
85
|
+
errorData?.error?.message ||
|
|
86
|
+
errorData?.message ||
|
|
87
|
+
response.statusText ||
|
|
88
|
+
"Unknown error";
|
|
89
|
+
} catch {
|
|
90
|
+
// If JSON parsing fails, try to get text response
|
|
91
|
+
try {
|
|
92
|
+
errorMessage = await response.text();
|
|
93
|
+
} catch {
|
|
94
|
+
errorMessage = response.statusText || "Unknown error";
|
|
95
|
+
}
|
|
96
|
+
}
|
|
97
|
+
|
|
98
|
+
const error = new Error(errorMessage);
|
|
99
|
+
error.status = response.status;
|
|
100
|
+
throw error;
|
|
82
101
|
}
|
|
83
102
|
|
|
84
103
|
const data = await response.json();
|
|
@@ -117,7 +136,7 @@ export async function zhipuChat({
|
|
|
117
136
|
};
|
|
118
137
|
} catch (error) {
|
|
119
138
|
lastError = error;
|
|
120
|
-
const msg = error?.
|
|
139
|
+
const msg = error?.message || error?.toString() || "Unknown error";
|
|
121
140
|
console.error("[Zhipu] Error occurred:", msg);
|
|
122
141
|
console.error("[Zhipu] Error status:", error?.status);
|
|
123
142
|
|