@ryanfw/prompt-orchestration-pipeline 0.5.0 → 0.7.0
This diff covers publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in their public registries.
- package/README.md +1 -2
- package/package.json +1 -2
- package/src/api/validators/json.js +39 -0
- package/src/components/DAGGrid.jsx +392 -303
- package/src/components/JobCard.jsx +14 -12
- package/src/components/JobDetail.jsx +54 -51
- package/src/components/JobTable.jsx +72 -23
- package/src/components/Layout.jsx +145 -42
- package/src/components/LiveText.jsx +47 -0
- package/src/components/PageSubheader.jsx +75 -0
- package/src/components/TaskDetailSidebar.jsx +216 -0
- package/src/components/TimerText.jsx +82 -0
- package/src/components/UploadSeed.jsx +0 -70
- package/src/components/ui/Logo.jsx +16 -0
- package/src/components/ui/RestartJobModal.jsx +140 -0
- package/src/components/ui/toast.jsx +138 -0
- package/src/config/models.js +322 -0
- package/src/config/statuses.js +119 -0
- package/src/core/config.js +4 -34
- package/src/core/file-io.js +13 -28
- package/src/core/module-loader.js +54 -40
- package/src/core/pipeline-runner.js +65 -26
- package/src/core/status-writer.js +213 -58
- package/src/core/symlink-bridge.js +57 -0
- package/src/core/symlink-utils.js +94 -0
- package/src/core/task-runner.js +321 -437
- package/src/llm/index.js +258 -86
- package/src/pages/Code.jsx +351 -0
- package/src/pages/PipelineDetail.jsx +124 -15
- package/src/pages/PromptPipelineDashboard.jsx +20 -88
- package/src/providers/anthropic.js +83 -69
- package/src/providers/base.js +52 -0
- package/src/providers/deepseek.js +20 -21
- package/src/providers/gemini.js +226 -0
- package/src/providers/openai.js +36 -106
- package/src/providers/zhipu.js +136 -0
- package/src/ui/client/adapters/job-adapter.js +42 -28
- package/src/ui/client/api.js +134 -0
- package/src/ui/client/hooks/useJobDetailWithUpdates.js +65 -179
- package/src/ui/client/index.css +15 -0
- package/src/ui/client/index.html +2 -1
- package/src/ui/client/main.jsx +19 -14
- package/src/ui/client/time-store.js +161 -0
- package/src/ui/config-bridge.js +15 -24
- package/src/ui/config-bridge.node.js +15 -24
- package/src/ui/dist/assets/{index-CxcrauYR.js → index-DqkbzXZ1.js} +2132 -1086
- package/src/ui/dist/assets/style-DBF9NQGk.css +62 -0
- package/src/ui/dist/index.html +4 -3
- package/src/ui/job-reader.js +0 -108
- package/src/ui/public/favicon.svg +12 -0
- package/src/ui/server.js +252 -0
- package/src/ui/sse-enhancer.js +0 -1
- package/src/ui/transformers/list-transformer.js +32 -12
- package/src/ui/transformers/status-transformer.js +29 -42
- package/src/utils/dag.js +8 -4
- package/src/utils/duration.js +13 -19
- package/src/utils/formatters.js +27 -0
- package/src/utils/geometry-equality.js +83 -0
- package/src/utils/pipelines.js +5 -1
- package/src/utils/time-utils.js +40 -0
- package/src/utils/token-cost-calculator.js +294 -0
- package/src/utils/ui.jsx +18 -20
- package/src/components/ui/select.jsx +0 -27
- package/src/lib/utils.js +0 -6
- package/src/ui/client/hooks/useTicker.js +0 -26
- package/src/ui/config-bridge.browser.js +0 -149
- package/src/ui/dist/assets/style-D6K_oQ12.css +0 -62
package/src/pages/PromptPipelineDashboard.jsx
CHANGED
@@ -1,18 +1,16 @@
 // PromptPipelineDashboard.jsx
-import React, {
+import React, { useMemo, useState } from "react";
 import { useNavigate } from "react-router-dom";
 
-import { Box, Flex, Text,
+import { Box, Flex, Text, Tabs } from "@radix-ui/themes";
 
 import { Progress } from "../components/ui/progress";
 import { useJobListWithUpdates } from "../ui/client/hooks/useJobListWithUpdates";
 import { adaptJobSummary } from "../ui/client/adapters/job-adapter";
-import {
-import { useTicker } from "../ui/client/hooks/useTicker";
+import { TaskState, JobStatus } from "../config/statuses.js";
 
 // Referenced components — leave these alone
 import JobTable from "../components/JobTable";
-import UploadSeed from "../components/UploadSeed";
 import Layout from "../components/Layout.jsx";
 
 export default function PromptPipelineDashboard({ isConnected }) {
@@ -55,43 +53,40 @@ export default function PromptPipelineDashboard({ isConnected }) {
     return src.map(adaptJobSummary);
   }, [apiJobs, error]);
   const [activeTab, setActiveTab] = useState("current");
-  const [seedUploadSuccess, setSeedUploadSuccess] = useState(null);
-  const [seedUploadTimer, setSeedUploadTimer] = useState(null);
 
-  // Shared ticker for live duration updates
-  const now = useTicker(10000);
+  // Shared ticker for live duration updates - removed useTicker
 
   const errorCount = useMemo(
-    () => jobs.filter((j) => j.status ===
+    () => jobs.filter((j) => j.status === TaskState.FAILED).length,
     [jobs]
   );
   const currentCount = useMemo(
-    () => jobs.filter((j) => j.status ===
+    () => jobs.filter((j) => j.status === TaskState.RUNNING).length,
    [jobs]
   );
   const completedCount = useMemo(
-    () => jobs.filter((j) => j.status ===
+    () => jobs.filter((j) => j.status === JobStatus.COMPLETE).length,
     [jobs]
   );
 
   const filteredJobs = useMemo(() => {
     switch (activeTab) {
       case "current":
-        return jobs.filter((j) => j.status ===
+        return jobs.filter((j) => j.status === TaskState.RUNNING);
       case "errors":
-        return jobs.filter((j) => j.status ===
+        return jobs.filter((j) => j.status === TaskState.FAILED);
       case "complete":
-        return jobs.filter((j) => j.status ===
+        return jobs.filter((j) => j.status === JobStatus.COMPLETE);
       default:
         return [];
     }
   }, [jobs, activeTab]);
 
-
+  // overallElapsed function removed - JobTable now uses LiveText for duration calculations
 
   // Aggregate progress for currently running jobs (for a subtle top progress bar)
   const runningJobs = useMemo(
-    () => jobs.filter((j) => j.status ===
+    () => jobs.filter((j) => j.status === TaskState.RUNNING),
     [jobs]
   );
   const aggregateProgress = useMemo(() => {
@@ -111,34 +106,6 @@ export default function PromptPipelineDashboard({ isConnected }) {
     }
   };
 
-  // Handle seed upload success
-  const handleSeedUploadSuccess = ({ jobName }) => {
-    // Clear any existing timer
-    if (seedUploadTimer) {
-      clearTimeout(seedUploadTimer);
-    }
-
-    // Set success message
-    setSeedUploadSuccess(jobName);
-
-    // Auto-clear after exactly 5000 ms
-    const timer = setTimeout(() => {
-      setSeedUploadSuccess(null);
-      setSeedUploadTimer(null);
-    }, 5000);
-
-    setSeedUploadTimer(timer);
-  };
-
-  // Cleanup timer on unmount
-  useEffect(() => {
-    return () => {
-      if (seedUploadTimer) {
-        clearTimeout(seedUploadTimer);
-      }
-    };
-  }, [seedUploadTimer]);
-
   // Header actions for Layout
   const headerActions = runningJobs.length > 0 && (
     <Flex align="center" gap="2" className="text-gray-11">
@@ -154,26 +121,6 @@ export default function PromptPipelineDashboard({ isConnected }) {
 
   return (
     <Layout title="Prompt Pipeline" actions={headerActions}>
-      {/* Upload Seed File Section */}
-      <Card className="mb-6">
-        <Flex direction="column" gap="3">
-          <Heading size="4" weight="medium" className="text-gray-12">
-            Upload Seed File
-          </Heading>
-
-          {/* Success Message */}
-          {seedUploadSuccess && (
-            <Box className="rounded-md bg-green-50 p-3 border border-green-200">
-              <Text size="2" className="text-green-800">
-                Job <strong>{seedUploadSuccess}</strong> created successfully
-              </Text>
-            </Box>
-          )}
-
-          <UploadSeed onUploadSuccess={handleSeedUploadSuccess} />
-        </Flex>
-      </Card>
-
       {error && (
         <Box className="mb-4 rounded-md bg-yellow-50 p-3 border border-yellow-200">
           <Text size="2" className="text-yellow-800">
@@ -181,7 +128,11 @@ export default function PromptPipelineDashboard({ isConnected }) {
           </Text>
         </Box>
       )}
-      <Tabs.Root
+      <Tabs.Root
+        value={activeTab}
+        onValueChange={setActiveTab}
+        className="mt-4"
+      >
         <Tabs.List aria-label="Job filters">
           <Tabs.Trigger value="current">Current ({currentCount})</Tabs.Trigger>
           <Tabs.Trigger value="errors">Errors ({errorCount})</Tabs.Trigger>
@@ -189,33 +140,14 @@ export default function PromptPipelineDashboard({ isConnected }) {
             Completed ({completedCount})
           </Tabs.Trigger>
         </Tabs.List>
-
         <Tabs.Content value="current">
-          <JobTable
-            jobs={filteredJobs}
-            pipeline={null}
-            onOpenJob={openJob}
-            overallElapsed={overallElapsed}
-            now={now}
-          />
+          <JobTable jobs={filteredJobs} pipeline={null} onOpenJob={openJob} />
        </Tabs.Content>
         <Tabs.Content value="errors">
-          <JobTable
-            jobs={filteredJobs}
-            pipeline={null}
-            onOpenJob={openJob}
-            overallElapsed={overallElapsed}
-            now={now}
-          />
+          <JobTable jobs={filteredJobs} pipeline={null} onOpenJob={openJob} />
         </Tabs.Content>
         <Tabs.Content value="complete">
-          <JobTable
-            jobs={filteredJobs}
-            pipeline={null}
-            onOpenJob={openJob}
-            overallElapsed={overallElapsed}
-            now={now}
-          />
+          <JobTable jobs={filteredJobs} pipeline={null} onOpenJob={openJob} />
         </Tabs.Content>
       </Tabs.Root>
     </Layout>
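
The dashboard hunks above swap inline status literals (truncated in this view) for the `TaskState` and `JobStatus` constants exported by the new `package/src/config/statuses.js`. The sketch below is a guess at the shape of that module, assuming frozen maps of status strings; the concrete keys and values shipped in `statuses.js` may differ.

```js
// Hypothetical sketch of src/config/statuses.js; only TaskState.RUNNING,
// TaskState.FAILED, and JobStatus.COMPLETE are referenced by the dashboard diff.
export const TaskState = Object.freeze({
  PENDING: "pending", // assumed value
  RUNNING: "running", // assumed value
  FAILED: "failed",   // assumed value
});

export const JobStatus = Object.freeze({
  COMPLETE: "complete", // assumed value
});

// Example: the tab counts in PromptPipelineDashboard reduce to simple filters.
const jobs = [
  { id: "job-1", status: TaskState.RUNNING },
  { id: "job-2", status: TaskState.FAILED },
  { id: "job-3", status: JobStatus.COMPLETE },
];
console.log(jobs.filter((j) => j.status === TaskState.RUNNING).length); // 1
```

Whatever the real values are, centralizing them means the filters above compare against a single source of truth instead of scattered string literals.
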
package/src/providers/anthropic.js
CHANGED
@@ -1,54 +1,38 @@
-import Anthropic from "@anthropic-ai/sdk";
 import {
   extractMessages,
   isRetryableError,
   sleep,
   tryParseJSON,
+  ensureJsonResponseFormat,
+  ProviderJsonParseError,
 } from "./base.js";
 
-let client = null;
-
-function getClient() {
-  if (!client && process.env.ANTHROPIC_API_KEY) {
-    client = new Anthropic({
-      apiKey: process.env.ANTHROPIC_API_KEY,
-      baseURL: process.env.ANTHROPIC_BASE_URL,
-    });
-  }
-  return client;
-}
-
 export async function anthropicChat({
   messages,
-  model = "claude-3-
+  model = "claude-3-sonnet",
   temperature = 0.7,
-  maxTokens =
-  responseFormat,
+  maxTokens = 8192,
+  responseFormat = "json",
   topP,
-
-  stopSequences,
+  stop,
   maxRetries = 3,
 }) {
-
-
-
-
-
-
-
-  const
-
-
-
-
-
-  });
-  }
-}
+  console.log("\n[Anthropic] Starting anthropicChat call");
+  console.log("[Anthropic] Model:", model);
+  console.log("[Anthropic] Response format:", responseFormat);
+
+  // Enforce JSON mode - reject calls without proper JSON responseFormat
+  ensureJsonResponseFormat(responseFormat, "Anthropic");
+
+  const { systemMsg, userMsg } = extractMessages(messages);
+  console.log("[Anthropic] System message length:", systemMsg.length);
+  console.log("[Anthropic] User message length:", userMsg.length);
+
+  // Build system guard for JSON enforcement
+  let system = systemMsg;
 
-
-
-  anthropicMessages.unshift({ role: "user", content: "Hello" });
+  if (responseFormat === "json" || responseFormat?.type === "json_object") {
+    system = `${systemMsg}\n\nYou must output strict JSON only with no extra text.`;
   }
 
   let lastError;
@@ -58,54 +42,84 @@ export async function anthropicChat({
     }
 
     try {
-
+      console.log(`[Anthropic] Attempt ${attempt + 1}/${maxRetries + 1}`);
+
+      const requestBody = {
         model,
-
-
+        system,
+        messages: [{ role: "user", content: userMsg }],
         temperature,
-
-
-        stop_sequences:
+        max_tokens: maxTokens,
+        ...(topP !== undefined ? { top_p: topP } : {}),
+        ...(stop !== undefined ? { stop_sequences: stop } : {}),
       };
 
-
-
-
-
-
-
+      console.log("[Anthropic] Calling Anthropic API...");
+      const response = await fetch("https://api.anthropic.com/v1/messages", {
+        method: "POST",
+        headers: {
+          "Content-Type": "application/json",
+          "x-api-key": process.env.ANTHROPIC_API_KEY,
+          "anthropic-version": "2023-06-01",
+        },
+        body: JSON.stringify(requestBody),
+      });
 
-
-
+      if (!response.ok) {
+        const error = await response
+          .json()
+          .catch(() => ({ error: response.statusText }));
+        throw { status: response.status, ...error };
+      }
 
-
-
-
-
-
-
-
-
+      const data = await response.json();
+      console.log("[Anthropic] Response received from Anthropic API");
+
+      // Extract text from response.content blocks
+      const blocks = Array.isArray(data?.content) ? data.content : [];
+      const text = blocks
+        .filter((b) => b?.type === "text" && typeof b.text === "string")
+        .map((b) => b.text)
+        .join("");
+      console.log("[Anthropic] Response text length:", text.length);
+
+      // Parse JSON - this is required for all calls
+      const parsed = tryParseJSON(text);
+      if (!parsed) {
+        throw new ProviderJsonParseError(
+          "Anthropic",
+          model,
+          text.substring(0, 200),
+          "Failed to parse JSON response from Anthropic API"
+        );
       }
 
+      // Normalize usage (if provided)
+      const prompt_tokens = data?.usage?.input_tokens;
+      const completion_tokens = data?.usage?.output_tokens;
+      const total_tokens = (prompt_tokens ?? 0) + (completion_tokens ?? 0);
+      const usage =
+        prompt_tokens != null && completion_tokens != null
+          ? { prompt_tokens, completion_tokens, total_tokens }
+          : undefined;
+
+      console.log("[Anthropic] Returning response from Anthropic API");
       return {
-        content: parsed
-        text
-        usage: {
-
-          completion_tokens: result.usage.output_tokens,
-          total_tokens: result.usage.input_tokens + result.usage.output_tokens,
-          cache_read_input_tokens: result.usage.cache_creation_input_tokens,
-          cache_write_input_tokens: result.usage.cache_write_input_tokens,
-        },
-        raw: result,
+        content: parsed,
+        text,
+        ...(usage ? { usage } : {}),
+        raw: data,
      };
    } catch (error) {
       lastError = error;
+      const msg = error?.error?.message || error?.message || "";
+      console.error("[Anthropic] Error occurred:", msg);
+      console.error("[Anthropic] Error status:", error?.status);
 
       if (error.status === 401) throw error;
 
       if (isRetryableError(error) && attempt < maxRetries) {
+        console.log("[Anthropic] Retrying due to retryable error");
         continue;
       }
 
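
The rewritten provider above drops the `@anthropic-ai/sdk` client in favor of a direct `fetch` to the Messages API, rejects any call that is not in JSON mode, and returns already-parsed JSON plus normalized usage. A minimal caller sketch, assuming the provider is imported straight from the package source and that `ANTHROPIC_API_KEY` is set; option names and defaults follow the diff.

```js
// Hypothetical caller of the rewritten anthropicChat; the import path is an
// assumption about how the package source is consumed.
import { anthropicChat } from "./src/providers/anthropic.js";

async function main() {
  const { content, text, usage } = await anthropicChat({
    messages: [
      { role: "system", content: "You classify sentiment." },
      { role: "user", content: 'Classify "great release" as {"sentiment": ...}.' },
    ],
    model: "claude-3-sonnet", // provider default per the diff
    responseFormat: "json",   // anything else throws ProviderJsonModeError
  });

  console.log(content);     // parsed JSON object (tryParseJSON already applied)
  console.log(usage);       // { prompt_tokens, completion_tokens, total_tokens } or undefined
  console.log(text.length); // raw response text is still returned alongside the parse
}

main().catch(console.error);
```

A parse failure now surfaces as a `ProviderJsonParseError` instead of unparsed text, so it can be caught and reported per provider and model.
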
package/src/providers/base.js
CHANGED
@@ -69,3 +69,55 @@ export function tryParseJSON(text) {
     }
   }
 }
+
+/**
+ * Error thrown when JSON response format is required but not provided
+ */
+export class ProviderJsonModeError extends Error {
+  constructor(providerName, message) {
+    super(message);
+    this.name = "ProviderJsonModeError";
+    this.provider = providerName;
+  }
+}
+
+/**
+ * Error thrown when JSON parsing fails and should not be retried
+ */
+export class ProviderJsonParseError extends Error {
+  constructor(provider, model, sample, message = "Failed to parse JSON response") {
+    super(message);
+    this.name = "ProviderJsonParseError";
+    this.provider = provider;
+    this.model = model;
+    this.sample = sample;
+  }
+}
+
+/**
+ * Ensures that responseFormat is configured for JSON output
+ * @param {*} responseFormat - The response format object or string
+ * @param {string} providerName - Name of the provider for error reporting
+ * @throws {ProviderJsonModeError} When JSON format is not properly configured
+ */
+export function ensureJsonResponseFormat(responseFormat, providerName) {
+  if (!responseFormat) {
+    throw new ProviderJsonModeError(
+      providerName,
+      `${providerName} requires responseFormat to be set for JSON mode`
+    );
+  }
+
+  // Check for valid JSON format types
+  const isValidJsonFormat =
+    responseFormat === "json" ||
+    responseFormat?.type === "json_object" ||
+    responseFormat?.type === "json_schema";
+
+  if (!isValidJsonFormat) {
+    throw new ProviderJsonModeError(
+      providerName,
+      `${providerName} only supports JSON response format. Got: ${JSON.stringify(responseFormat)}`
+    );
+  }
+}
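
The new guard and error types in base.js are shared by the providers above. A small sketch of how the guard behaves, following the logic added in this diff; the import path is an assumption.

```js
// Exercising ensureJsonResponseFormat from base.js as added in this diff.
import {
  ensureJsonResponseFormat,
  ProviderJsonModeError,
} from "./src/providers/base.js";

// Accepted formats pass silently.
ensureJsonResponseFormat("json", "Demo");
ensureJsonResponseFormat({ type: "json_object" }, "Demo");
ensureJsonResponseFormat({ type: "json_schema" }, "Demo");

// Anything else, including a missing format, throws ProviderJsonModeError.
try {
  ensureJsonResponseFormat("text", "Demo");
} catch (err) {
  if (err instanceof ProviderJsonModeError) {
    console.error(`${err.provider}: ${err.message}`);
    // -> Demo: Demo only supports JSON response format. Got: "text"
  }
}
```
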
package/src/providers/deepseek.js
CHANGED
@@ -3,6 +3,8 @@ import {
   isRetryableError,
   sleep,
   tryParseJSON,
+  ensureJsonResponseFormat,
+  ProviderJsonParseError,
 } from "./base.js";
 
 export async function deepseekChat({
@@ -10,13 +12,16 @@ export async function deepseekChat({
   model = "deepseek-chat",
   temperature = 0.7,
   maxTokens,
-  responseFormat
+  responseFormat,
   topP,
   frequencyPenalty,
   presencePenalty,
   stop,
   maxRetries = 3,
 }) {
+  // Enforce JSON mode - reject calls without proper JSON responseFormat
+  ensureJsonResponseFormat(responseFormat, "DeepSeek");
+
   if (!process.env.DEEPSEEK_API_KEY) {
     throw new Error("DeepSeek API key not configured");
   }
@@ -44,7 +49,7 @@ export async function deepseekChat({
       stop,
     };
 
-    // Add response format
+    // Add response format - this is now required for all calls
     if (responseFormat?.type === "json_object" || responseFormat === "json") {
       requestBody.response_format = { type: "json_object" };
     }
@@ -71,9 +76,21 @@ export async function deepseekChat({
       const data = await response.json();
       const content = data.choices[0].message.content;
 
+      // Parse JSON - this is now required for all calls
+      const parsed = tryParseJSON(content);
+      if (!parsed) {
+        throw new ProviderJsonParseError(
+          "DeepSeek",
+          model,
+          content.substring(0, 200),
+          "Failed to parse JSON response from DeepSeek API"
+        );
+      }
+
       return {
-        content:
+        content: parsed,
         usage: data.usage,
+        raw: data,
       };
     } catch (error) {
       lastError = error;
@@ -90,21 +107,3 @@ export async function deepseekChat({
 
   throw lastError || new Error(`Failed after ${maxRetries + 1} attempts`);
 }
-
-// Keep backward compatibility
-export async function queryDeepSeek(
-  system,
-  prompt,
-  model = "deepseek-reasoner"
-) {
-  const response = await deepseekChat({
-    messages: [
-      { role: "system", content: system },
-      { role: "user", content: prompt },
-    ],
-    model,
-    responseFormat: "json",
-  });
-
-  return response.content;
-}
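
deepseek.js keeps its fetch-based retry loop but now requires a JSON `responseFormat`, returns parsed `content` alongside `raw`, and drops the backward-compatible `queryDeepSeek(system, prompt, model)` wrapper. A migration sketch for code that used the removed wrapper, assuming callers switch to `deepseekChat` directly; the import path is an assumption.

```js
// Replacement for the removed queryDeepSeek wrapper, built on deepseekChat.
import { deepseekChat } from "./src/providers/deepseek.js";

async function askDeepSeek(system, prompt, model = "deepseek-reasoner") {
  const { content } = await deepseekChat({
    messages: [
      { role: "system", content: system },
      { role: "user", content: prompt },
    ],
    model,
    responseFormat: "json", // now mandatory; omitting it throws ProviderJsonModeError
  });
  return content; // already parsed JSON under the new provider contract
}

askDeepSeek("Respond in JSON.", 'Return {"ok": true}.')
  .then((result) => console.log(result))
  .catch((err) => console.error(err));
```
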