ydc-mcp-server 1.6.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +140 -0
- package/README_JA.md +138 -0
- package/README_ZH_CN.md +138 -0
- package/README_ZH_TW.md +138 -0
- package/index.js +604 -0
- package/lib/advanced-versions.js +113 -0
- package/lib/api-client.js +134 -0
- package/lib/auth-middleware.js +44 -0
- package/lib/conversation-store.js +271 -0
- package/lib/openai-mapper.js +215 -0
- package/lib/routes/chat.js +199 -0
- package/lib/routes/conversations.js +94 -0
- package/lib/routes/health.js +31 -0
- package/lib/routes/models.js +111 -0
- package/openai-server.js +93 -0
- package/package.json +62 -0
package/lib/openai-mapper.js
@@ -0,0 +1,215 @@
+/**
+ * OpenAI Parameter Mapper Module
+ * Maps OpenAI API parameters to You.com API parameters
+ */
+
+import {
+  getAdvancedVersion,
+  isAdvancedVersion,
+  getDefaultAdvancedVersion,
+  adjustWorkflowSteps
+} from './advanced-versions.js';
+
+// Parse custom agents from env
+function getCustomAgents() {
+  const raw = process.env.YDC_CUSTOM_AGENTS || '';
+  if (!raw) return new Map();
+
+  const map = new Map();
+  raw.split(',').forEach(entry => {
+    const trimmed = entry.trim();
+    if (!trimmed) return;
+
+    const colonIndex = trimmed.indexOf(':');
+    if (colonIndex > 0) {
+      map.set(trimmed.substring(0, colonIndex), trimmed.substring(colonIndex + 1));
+    } else {
+      map.set(trimmed, trimmed);
+    }
+  });
+  return map;
+}
+
+/**
+ * Map OpenAI request parameters to You.com parameters
+ */
+export function mapOpenAIToYouParams(openaiRequest) {
+  const {
+    model = 'advanced-3.0-high',
+    messages,
+    temperature = 0.7,
+    max_tokens = 1000,
+    stream = false,
+    tools = []
+  } = openaiRequest;
+
+  let input = '';
+  let systemPrompt = '';
+  const conversationHistory = [];
+
+  messages.forEach(msg => {
+    if (msg.role === 'system') {
+      systemPrompt = msg.content;
+    } else if (msg.role === 'user') {
+      conversationHistory.push(`User: ${msg.content}`);
+    } else if (msg.role === 'assistant') {
+      conversationHistory.push(`Assistant: ${msg.content}`);
+    }
+  });
+
+  if (systemPrompt) {
+    input = `[System Instructions]\n${systemPrompt}\n\n`;
+  }
+
+  if (conversationHistory.length > 1) {
+    input += `[Conversation History]\n${conversationHistory.slice(0, -1).join('\n\n')}\n\n`;
+    input += `[Current Message]\n${conversationHistory[conversationHistory.length - 1].replace(/^User: /, '')}`;
+  } else if (conversationHistory.length === 1) {
+    input += conversationHistory[0].replace(/^User: /, '');
+  }
+
+  // Check if it's an advanced version model
+  if (isAdvancedVersion(model)) {
+    const versionConfig = getAdvancedVersion(model);
+    if (versionConfig) {
+      const adjustedSteps = adjustWorkflowSteps(versionConfig.max_workflow_steps, temperature);
+
+      return {
+        agent: 'advanced',
+        input,
+        stream,
+        verbosity: versionConfig.verbosity,
+        tools: tools.length > 0 ? tools : versionConfig.tools,
+        workflow_config: {
+          max_workflow_steps: adjustedSteps
+        },
+        timeout: versionConfig.timeout
+      };
+    }
+  }
+
+  // Handle legacy models (express, research, advanced)
+  let agent = model;
+  let verbosity = 'medium';
+  let defaultTools = tools;
+  let timeout = 300000;
+
+  if (model === 'advanced') {
+    const defaultVersion = getDefaultAdvancedVersion(temperature);
+    const versionConfig = getAdvancedVersion(defaultVersion);
+    if (versionConfig) {
+      const adjustedSteps = adjustWorkflowSteps(versionConfig.max_workflow_steps, temperature);
+      return {
+        agent: 'advanced',
+        input,
+        stream,
+        verbosity: versionConfig.verbosity,
+        tools: tools.length > 0 ? tools : versionConfig.tools,
+        workflow_config: {
+          max_workflow_steps: adjustedSteps
+        },
+        timeout: versionConfig.timeout
+      };
+    }
+  }
+
+  if (temperature <= 0.3) verbosity = 'medium';
+  else if (temperature >= 0.8) verbosity = 'high';
+
+  const max_workflow_steps = Math.min(Math.max(Math.floor(max_tokens / 100), 1), 20);
+
+  // Check if model is a known legacy type
+  const knownAgents = ['express', 'research', 'advanced'];
+
+  // Check custom agents mapping
+  const customAgents = getCustomAgents();
+  if (customAgents.has(model)) {
+    return {
+      agent: customAgents.get(model),
+      input,
+      stream,
+      timeout: 300000
+    };
+  }
+
+  if (!knownAgents.includes(agent)) {
+    // Treat unknown model as custom agent ID
+    return {
+      agent: model,
+      input,
+      stream,
+      timeout: 300000
+    };
+  }
+
+  if (agent === 'advanced') {
+    timeout = 3000000;
+    if (tools.length === 0) {
+      defaultTools = [
+        { type: 'research', search_effort: 'auto', report_verbosity: 'medium' },
+        { type: 'compute' }
+      ];
+    }
+  }
+
+  return {
+    agent,
+    input,
+    stream,
+    verbosity,
+    tools: defaultTools,
+    workflow_config: {
+      max_workflow_steps
+    },
+    timeout
+  };
+}
+
+/**
+ * Convert You.com response to OpenAI format
+ */
+export function convertToOpenAIResponse(youResponse, model) {
+  const content = youResponse.output && Array.isArray(youResponse.output)
+    ? youResponse.output
+        .filter(item => item.type === 'message.answer')
+        .map(item => item.text)
+        .join('\n\n')
+    : 'No response content';
+
+  return {
+    id: `chatcmpl-${Date.now()}`,
+    object: 'chat.completion',
+    created: Math.floor(Date.now() / 1000),
+    model: `you-${model}`,
+    choices: [{
+      index: 0,
+      message: {
+        role: 'assistant',
+        content: content
+      },
+      finish_reason: 'stop'
+    }],
+    usage: {
+      prompt_tokens: Math.floor(Math.random() * 100) + 50,
+      completion_tokens: Math.floor(content.length / 4),
+      total_tokens: Math.floor(Math.random() * 100) + 50 + Math.floor(content.length / 4)
+    }
+  };
+}
+
+/**
+ * Create streaming chunk in OpenAI format
+ */
+export function createStreamChunk(model, content, finishReason = null) {
+  return {
+    id: `chatcmpl-${Date.now()}`,
+    object: 'chat.completion.chunk',
+    created: Math.floor(Date.now() / 1000),
+    model: `you-${model}`,
+    choices: [{
+      index: 0,
+      delta: content ? { content } : {},
+      finish_reason: finishReason
+    }]
+  };
+}
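For orientation, here is a minimal usage sketch of `mapOpenAIToYouParams` as defined above (not part of the package). The request values are invented, and the traced result assumes `isAdvancedVersion('express')` is false in `advanced-versions.js`, which is not included in this excerpt:

```js
import { mapOpenAIToYouParams } from './lib/openai-mapper.js';

// Hypothetical OpenAI-style chat request exercising the legacy 'express' branch.
const params = mapOpenAIToYouParams({
  model: 'express',
  messages: [
    { role: 'system', content: 'Answer briefly.' },
    { role: 'user', content: 'What is You.com?' }
  ],
  temperature: 0.7, // below 0.8, so verbosity stays 'medium'
  max_tokens: 1000  // floor(1000 / 100) = 10 workflow steps (clamped to 1..20)
});

// Expected shape, tracing the code above:
// {
//   agent: 'express',
//   input: '[System Instructions]\nAnswer briefly.\n\nWhat is You.com?',
//   stream: false,
//   verbosity: 'medium',
//   tools: [],
//   workflow_config: { max_workflow_steps: 10 },
//   timeout: 300000
// }
```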
package/lib/routes/chat.js
@@ -0,0 +1,199 @@
+/**
+ * Chat Completions Route
+ */
+
+import { Router } from 'express';
+import { authenticate } from '../auth-middleware.js';
+import { mapOpenAIToYouParams, convertToOpenAIResponse, createStreamChunk } from '../openai-mapper.js';
+import { callYouApi, extractText } from '../api-client.js';
+import {
+  getConversation,
+  createConversation,
+  addMessageToConversation,
+  generateConversationId
+} from '../conversation-store.js';
+
+const router = Router();
+const API_KEY = process.env.YDC_API_KEY;
+
+router.post('/v1/chat/completions', authenticate, async (req, res) => {
+  try {
+    if (!API_KEY) {
+      return res.status(500).json({
+        error: {
+          message: 'YDC_API_KEY not configured on server',
+          type: 'server_error',
+          code: 'missing_api_key'
+        }
+      });
+    }
+
+    const { conversation_id, messages } = req.body;
+    let conversationId = conversation_id;
+    let fullMessages = messages || [];
+
+    if (conversationId) {
+      const existingConv = getConversation(conversationId);
+      if (existingConv && existingConv.messages.length > 0) {
+        const storedMessages = existingConv.messages.map(m => ({ role: m.role, content: m.content }));
+        const newUserMessages = fullMessages.filter(m => m.role === 'user');
+        const systemMsg = fullMessages.find(m => m.role === 'system') || storedMessages.find(m => m.role === 'system');
+
+        fullMessages = systemMsg ? [systemMsg] : [];
+        fullMessages.push(...storedMessages.filter(m => m.role !== 'system'));
+
+        const lastNewUserMsg = newUserMessages[newUserMessages.length - 1];
+        if (lastNewUserMsg) {
+          const alreadyExists = fullMessages.some(m => m.role === 'user' && m.content === lastNewUserMsg.content);
+          if (!alreadyExists) {
+            fullMessages.push(lastNewUserMsg);
+          }
+        }
+      }
+    } else {
+      conversationId = generateConversationId();
+    }
+
+    const lastUserMsg = fullMessages.filter(m => m.role === 'user').pop();
+    if (lastUserMsg) {
+      addMessageToConversation(conversationId, 'user', lastUserMsg.content);
+    }
+
+    const systemMsg = fullMessages.find(m => m.role === 'system');
+    if (systemMsg) {
+      const conv = getConversation(conversationId);
+      if (conv && !conv.messages.some(m => m.role === 'system')) {
+        conv.messages.unshift({ role: 'system', content: systemMsg.content, timestamp: Date.now() });
+      }
+    }
+
+    const youParams = mapOpenAIToYouParams({ ...req.body, messages: fullMessages });
+
+    console.log('📤 Sending request to You.com:', JSON.stringify({ ...youParams, input: youParams.input.substring(0, 200) + '...' }, null, 2));
+    console.log(`💬 Conversation ID: ${conversationId}, Messages: ${fullMessages.length}`);
+
+    const timeoutMs = youParams.timeout || (youParams.agent === 'advanced' ? 3000000 : 300000);
+
+    if (req.body.stream) {
+      const response = await callYouApi(API_KEY, { ...youParams, stream: true }, { timeout: timeoutMs });
+      await handleStreamingResponse(req, res, response, youParams, conversationId);
+    } else {
+      const response = await callYouApi(API_KEY, youParams, { timeout: timeoutMs });
+      await handleNonStreamingResponse(req, res, response, conversationId);
+    }
+
+  } catch (error) {
+    console.error('❌ Server error:', error);
+
+    if (error.name === 'AbortError') {
+      return res.status(408).json({
+        error: {
+          message: 'Request timeout - Advanced agent responses may require extended processing time',
+          type: 'timeout_error',
+          code: 'request_timeout'
+        }
+      });
+    }
+
+    res.status(500).json({
+      error: {
+        message: error.message,
+        type: 'server_error',
+        code: 'internal_error'
+      }
+    });
+  }
+});
+
+async function handleStreamingResponse(req, res, response, youParams, conversationId) {
+  res.setHeader('Content-Type', 'text/event-stream');
+  res.setHeader('Cache-Control', 'no-cache');
+  res.setHeader('Connection', 'keep-alive');
+  res.setHeader('Access-Control-Allow-Origin', '*');
+
+  if (!response.body) {
+    res.write(`data: {"error": "No response body"}\n\n`);
+    res.end();
+    return;
+  }
+
+  const reader = response.body.getReader();
+  const decoder = new TextDecoder();
+  let buffer = '';
+  const model = req.body.model || 'advanced';
+  const STREAM_TIMEOUT = youParams.timeout || (youParams.agent === 'advanced' ? 3000000 : 300000);
+
+  try {
+    let streamTimeout = setTimeout(() => {
+      const chunk = createStreamChunk(model, `\n\n[Response timeout]`, 'length');
+      res.write(`data: ${JSON.stringify(chunk)}\n\n`);
+      res.write('data: [DONE]\n\n');
+      res.end();
+    }, STREAM_TIMEOUT);
+
+    while (true) {
+      const { done, value } = await reader.read();
+      if (done) {
+        clearTimeout(streamTimeout);
+        break;
+      }
+
+      buffer += decoder.decode(value, { stream: true });
+      const lines = buffer.split('\n');
+      buffer = lines.pop() || '';
+
+      for (const line of lines) {
+        if (line.startsWith('data: ')) {
+          try {
+            const data = JSON.parse(line.slice(6));
+
+            if (data.type === 'response.output_text.delta' &&
+                data.response?.type === 'message.answer' &&
+                data.response?.delta) {
+
+              clearTimeout(streamTimeout);
+              streamTimeout = setTimeout(() => {
+                const chunk = createStreamChunk(model, `\n\n[Response timeout]`, 'length');
+                res.write(`data: ${JSON.stringify(chunk)}\n\n`);
+                res.write('data: [DONE]\n\n');
+                res.end();
+              }, STREAM_TIMEOUT);
+
+              const chunk = createStreamChunk(model, data.response.delta);
+              res.write(`data: ${JSON.stringify(chunk)}\n\n`);
+            }
+          } catch (e) {
+            console.error('❌ Error parsing streaming data:', e);
+          }
+        }
+      }
+    }
+
+    clearTimeout(streamTimeout);
+  } catch (streamError) {
+    console.error('❌ Streaming error:', streamError);
+    res.write(`data: {"error": "Streaming error: ${streamError.message}"}\n\n`);
+  } finally {
+    const finalChunk = createStreamChunk(model, null, 'stop');
+    res.write(`data: ${JSON.stringify(finalChunk)}\n\n`);
+    res.write('data: [DONE]\n\n');
+    res.end();
+  }
+}
+
+async function handleNonStreamingResponse(req, res, response, conversationId) {
+  const data = await response.json();
+  console.log('📥 You.com response:', JSON.stringify(data, null, 2));
+
+  const openaiResponse = convertToOpenAIResponse(data, req.body.model || 'advanced');
+
+  const assistantContent = openaiResponse.choices?.[0]?.message?.content;
+  if (assistantContent && conversationId) {
+    addMessageToConversation(conversationId, 'assistant', assistantContent);
+  }
+
+  openaiResponse.conversation_id = conversationId;
+  res.json(openaiResponse);
+}
+
+export default router;
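A client-side sketch of calling this route (not from the package). The port and the absence of an auth header are assumptions; the actual listener lives in `openai-server.js` and the `authenticate` middleware in `auth-middleware.js`, neither of which is shown in this excerpt:

```js
// Assumed base URL; adjust for however openai-server.js is actually configured.
const res = await fetch('http://localhost:8080/v1/chat/completions', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' }, // auth header omitted here (assumption)
  body: JSON.stringify({
    model: 'advanced',
    // conversation_id: '...',  // optional: reuse an ID returned earlier to pull in stored history
    messages: [{ role: 'user', content: 'Summarize the latest AI news.' }],
    stream: false
  })
});

const data = await res.json();
// OpenAI-style chat.completion object from convertToOpenAIResponse, plus the
// conversation_id that handleNonStreamingResponse attaches.
console.log(data.conversation_id, data.choices[0].message.content);
```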
package/lib/routes/conversations.js
@@ -0,0 +1,94 @@
+/**
+ * Conversations Route
+ */
+
+import { Router } from 'express';
+import { authenticate } from '../auth-middleware.js';
+import {
+  getConversation,
+  createConversation,
+  addMessageToConversation,
+  listAllConversations,
+  deleteConversation,
+  clearAllConversations
+} from '../conversation-store.js';
+
+const router = Router();
+
+// List all conversations
+router.get('/v1/conversations', authenticate, (req, res) => {
+  const conversations = listAllConversations();
+
+  res.json({
+    object: 'list',
+    data: conversations.sort((a, b) => new Date(b.updated_at) - new Date(a.updated_at)),
+    total: conversations.length
+  });
+});
+
+// Get single conversation
+router.get('/v1/conversations/:id', authenticate, (req, res) => {
+  const conv = getConversation(req.params.id);
+  if (!conv) {
+    return res.status(404).json({
+      error: {
+        message: 'Conversation not found',
+        type: 'not_found_error',
+        code: 'conversation_not_found'
+      }
+    });
+  }
+
+  res.json({
+    id: conv.id,
+    messages: conv.messages.map(m => ({
+      role: m.role,
+      content: m.content,
+      timestamp: new Date(m.timestamp).toISOString()
+    })),
+    created_at: new Date(conv.createdAt).toISOString(),
+    updated_at: new Date(conv.updatedAt).toISOString(),
+    metadata: conv.metadata
+  });
+});
+
+// Create new conversation
+router.post('/v1/conversations', authenticate, (req, res) => {
+  const { metadata = {}, system_message } = req.body;
+  const conv = createConversation(null, metadata);
+
+  if (system_message) {
+    addMessageToConversation(conv.id, 'system', system_message);
+  }
+
+  res.status(201).json({
+    id: conv.id,
+    created_at: new Date(conv.createdAt).toISOString(),
+    metadata: conv.metadata
+  });
+});
+
+// Delete conversation
+router.delete('/v1/conversations/:id', authenticate, (req, res) => {
+  const conv = getConversation(req.params.id);
+  if (!conv) {
+    return res.status(404).json({
+      error: {
+        message: 'Conversation not found',
+        type: 'not_found_error',
+        code: 'conversation_not_found'
+      }
+    });
+  }
+
+  deleteConversation(req.params.id);
+  res.json({ deleted: true, id: req.params.id });
+});
+
+// Clear all conversations
+router.delete('/v1/conversations', authenticate, (req, res) => {
+  const count = clearAllConversations();
+  res.json({ deleted: true, count });
+});
+
+export default router;
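A sketch of driving these endpoints (not from the package; base URL and auth handling are assumptions, as above):

```js
// Create a conversation with metadata and a stored system message.
const created = await fetch('http://localhost:8080/v1/conversations', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    metadata: { project: 'demo' },                 // hypothetical metadata
    system_message: 'You are a terse assistant.'
  })
}).then(r => r.json()); // 201 -> { id, created_at, metadata }

// Read it back; the same id can also be passed as conversation_id to /v1/chat/completions.
const conv = await fetch(`http://localhost:8080/v1/conversations/${created.id}`)
  .then(r => r.json()); // { id, messages, created_at, updated_at, metadata }
```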
package/lib/routes/health.js
@@ -0,0 +1,31 @@
+/**
+ * Health Check Route
+ */
+
+import { Router } from 'express';
+import { getConversationCount, storeConfig } from '../conversation-store.js';
+import { authConfig } from '../auth-middleware.js';
+
+const router = Router();
+const API_KEY = process.env.YDC_API_KEY;
+
+router.get('/health', (req, res) => {
+  res.json({
+    status: 'healthy',
+    timestamp: new Date().toISOString(),
+    ydc_api_key_configured: !!API_KEY,
+    auth: {
+      token_auth_enabled: authConfig.REQUIRE_TOKEN_AUTH,
+      allowed_tokens_count: authConfig.ACCESS_TOKENS_COUNT
+    },
+    conversations: {
+      store_type: storeConfig.STORE_TYPE,
+      db_path: storeConfig.STORE_TYPE === 'sqlite' && storeConfig.isDbConnected() ? storeConfig.DB_PATH : null,
+      active: getConversationCount(),
+      max: storeConfig.MAX_CONVERSATIONS,
+      ttl_hours: storeConfig.CONVERSATION_TTL / (60 * 60 * 1000)
+    }
+  });
+});
+
+export default router;
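Unlike the other routes, `/health` is registered without the `authenticate` middleware, so a plain GET is enough (port again assumed):

```js
const health = await fetch('http://localhost:8080/health').then(r => r.json());
// Mirrors the payload built above: status, timestamp, ydc_api_key_configured,
// auth.{token_auth_enabled, allowed_tokens_count}, and the conversations block.
console.log(health.status, health.conversations.store_type, health.conversations.active);
```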
package/lib/routes/models.js
@@ -0,0 +1,111 @@
+/**
+ * Models Route
+ */
+
+import { Router } from 'express';
+import { authenticate } from '../auth-middleware.js';
+import { listAdvancedVersions, getVersionInfo, getDefaultAdvancedVersion } from '../advanced-versions.js';
+
+const router = Router();
+
+// Parse custom agents from env
+function getCustomAgents() {
+  const raw = process.env.YDC_CUSTOM_AGENTS || '';
+  if (!raw) return [];
+
+  return raw.split(',').map(entry => {
+    const trimmed = entry.trim();
+    if (!trimmed) return null;
+
+    // Format: name:id or just id
+    const colonIndex = trimmed.indexOf(':');
+    if (colonIndex > 0) {
+      return {
+        name: trimmed.substring(0, colonIndex),
+        id: trimmed.substring(colonIndex + 1)
+      };
+    }
+    return { name: trimmed, id: trimmed };
+  }).filter(Boolean);
+}
+
+router.get('/v1/models', authenticate, (req, res) => {
+  const baseModels = [
+    {
+      id: 'advanced',
+      object: 'model',
+      created: Math.floor(Date.now() / 1000),
+      owned_by: 'you-com',
+      permission: [],
+      root: 'advanced',
+      parent: null
+    },
+    {
+      id: 'express',
+      object: 'model',
+      created: Math.floor(Date.now() / 1000),
+      owned_by: 'you-com',
+      permission: [],
+      root: 'express',
+      parent: null
+    },
+    {
+      id: 'research',
+      object: 'model',
+      created: Math.floor(Date.now() / 1000),
+      owned_by: 'you-com',
+      permission: [],
+      root: 'research',
+      parent: null
+    }
+  ];
+
+  // Add custom agents from env/CLI
+  const customAgents = getCustomAgents();
+  const customModels = customAgents.map(agent => ({
+    id: agent.name,
+    object: 'model',
+    created: Math.floor(Date.now() / 1000),
+    owned_by: 'you-com',
+    permission: [],
+    root: 'custom',
+    parent: null,
+    agent_id: agent.id,
+    description: `Custom agent${agent.name !== agent.id ? ` (${agent.id})` : ''}`
+  }));
+
+  const advancedVersionModels = listAdvancedVersions().map(version => {
+    const versionInfo = getVersionInfo(version);
+    return {
+      id: version,
+      object: 'model',
+      created: Math.floor(Date.now() / 1000),
+      owned_by: 'you-com',
+      permission: [],
+      root: 'advanced',
+      parent: 'advanced',
+      description: versionInfo.description,
+      tools: versionInfo.tools
+    };
+  });
+
+  res.json({
+    object: 'list',
+    data: [...baseModels, ...customModels, ...advancedVersionModels]
+  });
+});
+
+router.get('/v1/versions', authenticate, (req, res) => {
+  const versions = listAdvancedVersions().map(version => getVersionInfo(version));
+  res.json({
+    object: 'list',
+    data: versions,
+    default_version: getDefaultAdvancedVersion(),
+    temperature_mapping: {
+      "0.0-0.5": "Uses medium verbosity versions with reduced workflow steps",
+      "0.5-1.0": "Uses high verbosity versions with increased workflow steps"
+    }
+  });
+});
+
+export default router;
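Both this route and `openai-mapper.js` parse `YDC_CUSTOM_AGENTS` the same way: a comma-separated list of `name:id` pairs, or bare IDs. A sketch of the effect with placeholder agent IDs (not real You.com agents):

```js
// Hypothetical value for illustration only.
process.env.YDC_CUSTOM_AGENTS = 'support-bot:agent_abc123,agent_xyz789';

// getCustomAgents() in this file would then return:
// [
//   { name: 'support-bot', id: 'agent_abc123' },   // exposed in /v1/models as id 'support-bot'
//   { name: 'agent_xyz789', id: 'agent_xyz789' }   // bare entry: name and id coincide
// ]
// Each entry is listed with root: 'custom' and its agent_id, and the mapper routes
// requests for that model name to the mapped agent ID.
```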