kyd-shared-badge 0.3.38 → 0.3.39

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/package.json +1 -1
  2. package/src/lib/routes.ts +83 -26
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "kyd-shared-badge",
3
- "version": "0.3.38",
3
+ "version": "0.3.39",
4
4
  "private": false,
5
5
  "main": "./src/index.ts",
6
6
  "module": "./src/index.ts",
package/src/lib/routes.ts CHANGED
@@ -20,6 +20,7 @@ import { createSession } from './chat-store';
20
20
 
21
21
  export const runtime = 'nodejs';
22
22
 
23
+
23
24
  export async function chatStreamRoute(req: NextRequest, userId: string, companyId?: string) {
24
25
  try {
25
26
 
@@ -41,43 +42,99 @@ export async function chatStreamRoute(req: NextRequest, userId: string, companyI
41
42
  const graphData = await getReportGraphData(badgeId);
42
43
  const system = buildAllContextPrompt(cleaned, graphData, { concise: true });
43
44
  const history = await getHistory(sessionId, 20);
45
+ const apiKey = process.env.OPENROUTER_API_KEY;
46
+ if (!apiKey) {
47
+ return Response.json({ error: 'Server misconfigured: missing OPENROUTER_API_KEY' }, { status: 500 });
48
+ }
49
+
50
+ const chatMessages = [
51
+ { role: 'system', content: system },
52
+ ...history.map((m: any) => ({ role: m.role, content: m.content })),
53
+ { role: 'user', content },
54
+ ];
55
+
56
+ const upstream = await fetch('https://openrouter.ai/api/v1/chat/completions', {
57
+ method: 'POST',
58
+ headers: {
59
+ 'Content-Type': 'application/json',
60
+ 'Authorization': `Bearer ${apiKey}`,
61
+ },
62
+ body: JSON.stringify({
63
+ model: 'openai/o4-mini',
64
+ messages: chatMessages,
65
+ stream: true,
66
+ max_tokens: 1024,
67
+ }),
68
+ });
69
+
70
+ if (!upstream.ok || !upstream.body) {
71
+ const errText = await upstream.text().catch(() => '');
72
+ return Response.json({ error: `Upstream error: ${upstream.status}`, details: errText }, { status: 502 });
73
+ }
44
74
 
45
- const openrouter = createOpenAI({
46
- apiKey: process.env.OPENROUTER_API_KEY,
47
- baseURL: 'https://openrouter.ai/api/v1',
75
+ const encoder = new TextEncoder();
76
+ let sseBuffer = '';
77
+ let assistantText = '';
78
+ const stream = new ReadableStream<Uint8Array>({
79
+ start(controller) {
80
+ const reader = upstream.body!.getReader();
81
+ const decoder = new TextDecoder();
82
+ const pump = async (): Promise<void> => {
83
+ try {
84
+ const { value, done } = await reader.read();
85
+ if (done) {
86
+ try { if (assistantText) await putMessage({ sessionId, role: 'assistant', content: assistantText }); } catch {}
87
+ controller.close();
88
+ return;
89
+ }
90
+ sseBuffer += decoder.decode(value, { stream: true });
91
+ const lines = sseBuffer.split(/\r?\n/);
92
+ sseBuffer = lines.pop() ?? '';
93
+ for (const l of lines) {
94
+ const line = l.trim();
95
+ if (!line.startsWith('data:')) continue;
96
+ const data = line.slice(5).trim();
97
+ if (!data) continue;
98
+ if (data === '[DONE]') {
99
+ try { if (assistantText) await putMessage({ sessionId, role: 'assistant', content: assistantText }); } catch {}
100
+ controller.close();
101
+ return;
102
+ }
103
+ try {
104
+ const json = JSON.parse(data);
105
+ const delta = json?.choices?.[0]?.delta?.content ?? '';
106
+ if (delta) {
107
+ assistantText += delta;
108
+ controller.enqueue(encoder.encode(delta));
109
+ }
110
+ } catch {}
111
+ }
112
+ pump();
113
+ } catch (err) {
114
+ controller.error(err);
115
+ }
116
+ };
117
+ pump();
118
+ },
119
+ cancel() {
120
+ try { (upstream as any).body?.cancel?.(); } catch {}
121
+ },
48
122
  });
49
123
 
50
- const result = await streamText({
51
- model: openrouter('openai/o4-mini'),
52
- system,
53
- messages: [...history, { role: 'user' as const, content }],
54
- maxTokens: 1024,
55
- temperature: 0.2,
56
- // onFinish: async ({ text }: { text: string }) => {
57
- // // Attempt to capture evidence block if present (UI also parses, but cache server-side)
58
- // const evidence = tryParseEvidenceServer(text);
59
- // await putMessage({ sessionId, role: 'assistant', content: text, evidenceJson: evidence || undefined });
60
- // },
124
+ return new Response(stream, {
125
+ headers: {
126
+ 'Content-Type': 'text/plain; charset=utf-8',
127
+ 'Cache-Control': 'no-cache, no-transform',
128
+ 'X-Accel-Buffering': 'no',
129
+ },
61
130
  });
62
131
 
63
- return result.toTextStreamResponse();
64
132
  } catch (e) {
65
133
  console.error('chat error', e);
66
134
  return new Response('An error occurred. Please try again.', { status: 500 });
67
135
  }
68
136
  }
69
137
 
70
- // function tryParseEvidenceServer(text: string): any | null {
71
- // try {
72
- // const start = text.indexOf('```json');
73
- // const end = text.indexOf('```', start + 7);
74
- // const raw = start >= 0 && end > start ? text.slice(start + 7, end).trim() : text.trim();
75
- // const obj = JSON.parse(raw);
76
- // if (obj && obj.type === 'evidence' && Array.isArray(obj.claims)) return obj;
77
- // } catch {}
78
- // return null;
79
- // }
80
-
81
138
  export async function createSessionRoute(req: NextRequest, userId: string, companyId?: string) {
82
139
  try {
83
140
  const body = await req.json().catch(() => ({}));