kyd-shared-badge 0.3.53 → 0.3.54

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "kyd-shared-badge",
-  "version": "0.3.53",
+  "version": "0.3.54",
   "private": false,
   "main": "./src/index.ts",
   "module": "./src/index.ts",
@@ -56,7 +56,7 @@ export function useChatStreaming(cfg: UseChatStreamingConfig) {
       }
       const res = await fetch(api, {
         method: 'POST',
-        headers: { 'Content-Type': 'application/json', 'Accept': 'text/event-stream' },
+        headers: { 'Content-Type': 'application/json' },
         body: JSON.stringify({ content, sessionId: sid, badgeId }),
         signal: abortRef.current.signal,
       });
@@ -65,23 +65,13 @@ export function useChatStreaming(cfg: UseChatStreamingConfig) {

       const reader = res.body.getReader();
       const decoder = new TextDecoder();
-      let buffer = '';
       let done = false;
       while (!done) {
-        const { value, done: readerDone } = await reader.read();
-        done = readerDone || false;
-        if (value) {
-          buffer += decoder.decode(value, { stream: true });
-          const lines = buffer.split(/\r?\n/);
-          buffer = lines.pop() ?? '';
-          for (const raw of lines) {
-            const line = raw.trim();
-            if (!line.startsWith('data:')) continue;
-            const data = line.slice(5).trim();
-            if (!data) continue;
-            if (data === '[DONE]') { done = true; break; }
-            setMessages(m => m.map(msg => msg.id === assistantId ? { ...msg, content: msg.content + data } : msg));
-          }
+        const chunk = await reader.read();
+        done = chunk.done || false;
+        if (chunk.value) {
+          const textPart = decoder.decode(chunk.value, { stream: true });
+          setMessages(m => m.map(msg => msg.id === assistantId ? { ...msg, content: msg.content + textPart } : msg));
        }
      }
    } catch (e) {
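
Note on the client-side change above: the hook no longer strips SSE "data:" framing or watches for a [DONE] terminator; each decoded chunk is appended to the assistant message as-is, so it expects the server to stream plain text. A minimal standalone sketch of this consumption pattern (streamChat and onText are illustrative names, not part of the package):

    // Stream a chat response and surface decoded text chunks as they arrive.
    // Assumes the endpoint streams raw text with no SSE framing, as the new hook does.
    async function streamChat(
      api: string,
      body: { content: string; sessionId: string; badgeId: string },
      onText: (chunk: string) => void, // hypothetical callback for UI updates
    ): Promise<void> {
      const res = await fetch(api, {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify(body),
      });
      if (!res.ok || !res.body) throw new Error(`Request failed: ${res.status}`);
      const reader = res.body.getReader();
      const decoder = new TextDecoder();
      while (true) {
        const { value, done } = await reader.read();
        if (done) break;
        if (value) onText(decoder.decode(value, { stream: true }));
      }
    }
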
package/src/components/ReportHeader.tsx CHANGED
@@ -9,7 +9,7 @@ import { FiInfo, FiAlertTriangle } from 'react-icons/fi';
 // Register English locale once at module import time
 countriesLib.registerLocale(enLocale);

-const getBadgeImageUrl = (score: number) => {
+export const getBadgeImageUrl = (score: number) => {
   if (score >= 75) return '/badgegreen2.png';
   if (score >= 50) return '/badgeyellow2.png';
   return '/badgered2.png';
package/src/index.ts CHANGED
@@ -4,5 +4,4 @@ export { default as PrintableBadgeDisplay } from './PrintableBadgeDisplay';
 export { default as ChatWindowStreaming } from './chat/ChatWindowStreaming';
 export { default as ChatWidget } from './chat/ChatWidget';
 export * from './utils/date';
-
-
+export { getBadgeImageUrl } from './components/ReportHeader';
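
Note: getBadgeImageUrl is now part of the package's public surface. A minimal usage sketch (the score value is illustrative):

    import { getBadgeImageUrl } from 'kyd-shared-badge';

    // Thresholds per the ReportHeader hunk above: >= 75 green, >= 50 yellow, else red.
    const badgeSrc = getBadgeImageUrl(82); // '/badgegreen2.png'
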
package/src/lib/routes.ts CHANGED
@@ -2,6 +2,8 @@
 // POST /api/chat { content: string, sessionId?: string }

 import { NextRequest } from 'next/server';
+import { streamText } from 'ai';
+import { createOpenAI } from '@ai-sdk/openai';

 import { getHistory, putMessage } from './chat-store';
 import {
@@ -32,116 +34,42 @@ export async function chatStreamRoute(req: NextRequest, userId: string, companyI
     return Response.json({ error: 'Rate limit exceeded' }, { status: 429 });
   }

-  // Persist the user message first so it is included in history
   await putMessage({ sessionId, role: 'user', content });

-  // Build required context in parallel to reduce latency
-  const [badgeUserId, history] = await Promise.all([
-    getBadgeUserId(badgeId),
-    getHistory(sessionId, 20),
-  ]);
-  const [aggregated, graphData] = await Promise.all([
-    aggregateUserData(badgeUserId, !!companyId, companyId),
-    getReportGraphData(badgeId),
-  ]);
+  const badgeUserId = await getBadgeUserId(badgeId);
+  const aggregated = await aggregateUserData(badgeUserId, !!companyId, companyId);
   const cleaned = cleanDeveloperProfile(aggregated);
+  const graphData = await getReportGraphData(badgeId);
   const system = buildAllContextPrompt(cleaned, graphData, { concise: true });
-
+  const history = await getHistory(sessionId, 20);
   const apiKey = process.env.OPENROUTER_API_KEY;
   if (!apiKey) {
     return Response.json({ error: 'Server misconfigured: missing OPENROUTER_API_KEY' }, { status: 500 });
   }

-  const model = process.env.OPENROUTER_MODEL || 'openai/o4-mini';
-
   const chatMessages = [
     { role: 'system', content: system },
     ...history.map((m: any) => ({ role: m.role, content: m.content })),
     { role: 'user', content },
   ];

-  const upstream = await fetch('https://openrouter.ai/api/v1/chat/completions', {
-    method: 'POST',
-    headers: {
-      'Content-Type': 'application/json',
-      'Authorization': `Bearer ${apiKey}`,
-      'Accept': 'text/event-stream',
-    },
-    body: JSON.stringify({
-      model,
-      messages: chatMessages,
-      stream: true,
-      max_tokens: 1024,
-    }),
+  const openai = createOpenAI({
+    apiKey,
+    baseURL: 'https://openrouter.ai/api/v1',
   });

-  if (!upstream.ok || !upstream.body) {
-    const errText = await upstream.text().catch(() => '');
-    return Response.json({ error: `Upstream error: ${upstream.status}`, details: errText }, { status: 502 });
-  }
-
-  const encoder = new TextEncoder();
-  let sseBuffer = '';
-  let assistantText = '';
-  const stream = new ReadableStream<Uint8Array>({
-    start(controller) {
-      const reader = upstream.body!.getReader();
-      const decoder = new TextDecoder();
-      const pump = async (): Promise<void> => {
-        try {
-          const { value, done } = await reader.read();
-          if (done) {
-            try { if (assistantText) await putMessage({ sessionId, role: 'assistant', content: assistantText }); } catch {}
-            // Send final SSE terminator
-            controller.enqueue(encoder.encode('data: [DONE]\n\n'));
-            controller.close();
-            return;
-          }
-          sseBuffer += decoder.decode(value, { stream: true });
-          const lines = sseBuffer.split(/\r?\n/);
-          sseBuffer = lines.pop() ?? '';
-          for (const l of lines) {
-            const line = l.trim();
-            if (!line.startsWith('data:')) continue;
-            const data = line.slice(5).trim();
-            if (!data) continue;
-            if (data === '[DONE]') {
-              try { if (assistantText) await putMessage({ sessionId, role: 'assistant', content: assistantText }); } catch {}
-              controller.enqueue(encoder.encode('data: [DONE]\n\n'));
-              controller.close();
-              return;
-            }
-            try {
-              const json = JSON.parse(data);
-              const delta = json?.choices?.[0]?.delta?.content ?? '';
-              if (delta) {
-                assistantText += delta;
-                // Emit proper SSE line to the client
-                controller.enqueue(encoder.encode(`data: ${delta}\n\n`));
-              }
-            } catch {
-              // If upstream sends a non-JSON data line, forward as-is
-              controller.enqueue(encoder.encode(`data: ${data}\n\n`));
-            }
-          }
-          pump();
-        } catch (err) {
-          controller.error(err);
-        }
-      };
-      pump();
-    },
-    cancel() {
-      try { (upstream as any).body?.cancel?.(); } catch {}
-    },
+  const result = await streamText({
+    model: openai('openai/o4-mini'),
+    messages: chatMessages as any,
+    maxTokens: 1024,
   });

-  return new Response(stream, {
+  return result.toAIStreamResponse({
     headers: {
-      'Content-Type': 'text/event-stream; charset=utf-8',
-      'Cache-Control': 'no-cache, no-transform',
       'X-Accel-Buffering': 'no',
-      'Connection': 'keep-alive',
+    },
+    onFinal: async (finalText: string) => {
+      try { if (finalText) await putMessage({ sessionId, role: 'assistant', content: finalText }); } catch {}
     },
   });
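
Note on the routes.ts rewrite: the hand-rolled OpenRouter SSE proxy (manual fetch, ReadableStream pump, and "data:" re-framing) is replaced by the Vercel AI SDK, which takes over chunk framing and termination, removing the buffer and [DONE] bookkeeping from the route. A minimal sketch of the same pattern in isolation, assuming the installed `ai` and `@ai-sdk/openai` versions expose streamText and toAIStreamResponse as called in the diff; the handler name and request shape here are illustrative:

    import { streamText } from 'ai';
    import { createOpenAI } from '@ai-sdk/openai';

    // Point the OpenAI-compatible provider at OpenRouter, as the new route does.
    const openai = createOpenAI({
      apiKey: process.env.OPENROUTER_API_KEY!,
      baseURL: 'https://openrouter.ai/api/v1',
    });

    export async function POST(req: Request): Promise<Response> {
      const { messages } = await req.json(); // [{ role, content }, ...]
      const result = await streamText({
        model: openai('openai/o4-mini'),
        messages,
        maxTokens: 1024,
      });
      // Streams the model output to the client; persistence (like the route's
      // onFinal putMessage call) can be attached via the response options.
      return result.toAIStreamResponse();
    }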