kyd-shared-badge 0.3.52 → 0.3.54

package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "kyd-shared-badge",
-  "version": "0.3.52",
+  "version": "0.3.54",
   "private": false,
   "main": "./src/index.ts",
   "module": "./src/index.ts",
package/src/components/ReportHeader.tsx CHANGED
@@ -9,7 +9,7 @@ import { FiInfo, FiAlertTriangle } from 'react-icons/fi';
 // Register English locale once at module import time
 countriesLib.registerLocale(enLocale);
 
-const getBadgeImageUrl = (score: number) => {
+export const getBadgeImageUrl = (score: number) => {
   if (score >= 75) return '/badgegreen2.png';
   if (score >= 50) return '/badgeyellow2.png';
   return '/badgered2.png';
package/src/index.ts CHANGED
@@ -4,5 +4,4 @@ export { default as PrintableBadgeDisplay } from './PrintableBadgeDisplay';
 export { default as ChatWindowStreaming } from './chat/ChatWindowStreaming';
 export { default as ChatWidget } from './chat/ChatWidget';
 export * from './utils/date';
-
-
+export { getBadgeImageUrl } from './components/ReportHeader';
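
With the trailing blank lines removed, the entry point now re-exports the badge helper, so consumers can import it from the package root instead of reaching into internal paths. A minimal usage sketch (not part of the diff), assuming the package is consumed under its published name kyd-shared-badge:

// Usage sketch: pick the badge artwork for a score via the newly exported helper.
import { getBadgeImageUrl } from 'kyd-shared-badge';

// Thresholds from the diff: 75+ is green, 50-74 is yellow, anything lower is red.
const badgeSrc: string = getBadgeImageUrl(82);
console.log(badgeSrc); // '/badgegreen2.png'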
package/src/lib/routes.ts CHANGED
@@ -53,80 +53,24 @@ export async function chatStreamRoute(req: NextRequest, userId: string, companyI
       { role: 'user', content },
     ];
 
-    const upstream = await fetch('https://openrouter.ai/api/v1/chat/completions', {
-      method: 'POST',
-      headers: {
-        'Content-Type': 'application/json',
-        'Authorization': `Bearer ${apiKey}`,
-      },
-      body: JSON.stringify({
-        model: 'openai/o4-mini',
-        messages: chatMessages,
-        stream: true,
-        max_tokens: 1024,
-      }),
+    const openai = createOpenAI({
+      apiKey,
+      baseURL: 'https://openrouter.ai/api/v1',
     });
 
-    if (!upstream.ok || !upstream.body) {
-      const errText = await upstream.text().catch(() => '');
-      return Response.json({ error: `Upstream error: ${upstream.status}`, details: errText }, { status: 502 });
-    }
-
-    const encoder = new TextEncoder();
-    let sseBuffer = '';
-    let assistantText = '';
-    const stream = new ReadableStream<Uint8Array>({
-      start(controller) {
-        const reader = upstream.body!.getReader();
-        const decoder = new TextDecoder();
-        const pump = async (): Promise<void> => {
-          try {
-            const { value, done } = await reader.read();
-            if (done) {
-              try { if (assistantText) await putMessage({ sessionId, role: 'assistant', content: assistantText }); } catch {}
-              controller.close();
-              return;
-            }
-            sseBuffer += decoder.decode(value, { stream: true });
-            const lines = sseBuffer.split(/\r?\n/);
-            sseBuffer = lines.pop() ?? '';
-            for (const l of lines) {
-              const line = l.trim();
-              if (!line.startsWith('data:')) continue;
-              const data = line.slice(5).trim();
-              if (!data) continue;
-              if (data === '[DONE]') {
-                try { if (assistantText) await putMessage({ sessionId, role: 'assistant', content: assistantText }); } catch {}
-                controller.close();
-                return;
-              }
-              try {
-                const json = JSON.parse(data);
-                const delta = json?.choices?.[0]?.delta?.content ?? '';
-                if (delta) {
-                  assistantText += delta;
-                  controller.enqueue(encoder.encode(delta));
-                }
-              } catch {}
-            }
-            pump();
-          } catch (err) {
-            controller.error(err);
-          }
-        };
-        pump();
-      },
-      cancel() {
-        try { (upstream as any).body?.cancel?.(); } catch {}
-      },
+    const result = await streamText({
+      model: openai('openai/o4-mini'),
+      messages: chatMessages as any,
+      maxTokens: 1024,
     });
 
-    return new Response(stream, {
+    return result.toAIStreamResponse({
       headers: {
-        'Content-Type': 'text/plain; charset=utf-8',
-        'Cache-Control': 'no-cache, no-transform',
         'X-Accel-Buffering': 'no',
       },
+      onFinal: async (finalText: string) => {
+        try { if (finalText) await putMessage({ sessionId, role: 'assistant', content: finalText }); } catch {}
+      },
     });
 
   } catch (e) {
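
The new version drops the hand-rolled fetch plus ReadableStream SSE parsing in favor of the Vercel AI SDK, pointing an OpenAI-compatible provider at OpenRouter and letting the SDK build the streaming response. The hunk does not show the new imports or surrounding wiring, so the sketch below reproduces the same pattern end to end, mirroring the calls the diff itself makes. It is only a sketch: the import sources (@ai-sdk/openai and the ai package, in their AI SDK 3.x-era form where toAIStreamResponse exists), the route signature, and the persistAssistantMessage stub are assumptions, and the exact option shape accepted by toAIStreamResponse depends on the installed ai version.

// Sketch of the new streaming route pattern, not the package's exact file.
// Assumed imports: createOpenAI from @ai-sdk/openai, streamText from ai (AI SDK 3.x era).
import { createOpenAI } from '@ai-sdk/openai';
import { streamText } from 'ai';

// Hypothetical stand-in for the package's putMessage() persistence helper.
async function persistAssistantMessage(sessionId: string, content: string): Promise<void> {
  // e.g. write the assistant reply to the session's message store
}

export async function chatStreamSketch(apiKey: string, sessionId: string, content: string) {
  // Point the OpenAI-compatible provider at OpenRouter, as the diff does.
  const openai = createOpenAI({
    apiKey,
    baseURL: 'https://openrouter.ai/api/v1',
  });

  // streamText replaces the manual SSE buffering and "data:" line parsing.
  const result = await streamText({
    model: openai('openai/o4-mini'),
    messages: [{ role: 'user', content }],
    maxTokens: 1024,
  });

  // Stream tokens to the client; onFinal persists the complete reply once streaming
  // ends, mirroring what the old code did when it saw the [DONE] marker.
  return result.toAIStreamResponse({
    headers: { 'X-Accel-Buffering': 'no' },
    onFinal: async (finalText: string) => {
      if (finalText) await persistAssistantMessage(sessionId, finalText);
    },
  });
}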