@windrun-huaiin/backend-core 15.1.0 → 16.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (66)
  1. package/LICENSE +1 -1
  2. package/dist/index.d.ts +1 -0
  3. package/dist/index.d.ts.map +1 -1
  4. package/dist/index.js +44 -0
  5. package/dist/index.mjs +8 -1
  6. package/dist/lib/index.js +19 -0
  7. package/dist/lib/index.mjs +1 -1
  8. package/dist/lib/upstash/qstash.d.ts +20 -7
  9. package/dist/lib/upstash/qstash.d.ts.map +1 -1
  10. package/dist/lib/upstash/qstash.js +33 -7
  11. package/dist/lib/upstash/qstash.mjs +33 -7
  12. package/dist/lib/upstash/redis-structures.d.ts +83 -0
  13. package/dist/lib/upstash/redis-structures.d.ts.map +1 -1
  14. package/dist/lib/upstash/redis-structures.js +220 -0
  15. package/dist/lib/upstash/redis-structures.mjs +202 -1
  16. package/dist/lib/upstash-config.d.ts.map +1 -1
  17. package/dist/lib/upstash-config.js +76 -4
  18. package/dist/lib/upstash-config.mjs +76 -4
  19. package/dist/services/ai/abort.d.ts +2 -0
  20. package/dist/services/ai/abort.d.ts.map +1 -0
  21. package/dist/services/ai/abort.js +24 -0
  22. package/dist/services/ai/abort.mjs +22 -0
  23. package/dist/services/ai/env.d.ts +21 -0
  24. package/dist/services/ai/env.d.ts.map +1 -0
  25. package/dist/services/ai/env.js +85 -0
  26. package/dist/services/ai/env.mjs +80 -0
  27. package/dist/services/ai/error.d.ts +3 -0
  28. package/dist/services/ai/error.d.ts.map +1 -0
  29. package/dist/services/ai/error.js +54 -0
  30. package/dist/services/ai/error.mjs +52 -0
  31. package/dist/services/ai/index.d.ts +9 -0
  32. package/dist/services/ai/index.d.ts.map +1 -0
  33. package/dist/services/ai/index.js +30 -0
  34. package/dist/services/ai/index.mjs +7 -0
  35. package/dist/services/ai/message-builder.d.ts +4 -0
  36. package/dist/services/ai/message-builder.d.ts.map +1 -0
  37. package/dist/services/ai/message-builder.js +15 -0
  38. package/dist/services/ai/message-builder.mjs +13 -0
  39. package/dist/services/ai/mock.d.ts +30 -0
  40. package/dist/services/ai/mock.d.ts.map +1 -0
  41. package/dist/services/ai/mock.js +314 -0
  42. package/dist/services/ai/mock.mjs +308 -0
  43. package/dist/services/ai/openrouter-client.d.ts +12 -0
  44. package/dist/services/ai/openrouter-client.d.ts.map +1 -0
  45. package/dist/services/ai/openrouter-client.js +81 -0
  46. package/dist/services/ai/openrouter-client.mjs +78 -0
  47. package/dist/services/ai/route.d.ts +6 -0
  48. package/dist/services/ai/route.d.ts.map +1 -0
  49. package/dist/services/ai/route.js +178 -0
  50. package/dist/services/ai/route.mjs +173 -0
  51. package/dist/services/ai/types.d.ts +98 -0
  52. package/dist/services/ai/types.d.ts.map +1 -0
  53. package/package.json +11 -4
  54. package/src/index.ts +1 -0
  55. package/src/lib/upstash/qstash.ts +55 -15
  56. package/src/lib/upstash/redis-structures.ts +248 -0
  57. package/src/lib/upstash-config.ts +106 -4
  58. package/src/services/ai/abort.ts +26 -0
  59. package/src/services/ai/env.ts +120 -0
  60. package/src/services/ai/error.ts +64 -0
  61. package/src/services/ai/index.ts +8 -0
  62. package/src/services/ai/message-builder.ts +17 -0
  63. package/src/services/ai/mock.ts +378 -0
  64. package/src/services/ai/openrouter-client.ts +94 -0
  65. package/src/services/ai/route.ts +218 -0
  66. package/src/services/ai/types.ts +131 -0
@@ -0,0 +1,314 @@
1
+ 'use strict';
2
+
3
+ var tslib = require('tslib');
4
+ var ai = require('@windrun-huaiin/contracts/ai');
5
+
6
+ const streamingHeaders = {
7
+ 'Content-Type': 'text/event-stream; charset=utf-8',
8
+ 'Cache-Control': 'no-cache, no-store, max-age=0, must-revalidate, no-transform',
9
+ Connection: 'keep-alive',
10
+ Pragma: 'no-cache',
11
+ 'X-Accel-Buffering': 'no',
12
+ };
13
+ function encodeEvent(event) {
14
+ return `data: ${JSON.stringify(event)}\n\n`;
15
+ }
16
+ function createStreamResponse(events) {
17
+ const encoder = new TextEncoder();
18
+ const stream = new ReadableStream({
19
+ start(controller) {
20
+ for (const event of events) {
21
+ controller.enqueue(encoder.encode(encodeEvent(event)));
22
+ }
23
+ controller.close();
24
+ },
25
+ });
26
+ return new Response(stream, {
27
+ headers: streamingHeaders,
28
+ });
29
+ }
30
+ function createSimpleMockHandler(text) {
31
+ return (context) => {
32
+ const messageId = `mock-${context.requestId}`;
33
+ return createStreamResponse([
34
+ {
35
+ type: 'message_started',
36
+ messageId,
37
+ createdAt: Date.now(),
38
+ },
39
+ {
40
+ type: 'text_delta',
41
+ messageId,
42
+ text,
43
+ },
44
+ {
45
+ type: 'message_completed',
46
+ messageId,
47
+ createdAt: Date.now(),
48
+ },
49
+ ]);
50
+ };
51
+ }
52
+ function createErrorMockResponse(statusCode, message) {
53
+ return Response.json(ai.createAIErrorPayload({
54
+ message,
55
+ upstreamStatusCode: statusCode,
56
+ }), { status: statusCode });
57
+ }
58
+ function getMockScenario(mockType, mockTimeoutMs) {
59
+ switch (mockType) {
60
+ case 1:
61
+ return {
62
+ mode: 'text_stream',
63
+ initialDelayMs: mockTimeoutMs,
64
+ };
65
+ case 2:
66
+ return {
67
+ mode: 'text_stream',
68
+ immediateErrorType: 'timeout',
69
+ };
70
+ case 3:
71
+ return {
72
+ mode: 'text_stream',
73
+ streamFailureType: 'timeout',
74
+ streamFailureAfterChunks: 3,
75
+ };
76
+ case 4:
77
+ return {
78
+ mode: 'text_stream',
79
+ streamFailureType: 'request_aborted',
80
+ streamFailureAfterChunks: 3,
81
+ };
82
+ case 5:
83
+ return {
84
+ mode: 'text_stream',
85
+ streamFailureType: 'stream_error',
86
+ streamFailureAfterChunks: 3,
87
+ };
88
+ case 6:
89
+ return {
90
+ mode: 'event_sequence',
91
+ };
92
+ case 7:
93
+ return {
94
+ mode: 'event_sequence',
95
+ };
96
+ default:
97
+ return {
98
+ mode: 'text_stream',
99
+ };
100
+ }
101
+ }
102
+ function sleep(delayInMs) {
103
+ return tslib.__awaiter(this, void 0, void 0, function* () {
104
+ yield new Promise((resolve) => setTimeout(resolve, delayInMs));
105
+ });
106
+ }
107
+ function createMockErrorPayload(failureType) {
108
+ if (failureType === 'timeout') {
109
+ return ai.createAIErrorPayload({
110
+ message: 'Request timed out',
111
+ upstreamStatusCode: 408,
112
+ });
113
+ }
114
+ if (failureType === 'request_aborted') {
115
+ return ai.createAIErrorPayload({
116
+ message: 'Request aborted',
117
+ upstreamStatusCode: 499,
118
+ });
119
+ }
120
+ return ai.createAIErrorPayload({
121
+ message: 'Error communicating with AI',
122
+ upstreamStatusCode: 502,
123
+ failureReason: 'stream_error',
124
+ });
125
+ }
126
+ function createMockFailureResponse(failureType) {
127
+ var _a;
128
+ const payload = createMockErrorPayload(failureType);
129
+ return Response.json(payload, { status: (_a = payload.upstreamStatusCode) !== null && _a !== void 0 ? _a : 500 });
130
+ }
131
+ function chunkTextByWords(text, chunkSize) {
132
+ var _a;
133
+ const wordChunks = (_a = text.match(/\S+\s*/g)) !== null && _a !== void 0 ? _a : [text];
134
+ const normalizedChunkSize = Math.max(1, chunkSize);
135
+ const chunks = [];
136
+ for (let index = 0; index < wordChunks.length; index += normalizedChunkSize) {
137
+ chunks.push(wordChunks.slice(index, index + normalizedChunkSize).join(''));
138
+ }
139
+ return chunks;
140
+ }
141
+ function createConfigurableMockHandler(options) {
142
+ return (context) => tslib.__awaiter(this, void 0, void 0, function* () {
143
+ var _a, _b, _c, _d;
144
+ if (((_a = options.initialDelayMs) !== null && _a !== void 0 ? _a : 0) > 0) {
145
+ yield sleep(options.initialDelayMs);
146
+ }
147
+ const messageId = `mock-${context.requestId}`;
148
+ const chunks = chunkTextByWords(options.text, (_b = options.chunkSize) !== null && _b !== void 0 ? _b : 4);
149
+ const chunkDelayMs = Math.max(0, (_c = options.chunkDelayMs) !== null && _c !== void 0 ? _c : 0);
150
+ const failureAfterChunks = (_d = options.streamFailureAfterChunks) !== null && _d !== void 0 ? _d : 0;
151
+ const encoder = new TextEncoder();
152
+ const stream = new ReadableStream({
153
+ start(controller) {
154
+ return tslib.__awaiter(this, void 0, void 0, function* () {
155
+ controller.enqueue(encoder.encode(encodeEvent({
156
+ type: 'message_started',
157
+ messageId,
158
+ createdAt: Date.now(),
159
+ })));
160
+ for (let index = 0; index < chunks.length; index += 1) {
161
+ if (options.streamFailureType &&
162
+ failureAfterChunks > 0 &&
163
+ index >= failureAfterChunks) {
164
+ controller.enqueue(encoder.encode(encodeEvent({
165
+ type: 'error',
166
+ error: createMockErrorPayload(options.streamFailureType),
167
+ })));
168
+ controller.close();
169
+ return;
170
+ }
171
+ controller.enqueue(encoder.encode(encodeEvent({
172
+ type: 'text_delta',
173
+ messageId,
174
+ text: chunks[index],
175
+ })));
176
+ if (chunkDelayMs > 0) {
177
+ yield sleep(chunkDelayMs);
178
+ }
179
+ }
180
+ controller.enqueue(encoder.encode(encodeEvent({
181
+ type: 'message_completed',
182
+ messageId,
183
+ createdAt: Date.now(),
184
+ })));
185
+ controller.close();
186
+ });
187
+ },
188
+ });
189
+ return new Response(stream, {
190
+ headers: streamingHeaders,
191
+ });
192
+ });
193
+ }
194
+ function createEventSequenceMockHandler(events) {
195
+ return () => tslib.__awaiter(this, void 0, void 0, function* () { return createStreamResponse(events); });
196
+ }
197
+ function createMarkdownShowcaseEvents(messageId) {
198
+ return [
199
+ {
200
+ type: 'message_started',
201
+ messageId,
202
+ createdAt: Date.now(),
203
+ },
204
+ {
205
+ type: 'text_delta',
206
+ messageId,
207
+ text: [
208
+ '# Markdown Showcase',
209
+ '',
210
+ 'This scenario verifies headings, lists, quotes, tables, code, and image rendering in the chat message body.',
211
+ '',
212
+ '## Bullet List',
213
+ '',
214
+ '- Bullet list item one',
215
+ '- Bullet list item two',
216
+ '',
217
+ '## Quote',
218
+ '',
219
+ '> Blockquote content for layout verification.',
220
+ '',
221
+ '## Table',
222
+ '',
223
+ '| Column | Value |',
224
+ '| --- | --- |',
225
+ '| Status | OK |',
226
+ '| Mode | Markdown |',
227
+ '',
228
+ '## Code',
229
+ '',
230
+ '```ts',
231
+ "const mode = 'markdown-showcase';",
232
+ 'console.log(mode);',
233
+ '```',
234
+ '',
235
+ '## Image',
236
+ '',
237
+ '![Mock Image](https://r2.d8ger.com/default.webp)',
238
+ ].join('\n'),
239
+ },
240
+ {
241
+ type: 'message_completed',
242
+ messageId,
243
+ createdAt: Date.now(),
244
+ },
245
+ ];
246
+ }
247
+ function createTrophyCardShowcaseEvents(messageId) {
248
+ return [
249
+ {
250
+ type: 'message_started',
251
+ messageId,
252
+ createdAt: Date.now(),
253
+ },
254
+ {
255
+ type: 'text_delta',
256
+ messageId,
257
+ text: [
258
+ '# Trophy Card Showcase',
259
+ '',
260
+ 'This scenario verifies a structured chat part rendered between normal markdown blocks.',
261
+ '',
262
+ 'The card below is emitted as a dedicated `trophy_card` part, not as Markdown component syntax.',
263
+ ].join('\n'),
264
+ },
265
+ {
266
+ type: 'part',
267
+ messageId,
268
+ part: {
269
+ type: 'trophy_card',
270
+ title: 'Structured Trophy Card',
271
+ description: 'This is rendered from `MessagePart`, which is the recommended path for chat-specific rich blocks.\n\n- Reusable shared React component\n- Chat-specific structured part\n- Ready to extend to file, audio, and video cards',
272
+ },
273
+ },
274
+ {
275
+ type: 'text_delta',
276
+ messageId,
277
+ text: '\n\nUse this as the reference pattern for future file, audio, video, or tool result parts.',
278
+ },
279
+ {
280
+ type: 'message_completed',
281
+ messageId,
282
+ createdAt: Date.now(),
283
+ },
284
+ ];
285
+ }
286
+ function createScenarioMockHandler(params) {
287
+ const scenario = getMockScenario(params.mockType, params.mockTimeoutSeconds * 1000);
288
+ const messageId = `mock-scenario-${params.mockType}`;
289
+ if (scenario.immediateErrorType) {
290
+ return () => tslib.__awaiter(this, void 0, void 0, function* () { return createMockFailureResponse(scenario.immediateErrorType); });
291
+ }
292
+ if (scenario.mode === 'event_sequence') {
293
+ if (params.mockType === 6) {
294
+ return createEventSequenceMockHandler(createMarkdownShowcaseEvents(messageId));
295
+ }
296
+ if (params.mockType === 7) {
297
+ return createEventSequenceMockHandler(createTrophyCardShowcaseEvents(messageId));
298
+ }
299
+ }
300
+ return createConfigurableMockHandler({
301
+ text: params.text,
302
+ initialDelayMs: scenario.initialDelayMs,
303
+ chunkDelayMs: params.mockStreamChunkDelayMs,
304
+ chunkSize: params.mockStreamChunkSize,
305
+ streamFailureType: scenario.streamFailureType,
306
+ streamFailureAfterChunks: scenario.streamFailureAfterChunks,
307
+ });
308
+ }
309
+
310
+ exports.createConfigurableMockHandler = createConfigurableMockHandler;
311
+ exports.createErrorMockResponse = createErrorMockResponse;
312
+ exports.createScenarioMockHandler = createScenarioMockHandler;
313
+ exports.createSimpleMockHandler = createSimpleMockHandler;
314
+ exports.getMockScenario = getMockScenario;
@@ -0,0 +1,308 @@
1
+ import { __awaiter } from 'tslib';
2
+ import { createAIErrorPayload } from '@windrun-huaiin/contracts/ai';
3
+
4
+ const streamingHeaders = {
5
+ 'Content-Type': 'text/event-stream; charset=utf-8',
6
+ 'Cache-Control': 'no-cache, no-store, max-age=0, must-revalidate, no-transform',
7
+ Connection: 'keep-alive',
8
+ Pragma: 'no-cache',
9
+ 'X-Accel-Buffering': 'no',
10
+ };
11
+ function encodeEvent(event) {
12
+ return `data: ${JSON.stringify(event)}\n\n`;
13
+ }
14
+ function createStreamResponse(events) {
15
+ const encoder = new TextEncoder();
16
+ const stream = new ReadableStream({
17
+ start(controller) {
18
+ for (const event of events) {
19
+ controller.enqueue(encoder.encode(encodeEvent(event)));
20
+ }
21
+ controller.close();
22
+ },
23
+ });
24
+ return new Response(stream, {
25
+ headers: streamingHeaders,
26
+ });
27
+ }
28
+ function createSimpleMockHandler(text) {
29
+ return (context) => {
30
+ const messageId = `mock-${context.requestId}`;
31
+ return createStreamResponse([
32
+ {
33
+ type: 'message_started',
34
+ messageId,
35
+ createdAt: Date.now(),
36
+ },
37
+ {
38
+ type: 'text_delta',
39
+ messageId,
40
+ text,
41
+ },
42
+ {
43
+ type: 'message_completed',
44
+ messageId,
45
+ createdAt: Date.now(),
46
+ },
47
+ ]);
48
+ };
49
+ }
50
+ function createErrorMockResponse(statusCode, message) {
51
+ return Response.json(createAIErrorPayload({
52
+ message,
53
+ upstreamStatusCode: statusCode,
54
+ }), { status: statusCode });
55
+ }
56
+ function getMockScenario(mockType, mockTimeoutMs) {
57
+ switch (mockType) {
58
+ case 1:
59
+ return {
60
+ mode: 'text_stream',
61
+ initialDelayMs: mockTimeoutMs,
62
+ };
63
+ case 2:
64
+ return {
65
+ mode: 'text_stream',
66
+ immediateErrorType: 'timeout',
67
+ };
68
+ case 3:
69
+ return {
70
+ mode: 'text_stream',
71
+ streamFailureType: 'timeout',
72
+ streamFailureAfterChunks: 3,
73
+ };
74
+ case 4:
75
+ return {
76
+ mode: 'text_stream',
77
+ streamFailureType: 'request_aborted',
78
+ streamFailureAfterChunks: 3,
79
+ };
80
+ case 5:
81
+ return {
82
+ mode: 'text_stream',
83
+ streamFailureType: 'stream_error',
84
+ streamFailureAfterChunks: 3,
85
+ };
86
+ case 6:
87
+ return {
88
+ mode: 'event_sequence',
89
+ };
90
+ case 7:
91
+ return {
92
+ mode: 'event_sequence',
93
+ };
94
+ default:
95
+ return {
96
+ mode: 'text_stream',
97
+ };
98
+ }
99
+ }
100
+ function sleep(delayInMs) {
101
+ return __awaiter(this, void 0, void 0, function* () {
102
+ yield new Promise((resolve) => setTimeout(resolve, delayInMs));
103
+ });
104
+ }
105
+ function createMockErrorPayload(failureType) {
106
+ if (failureType === 'timeout') {
107
+ return createAIErrorPayload({
108
+ message: 'Request timed out',
109
+ upstreamStatusCode: 408,
110
+ });
111
+ }
112
+ if (failureType === 'request_aborted') {
113
+ return createAIErrorPayload({
114
+ message: 'Request aborted',
115
+ upstreamStatusCode: 499,
116
+ });
117
+ }
118
+ return createAIErrorPayload({
119
+ message: 'Error communicating with AI',
120
+ upstreamStatusCode: 502,
121
+ failureReason: 'stream_error',
122
+ });
123
+ }
124
+ function createMockFailureResponse(failureType) {
125
+ var _a;
126
+ const payload = createMockErrorPayload(failureType);
127
+ return Response.json(payload, { status: (_a = payload.upstreamStatusCode) !== null && _a !== void 0 ? _a : 500 });
128
+ }
129
+ function chunkTextByWords(text, chunkSize) {
130
+ var _a;
131
+ const wordChunks = (_a = text.match(/\S+\s*/g)) !== null && _a !== void 0 ? _a : [text];
132
+ const normalizedChunkSize = Math.max(1, chunkSize);
133
+ const chunks = [];
134
+ for (let index = 0; index < wordChunks.length; index += normalizedChunkSize) {
135
+ chunks.push(wordChunks.slice(index, index + normalizedChunkSize).join(''));
136
+ }
137
+ return chunks;
138
+ }
139
+ function createConfigurableMockHandler(options) {
140
+ return (context) => __awaiter(this, void 0, void 0, function* () {
141
+ var _a, _b, _c, _d;
142
+ if (((_a = options.initialDelayMs) !== null && _a !== void 0 ? _a : 0) > 0) {
143
+ yield sleep(options.initialDelayMs);
144
+ }
145
+ const messageId = `mock-${context.requestId}`;
146
+ const chunks = chunkTextByWords(options.text, (_b = options.chunkSize) !== null && _b !== void 0 ? _b : 4);
147
+ const chunkDelayMs = Math.max(0, (_c = options.chunkDelayMs) !== null && _c !== void 0 ? _c : 0);
148
+ const failureAfterChunks = (_d = options.streamFailureAfterChunks) !== null && _d !== void 0 ? _d : 0;
149
+ const encoder = new TextEncoder();
150
+ const stream = new ReadableStream({
151
+ start(controller) {
152
+ return __awaiter(this, void 0, void 0, function* () {
153
+ controller.enqueue(encoder.encode(encodeEvent({
154
+ type: 'message_started',
155
+ messageId,
156
+ createdAt: Date.now(),
157
+ })));
158
+ for (let index = 0; index < chunks.length; index += 1) {
159
+ if (options.streamFailureType &&
160
+ failureAfterChunks > 0 &&
161
+ index >= failureAfterChunks) {
162
+ controller.enqueue(encoder.encode(encodeEvent({
163
+ type: 'error',
164
+ error: createMockErrorPayload(options.streamFailureType),
165
+ })));
166
+ controller.close();
167
+ return;
168
+ }
169
+ controller.enqueue(encoder.encode(encodeEvent({
170
+ type: 'text_delta',
171
+ messageId,
172
+ text: chunks[index],
173
+ })));
174
+ if (chunkDelayMs > 0) {
175
+ yield sleep(chunkDelayMs);
176
+ }
177
+ }
178
+ controller.enqueue(encoder.encode(encodeEvent({
179
+ type: 'message_completed',
180
+ messageId,
181
+ createdAt: Date.now(),
182
+ })));
183
+ controller.close();
184
+ });
185
+ },
186
+ });
187
+ return new Response(stream, {
188
+ headers: streamingHeaders,
189
+ });
190
+ });
191
+ }
192
+ function createEventSequenceMockHandler(events) {
193
+ return () => __awaiter(this, void 0, void 0, function* () { return createStreamResponse(events); });
194
+ }
195
+ function createMarkdownShowcaseEvents(messageId) {
196
+ return [
197
+ {
198
+ type: 'message_started',
199
+ messageId,
200
+ createdAt: Date.now(),
201
+ },
202
+ {
203
+ type: 'text_delta',
204
+ messageId,
205
+ text: [
206
+ '# Markdown Showcase',
207
+ '',
208
+ 'This scenario verifies headings, lists, quotes, tables, code, and image rendering in the chat message body.',
209
+ '',
210
+ '## Bullet List',
211
+ '',
212
+ '- Bullet list item one',
213
+ '- Bullet list item two',
214
+ '',
215
+ '## Quote',
216
+ '',
217
+ '> Blockquote content for layout verification.',
218
+ '',
219
+ '## Table',
220
+ '',
221
+ '| Column | Value |',
222
+ '| --- | --- |',
223
+ '| Status | OK |',
224
+ '| Mode | Markdown |',
225
+ '',
226
+ '## Code',
227
+ '',
228
+ '```ts',
229
+ "const mode = 'markdown-showcase';",
230
+ 'console.log(mode);',
231
+ '```',
232
+ '',
233
+ '## Image',
234
+ '',
235
+ '![Mock Image](https://r2.d8ger.com/default.webp)',
236
+ ].join('\n'),
237
+ },
238
+ {
239
+ type: 'message_completed',
240
+ messageId,
241
+ createdAt: Date.now(),
242
+ },
243
+ ];
244
+ }
245
+ function createTrophyCardShowcaseEvents(messageId) {
246
+ return [
247
+ {
248
+ type: 'message_started',
249
+ messageId,
250
+ createdAt: Date.now(),
251
+ },
252
+ {
253
+ type: 'text_delta',
254
+ messageId,
255
+ text: [
256
+ '# Trophy Card Showcase',
257
+ '',
258
+ 'This scenario verifies a structured chat part rendered between normal markdown blocks.',
259
+ '',
260
+ 'The card below is emitted as a dedicated `trophy_card` part, not as Markdown component syntax.',
261
+ ].join('\n'),
262
+ },
263
+ {
264
+ type: 'part',
265
+ messageId,
266
+ part: {
267
+ type: 'trophy_card',
268
+ title: 'Structured Trophy Card',
269
+ description: 'This is rendered from `MessagePart`, which is the recommended path for chat-specific rich blocks.\n\n- Reusable shared React component\n- Chat-specific structured part\n- Ready to extend to file, audio, and video cards',
270
+ },
271
+ },
272
+ {
273
+ type: 'text_delta',
274
+ messageId,
275
+ text: '\n\nUse this as the reference pattern for future file, audio, video, or tool result parts.',
276
+ },
277
+ {
278
+ type: 'message_completed',
279
+ messageId,
280
+ createdAt: Date.now(),
281
+ },
282
+ ];
283
+ }
284
+ function createScenarioMockHandler(params) {
285
+ const scenario = getMockScenario(params.mockType, params.mockTimeoutSeconds * 1000);
286
+ const messageId = `mock-scenario-${params.mockType}`;
287
+ if (scenario.immediateErrorType) {
288
+ return () => __awaiter(this, void 0, void 0, function* () { return createMockFailureResponse(scenario.immediateErrorType); });
289
+ }
290
+ if (scenario.mode === 'event_sequence') {
291
+ if (params.mockType === 6) {
292
+ return createEventSequenceMockHandler(createMarkdownShowcaseEvents(messageId));
293
+ }
294
+ if (params.mockType === 7) {
295
+ return createEventSequenceMockHandler(createTrophyCardShowcaseEvents(messageId));
296
+ }
297
+ }
298
+ return createConfigurableMockHandler({
299
+ text: params.text,
300
+ initialDelayMs: scenario.initialDelayMs,
301
+ chunkDelayMs: params.mockStreamChunkDelayMs,
302
+ chunkSize: params.mockStreamChunkSize,
303
+ streamFailureType: scenario.streamFailureType,
304
+ streamFailureAfterChunks: scenario.streamFailureAfterChunks,
305
+ });
306
+ }
307
+
308
+ export { createConfigurableMockHandler, createErrorMockResponse, createScenarioMockHandler, createSimpleMockHandler, getMockScenario };
@@ -0,0 +1,12 @@
1
+ import type { OpenRouterClientConfig, OpenRouterRequestBody, OpenRouterStreamResult } from './types';
2
+ export declare function callOpenRouterStream(config: OpenRouterClientConfig, body: OpenRouterRequestBody, signal: AbortSignal): Promise<OpenRouterStreamResult>;
3
+ export declare function guardedOpenRouterStreamStart(response: Response): Promise<{
4
+ ok: false;
5
+ error: import("@windrun-huaiin/contracts/ai").AIErrorPayload;
6
+ stream?: undefined;
7
+ } | {
8
+ ok: true;
9
+ stream: ReadableStream<Uint8Array<ArrayBufferLike>>;
10
+ error?: undefined;
11
+ }>;
12
+ //# sourceMappingURL=openrouter-client.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"openrouter-client.d.ts","sourceRoot":"","sources":["../../../src/services/ai/openrouter-client.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EACV,sBAAsB,EACtB,qBAAqB,EACrB,sBAAsB,EACvB,MAAM,SAAS,CAAC;AAIjB,wBAAsB,oBAAoB,CACxC,MAAM,EAAE,sBAAsB,EAC9B,IAAI,EAAE,qBAAqB,EAC3B,MAAM,EAAE,WAAW,GAClB,OAAO,CAAC,sBAAsB,CAAC,CAiCjC;AAED,wBAAsB,4BAA4B,CAChD,QAAQ,EAAE,QAAQ;;;;;;;;GA4CnB"}