@astro-minimax/ai 0.2.0 → 0.7.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (35)
  1. package/LICENSE +21 -0
  2. package/dist/intelligence/citation-guard.d.ts +7 -0
  3. package/dist/intelligence/citation-guard.d.ts.map +1 -1
  4. package/dist/intelligence/citation-guard.js +57 -13
  5. package/dist/intelligence/index.d.ts +4 -2
  6. package/dist/intelligence/index.d.ts.map +1 -1
  7. package/dist/intelligence/index.js +2 -2
  8. package/dist/intelligence/intent-detect.d.ts +12 -1
  9. package/dist/intelligence/intent-detect.d.ts.map +1 -1
  10. package/dist/intelligence/intent-detect.js +67 -0
  11. package/dist/prompt/dynamic-layer.d.ts.map +1 -1
  12. package/dist/prompt/dynamic-layer.js +30 -7
  13. package/dist/prompt/static-layer.d.ts.map +1 -1
  14. package/dist/prompt/static-layer.js +73 -3
  15. package/dist/prompt/types.d.ts +1 -0
  16. package/dist/prompt/types.d.ts.map +1 -1
  17. package/dist/provider-manager/openai.d.ts.map +1 -1
  18. package/dist/provider-manager/openai.js +40 -11
  19. package/dist/server/chat-handler.d.ts.map +1 -1
  20. package/dist/server/chat-handler.js +64 -241
  21. package/dist/server/dev-server.js +0 -0
  22. package/dist/server/index.d.ts +1 -0
  23. package/dist/server/index.d.ts.map +1 -1
  24. package/dist/server/index.js +1 -0
  25. package/dist/server/notify.d.ts.map +1 -1
  26. package/dist/server/notify.js +13 -1
  27. package/dist/server/stream-helpers.d.ts +45 -0
  28. package/dist/server/stream-helpers.d.ts.map +1 -0
  29. package/dist/server/stream-helpers.js +197 -0
  30. package/dist/server/types.d.ts +1 -0
  31. package/dist/server/types.d.ts.map +1 -1
  32. package/dist/utils/i18n.d.ts +1 -1
  33. package/dist/utils/i18n.d.ts.map +1 -1
  34. package/dist/utils/i18n.js +8 -0
  35. package/package.json +36 -13
package/dist/server/chat-handler.js
@@ -1,12 +1,37 @@
  import { createUIMessageStream, createUIMessageStreamResponse, streamText, convertToModelMessages, } from 'ai';
  import { t, getLang } from '../utils/i18n.js';
- import { getClientIP, checkRateLimit, rateLimitResponse, searchArticles, searchProjects, getSessionCacheKey, getCachedContext, setCachedContext, shouldReuseSearchContext, buildLocalSearchQuery, shouldRunKeywordExtraction, extractSearchKeywords, KEYWORD_EXTRACTION_TIMEOUT_MS, shouldSkipAnalysis, analyzeRetrievedEvidence, buildEvidenceSection, EVIDENCE_ANALYSIS_TIMEOUT_MS, getCitationGuardPreflight, buildSystemPrompt, getAuthorContext, getVoiceProfile, mergeResults, ProviderManager, createCacheAdapter, detectPublicQuestion, getGlobalSearchCache, setGlobalSearchCache, getGlobalCacheTTL, getResponseCache, setResponseCache, getResponseCacheConfig, createResponsePlaybackGenerator, } from '../index.js';
+ import { getClientIP, checkRateLimit, rateLimitResponse, searchArticles, searchProjects, getSessionCacheKey, getCachedContext, setCachedContext, shouldReuseSearchContext, buildLocalSearchQuery, shouldRunKeywordExtraction, extractSearchKeywords, KEYWORD_EXTRACTION_TIMEOUT_MS, shouldSkipAnalysis, analyzeRetrievedEvidence, buildEvidenceSection, EVIDENCE_ANALYSIS_TIMEOUT_MS, getCitationGuardPreflight, buildSystemPrompt, getAuthorContext, getVoiceProfile, mergeResults, getProviderManager, createCacheAdapter, detectPublicQuestion, getGlobalSearchCache, setGlobalSearchCache, getGlobalCacheTTL, getResponseCache, setResponseCache, getResponseCacheConfig, rankArticlesByIntent, } from '../index.js';
  import { createChatStatusData } from './types.js';
  import { errors, corsPreflightResponse } from './errors.js';
  import { notifyAiChat } from './notify.js';
+ import { writeSearchStatus, writeGeneratingStatus, writeSourceArticles, streamLLMResponse, streamMockFallback, streamCachedResponse, } from './stream-helpers.js';
  const MAX_HISTORY_MESSAGES = 20;
  const MAX_INPUT_LENGTH = 500;
  const REQUEST_TIMEOUT_MS = 45_000;
+ function sendNotification(args) {
+     const { env, messages, responseText, relatedArticles, model, usage, timing, cacheKey, waitUntil } = args;
+     const sessionId = cacheKey || `dev-${Date.now().toString(36)}`;
+     const notifyArticles = relatedArticles.slice(0, 5).map(a => ({
+         title: a.title,
+         url: a.url,
+     }));
+     const notifyPromise = notifyAiChat({
+         env,
+         sessionId,
+         messages,
+         aiResponse: responseText,
+         referencedArticles: notifyArticles,
+         model,
+         usage,
+         timing,
+     });
+     if (waitUntil) {
+         waitUntil(notifyPromise);
+     }
+     else {
+         void notifyPromise;
+     }
+ }
  // ── Message Helpers ───────────────────────────────────────────
  function getMessageText(message) {
      if (Array.isArray(message.parts)) {
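The new `sendNotification` helper above routes the `notifyAiChat` promise through `waitUntil` when the caller supplies one, so delivery can finish after the response has been returned; without it, the promise is simply fired and forgotten. A minimal sketch of wiring this up from a Cloudflare Worker (the `/server` import path and the `ExecutionContext` plumbing are assumptions; only the `{ env, request, waitUntil }` options shape comes from this diff):

// Sketch: forwarding the Workers runtime's waitUntil into handleChatRequest.
import { handleChatRequest } from '@astro-minimax/ai/server'; // import path assumed

export default {
  async fetch(request: Request, env: any /* ChatHandlerEnv */, ctx: ExecutionContext): Promise<Response> {
    // Binding waitUntil keeps notifyAiChat alive after the streamed
    // Response has already been handed back to the client.
    return handleChatRequest({ env, request, waitUntil: ctx.waitUntil.bind(ctx) });
  },
};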
@@ -64,7 +89,7 @@ function buildArticleContextPrompt(context) {
  }
  // ── Main Handler ──────────────────────────────────────────────
  export async function handleChatRequest(options) {
-     const { env, request: req } = options;
+     const { env, request: req, waitUntil } = options;
      if (req.method === 'OPTIONS')
          return corsPreflightResponse();
      if (req.method !== 'POST')
@@ -97,7 +122,7 @@ export async function handleChatRequest(options) {
      const requestAbort = new AbortController();
      const requestTimer = setTimeout(() => requestAbort.abort(), REQUEST_TIMEOUT_MS);
      try {
-         return await runPipeline({ env, messages, latestText, context, req, requestAbort, lang });
+         return await runPipeline({ env, messages, latestText, context, req, requestAbort, lang, waitUntil });
      }
      catch (err) {
          if (requestAbort.signal.aborted)
@@ -110,11 +135,11 @@ export async function handleChatRequest(options) {
      }
  }
  async function runPipeline(args) {
-     const { env, messages, latestText, context, req, lang } = args;
+     const { env, messages, latestText, context, req, lang, waitUntil } = args;
      const timing = { start: Date.now() };
      const cache = createCacheAdapter(env);
      const responseCacheConfig = getResponseCacheConfig(env);
-     const manager = new ProviderManager(env, {
+     const manager = getProviderManager(env, {
          enableMockFallback: true,
          unhealthyThreshold: 3,
          healthRecoveryTTL: 60_000,
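Note the switch from `new ProviderManager(env, ...)` to `getProviderManager(env, ...)`: constructing the manager per request would reset its health bookkeeping, while an accessor implies a shared instance whose `unhealthyThreshold` counts and `healthRecoveryTTL` timers persist across requests. The implementation is not shown in this diff; a plausible sketch (the cache variable and the types below are assumptions, only the function name and option fields appear above):

// Hypothetical memoized accessor for ProviderManager.
type ProviderManagerOptions = {
  enableMockFallback?: boolean;
  unhealthyThreshold?: number;
  healthRecoveryTTL?: number;
};

declare class ProviderManager {
  constructor(env: unknown, options?: ProviderManagerOptions);
}

let cached: ProviderManager | undefined;

export function getProviderManager(env: unknown, options?: ProviderManagerOptions): ProviderManager {
  // Reuse one instance so provider failure counts and recovery timers
  // survive between requests instead of resetting on every call.
  cached ??= new ProviderManager(env, options);
  return cached;
}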
@@ -136,93 +161,14 @@ async function runPipeline(args) {
          if (cachedResponse) {
              globalCacheHit = true;
              globalCacheType = publicQuestion.type;
+             const notifyTiming = { total: Date.now() - timing.start };
+             sendNotification({ env, messages, responseText: cachedResponse.response, relatedArticles: cachedResponse.articles, timing: notifyTiming, waitUntil });
              const stream = createUIMessageStream({
                  execute: async ({ writer }) => {
-                     writer.write({
-                         type: 'message-metadata',
-                         messageMetadata: createChatStatusData({
-                             stage: 'search',
-                             message: t('ai.status.found', lang, { count: cachedResponse.articles.length + cachedResponse.projects.length }),
-                             progress: 40,
-                         }),
-                     });
-                     writer.write({
-                         type: 'message-metadata',
-                         messageMetadata: createChatStatusData({
-                             stage: 'answer',
-                             message: t('ai.status.generating', lang),
-                             progress: 60,
-                         }),
-                     });
-                     for (const article of cachedResponse.articles.slice(0, 3)) {
-                         try {
-                             writer.write({
-                                 type: 'source-url',
-                                 sourceId: `source-${article.title}`,
-                                 url: article.url ?? '#',
-                                 title: article.title,
-                             });
-                         }
-                         catch { /* best-effort */ }
-                     }
-                     writer.write({
-                         type: 'message-metadata',
-                         messageMetadata: createChatStatusData({
-                             stage: 'answer',
-                             message: t('ai.status.generating', lang),
-                             progress: 70,
-                         }),
-                     });
-                     const playbackGenerator = createResponsePlaybackGenerator(cachedResponse, responseCacheConfig);
-                     let hasThinking = !!cachedResponse.thinking;
-                     let thinkingId;
-                     const textId = `text-${Date.now()}`;
-                     let textStarted = false;
-                     for await (const chunk of playbackGenerator) {
-                         if (chunk.type === 'thinking') {
-                             if (!thinkingId) {
-                                 thinkingId = `thinking-${Date.now()}`;
-                                 writer.write({ type: 'reasoning-start', id: thinkingId });
-                             }
-                             writer.write({ type: 'reasoning-delta', id: thinkingId, delta: chunk.text });
-                         }
-                         else {
-                             if (thinkingId) {
-                                 writer.write({ type: 'reasoning-end', id: thinkingId });
-                                 thinkingId = undefined;
-                             }
-                             if (!textStarted) {
-                                 writer.write({ type: 'text-start', id: textId });
-                                 textStarted = true;
-                             }
-                             writer.write({ type: 'text-delta', id: textId, delta: chunk.text });
-                         }
-                     }
-                     if (thinkingId) {
-                         writer.write({ type: 'reasoning-end', id: thinkingId });
-                     }
-                     if (textStarted) {
-                         writer.write({ type: 'text-end', id: textId });
-                     }
-                     writer.write({
-                         type: 'message-metadata',
-                         messageMetadata: createChatStatusData({
-                             stage: 'answer',
-                             message: t('ai.status.generating', lang),
-                             progress: 100,
-                             done: true,
-                         }),
-                     });
-                     writer.write({ type: 'finish', finishReason: 'stop' });
-                 },
-             });
-             return createUIMessageStreamResponse({
-                 stream,
-                 headers: {
-                     'Access-Control-Allow-Origin': '*',
-                     'Cache-Control': 'no-cache',
+                     await streamCachedResponse(writer, cachedResponse, responseCacheConfig, lang);
                  },
              });
+             return createUIMessageStreamResponse({ stream, headers: { 'Access-Control-Allow-Origin': '*', 'Cache-Control': 'no-cache' } });
          }
      }
      // Check search context cache
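The inline playback loop deleted here (now wrapped by `streamCachedResponse`) documents the UI message stream part protocol used throughout this file: reasoning and text are each bracketed by start/end parts sharing an id, deltas carry the content, and a single finish part closes the message. Replaying one cached answer therefore reduces to a sequence like this (ids and the writer shape are illustrative; the part types are taken verbatim from the removed code):

declare const writer: { write: (part: unknown) => void };

// One thinking block followed by one text block, then finish.
writer.write({ type: 'reasoning-start', id: 'thinking-1' });
writer.write({ type: 'reasoning-delta', id: 'thinking-1', delta: 'Checking cached sources...' });
writer.write({ type: 'reasoning-end', id: 'thinking-1' });
writer.write({ type: 'text-start', id: 'text-1' });
writer.write({ type: 'text-delta', id: 'text-1', delta: 'Here is the cached answer.' });
writer.write({ type: 'text-end', id: 'text-1' });
writer.write({ type: 'finish', finishReason: 'stop' });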
@@ -232,159 +178,37 @@ async function runPipeline(args) {
              globalCacheType = publicQuestion.type;
              const stream = createUIMessageStream({
                  execute: async ({ writer }) => {
-                     writer.write({
-                         type: 'message-metadata',
-                         messageMetadata: createChatStatusData({
-                             stage: 'search',
-                             message: t('ai.status.found', lang, { count: cachedSearch.articles.length + cachedSearch.projects.length }),
-                             progress: 40,
-                         }),
-                     });
-                     const articleCount = cachedSearch.articles.length + cachedSearch.projects.length;
-                     if (articleCount > 0) {
-                         writer.write({
-                             type: 'message-metadata',
-                             messageMetadata: createChatStatusData({
-                                 stage: 'answer',
-                                 message: t('ai.status.generating', lang),
-                                 progress: 60,
-                             }),
-                         });
-                     }
-                     for (const article of cachedSearch.articles.slice(0, 3)) {
-                         try {
-                             writer.write({
-                                 type: 'source-url',
-                                 sourceId: `source-${article.title}`,
-                                 url: article.url ?? '#',
-                                 title: article.title,
-                             });
-                         }
-                         catch { /* best-effort */ }
+                     const w = writer;
+                     writeSearchStatus(w, cachedSearch.articles.length + cachedSearch.projects.length, lang);
+                     if (cachedSearch.articles.length + cachedSearch.projects.length > 0) {
+                         writeGeneratingStatus(w, lang);
                      }
+                     writeSourceArticles(w, cachedSearch.articles);
                      let responseText = '';
                      if (adapter) {
-                         try {
-                             const provider = adapter.getProvider();
-                             const articlePrompt = buildArticleContextPrompt(context);
-                             const systemPrompt = buildSystemPrompt({
-                                 static: {
-                                     authorName: env.SITE_AUTHOR || '博主',
-                                     siteUrl: env.SITE_URL || '',
-                                     lang,
-                                 },
-                                 semiStatic: {
-                                     authorContext: getAuthorContext(),
-                                     voiceProfile: getVoiceProfile(),
-                                 },
-                                 dynamic: {
-                                     userQuery: cachedSearch.query,
-                                     articles: cachedSearch.articles,
-                                     projects: cachedSearch.projects,
-                                     evidenceSection: articlePrompt,
-                                 },
-                             });
-                             const result = streamText({
-                                 model: provider.chatModel(adapter.model),
-                                 system: systemPrompt,
-                                 messages: await convertToModelMessages(messages),
-                                 temperature: 0.3,
-                                 maxOutputTokens: 2500,
-                             });
-                             const streamErrors = [];
-                             writer.merge(result.toUIMessageStream({ sendFinish: false }));
-                             await result.consumeStream({
-                                 onError: (error) => {
-                                     streamErrors.push(error instanceof Error ? error : new Error(String(error)));
-                                 },
-                             });
-                             const text = await result.text;
-                             const reasoningPromise = result.reasoning;
-                             let reasoningText;
-                             if (reasoningPromise) {
-                                 try {
-                                     const reasoningOutput = await Promise.resolve(reasoningPromise);
-                                     reasoningText = typeof reasoningOutput === 'string' ? reasoningOutput :
-                                         (Array.isArray(reasoningOutput) ? reasoningOutput.map((r) => {
-                                             if (typeof r === 'object' && r !== null && 'text' in r)
-                                                 return r.text;
-                                             return String(r);
-                                         }).join('') : undefined);
-                                 }
-                                 catch { /* reasoning is optional */ }
-                             }
-                             responseText = text;
-                             if (streamErrors.length > 0) {
-                                 adapter.recordFailure(streamErrors[0]);
-                                 const errorId = `error-${Date.now()}`;
-                                 writer.write({ type: 'text-start', id: errorId });
-                                 writer.write({
-                                     type: 'text-delta',
-                                     id: errorId,
-                                     delta: t('ai.error.generic', lang)
-                                 });
-                                 writer.write({ type: 'text-end', id: errorId });
-                                 writer.write({ type: 'finish', finishReason: 'error' });
-                             }
-                             else if (text.length > 0) {
-                                 adapter.recordSuccess();
-                                 writer.write({ type: 'finish', finishReason: 'stop' });
-                             }
-                             else {
-                                 const noOutputId = `no-output-${Date.now()}`;
-                                 writer.write({ type: 'text-start', id: noOutputId });
-                                 writer.write({ type: 'text-delta', id: noOutputId, delta: t('ai.error.noOutput', lang) });
-                                 writer.write({ type: 'text-end', id: noOutputId });
-                                 writer.write({ type: 'finish', finishReason: 'stop' });
-                             }
-                             // Save to response cache if enabled
-                             if (responseCacheConfig.enabled && text.length > 0 && streamErrors.length === 0) {
-                                 const globalTTL = getGlobalCacheTTL(publicQuestion.type);
-                                 const responseCacheData = {
-                                     query: cachedSearch.query,
-                                     thinking: reasoningText,
-                                     response: text,
-                                     articles: cachedSearch.articles,
-                                     projects: cachedSearch.projects,
-                                     lang,
-                                     model: adapter.model,
-                                     updatedAt: Date.now(),
-                                 };
-                                 await setResponseCache(cache, publicQuestion.type, responseCacheData, globalTTL, globalCacheContext);
-                             }
-                         }
-                         catch (err) {
-                             console.error('[chat-handler] Global cache LLM error:', err);
-                             const errorId = `error-${Date.now()}`;
-                             writer.write({ type: 'text-start', id: errorId });
-                             writer.write({
-                                 type: 'text-delta',
-                                 id: errorId,
-                                 delta: t('ai.error.generic', lang)
-                             });
-                             writer.write({ type: 'text-end', id: errorId });
-                             writer.write({ type: 'finish', finishReason: 'error' });
+                         const articlePrompt = buildArticleContextPrompt(context);
+                         const systemPrompt = buildSystemPrompt({
+                             static: { authorName: env.SITE_AUTHOR || '博主', siteUrl: env.SITE_URL || '', lang },
+                             semiStatic: { authorContext: getAuthorContext(), voiceProfile: getVoiceProfile() },
+                             dynamic: { userQuery: cachedSearch.query, articles: cachedSearch.articles, projects: cachedSearch.projects, evidenceSection: articlePrompt },
+                         });
+                         const llmResult = await streamLLMResponse({ writer: w, adapter, systemPrompt, messages, lang });
+                         responseText = llmResult.responseText;
+                         if (responseCacheConfig.enabled && llmResult.success && llmResult.responseText.length > 0) {
+                             const globalTTL = getGlobalCacheTTL(publicQuestion.type);
+                             const responseCacheData = {
+                                 query: cachedSearch.query, thinking: llmResult.reasoningText, response: llmResult.responseText,
+                                 articles: cachedSearch.articles, projects: cachedSearch.projects, lang, model: adapter.model, updatedAt: Date.now(),
+                             };
+                             await setResponseCache(cache, publicQuestion.type, responseCacheData, globalTTL, globalCacheContext);
                          }
                      }
                      else {
-                         const { getMockResponse } = await import('../providers/mock.js');
-                         const mockText = getMockResponse(latestText, lang);
-                         responseText = mockText;
-                         const mockId = `mock-${Date.now()}`;
-                         writer.write({ type: 'text-start', id: mockId });
-                         writer.write({ type: 'text-delta', id: mockId, delta: mockText });
-                         writer.write({ type: 'text-end', id: mockId });
-                         writer.write({ type: 'finish', finishReason: 'stop' });
+                         responseText = await streamMockFallback(w, latestText, lang);
                      }
                  },
              });
-             return createUIMessageStreamResponse({
-                 stream,
-                 headers: {
-                     'Access-Control-Allow-Origin': '*',
-                     'Cache-Control': 'no-cache',
-                 },
-             });
+             return createUIMessageStreamResponse({ stream, headers: { 'Access-Control-Allow-Origin': '*', 'Cache-Control': 'no-cache' } });
          }
      }
      // ── Search / Retrieval ──────────────────────────────────────
@@ -445,6 +269,7 @@ async function runPipeline(args) {
          relatedProjects = searchProjects(searchQuery);
          timing.search = Date.now() - searchStart;
      }
+     relatedArticles = rankArticlesByIntent(latestText, relatedArticles);
      if (cacheKey) {
          await setCachedContext(cacheKey, {
              query: searchQuery,
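`rankArticlesByIntent` is new in this release; its implementation lives in `dist/intelligence/intent-detect.js` (+67 lines in the file list) and is not shown in this hunk. From the call site it re-orders the retrieved articles by how well they match the user's latest message. A purely hypothetical keyword-overlap version with the same signature, for orientation only:

// Illustrative stand-in for rankArticlesByIntent; the real scoring is not in this diff.
interface Article {
  title: string;
  url?: string;
  tags?: string[];
}

export function rankArticlesByIntent(query: string, articles: Article[]): Article[] {
  const terms = query.toLowerCase().split(/\s+/).filter(Boolean);
  const score = (a: Article) =>
    terms.reduce((s, term) =>
      s + (a.title.toLowerCase().includes(term) ? 2 : 0)
        + ((a.tags ?? []).some(tag => tag.toLowerCase().includes(term)) ? 1 : 0), 0);
  // sort() is stable, so equally scored articles keep their retrieval order.
  return [...articles].sort((x, y) => score(y) - score(x));
}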
@@ -499,6 +324,7 @@ async function runPipeline(args) {
          userQuery: latestText,
          articles: relatedArticles,
          projects: relatedProjects,
+         lang,
      });
      // ── Build System Prompt ─────────────────────────────────────
      const articlePrompt = buildArticleContextPrompt(context);
@@ -519,6 +345,7 @@ async function runPipeline(args) {
              evidenceSection: articlePrompt
                  ? `${evidenceSection}\n${articlePrompt}`
                  : evidenceSection,
+             lang,
          },
      });
      // ── Stream Response via createUIMessageStream ───────────────
@@ -722,20 +549,16 @@ async function runPipeline(args) {
                  provider: env.AI_PROVIDER || undefined,
                  apiHost: env.AI_BASE_URL || undefined,
              } : undefined;
-             const notifyArticles = relatedArticles.slice(0, 5).map(a => ({
-                 title: a.title,
-                 url: a.url,
-             }));
-             const sessionId = cacheKey || `dev-${Date.now().toString(36)}`;
-             void notifyAiChat({
+             sendNotification({
                  env,
-                 sessionId,
                  messages,
-                 aiResponse: responseText,
-                 referencedArticles: notifyArticles,
+                 responseText,
+                 relatedArticles,
                  model: notifyModel,
                  usage: tokenUsage,
                  timing: notifyTiming,
+                 cacheKey,
+                 waitUntil,
              });
          }
      },
package/dist/server/dev-server.js (file without changes)
package/dist/server/index.d.ts
@@ -3,6 +3,7 @@ export { initializeMetadata, resetMetadataInit } from './metadata-init.js';
  export { errors, corsPreflightResponse, chatError } from './errors.js';
  export { notifyAiChat } from './notify.js';
  export type { ChatNotifyOptions } from './notify.js';
+ export { writeSearchStatus, writeGeneratingStatus, writeDoneStatus, writeSourceArticles, writeTextChunk, writeFinish, streamLLMResponse, streamMockFallback, streamCachedResponse, } from './stream-helpers.js';
  export { createChatStatusData, isChatStatusData, } from './types.js';
  export type { ChatContext, ArticleChatContext, ChatRequestBody, ChatHandlerEnv, ChatHandlerOptions, ChatStatusData, ChatStatusStage, ChatErrorResponse, MetadataConfig, } from './types.js';
  //# sourceMappingURL=index.d.ts.map
package/dist/server/index.d.ts.map
@@ -1 +1 @@
- {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/server/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,iBAAiB,EAAE,MAAM,mBAAmB,CAAC;AACtD,OAAO,EAAE,kBAAkB,EAAE,iBAAiB,EAAE,MAAM,oBAAoB,CAAC;AAC3E,OAAO,EAAE,MAAM,EAAE,qBAAqB,EAAE,SAAS,EAAE,MAAM,aAAa,CAAC;AACvE,OAAO,EAAE,YAAY,EAAE,MAAM,aAAa,CAAC;AAC3C,YAAY,EAAE,iBAAiB,EAAE,MAAM,aAAa,CAAC;AACrD,OAAO,EACL,oBAAoB,EACpB,gBAAgB,GACjB,MAAM,YAAY,CAAC;AACpB,YAAY,EACV,WAAW,EACX,kBAAkB,EAClB,eAAe,EACf,cAAc,EACd,kBAAkB,EAClB,cAAc,EACd,eAAe,EACf,iBAAiB,EACjB,cAAc,GACf,MAAM,YAAY,CAAC"}
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/server/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,iBAAiB,EAAE,MAAM,mBAAmB,CAAC;AACtD,OAAO,EAAE,kBAAkB,EAAE,iBAAiB,EAAE,MAAM,oBAAoB,CAAC;AAC3E,OAAO,EAAE,MAAM,EAAE,qBAAqB,EAAE,SAAS,EAAE,MAAM,aAAa,CAAC;AACvE,OAAO,EAAE,YAAY,EAAE,MAAM,aAAa,CAAC;AAC3C,YAAY,EAAE,iBAAiB,EAAE,MAAM,aAAa,CAAC;AACrD,OAAO,EACL,iBAAiB,EACjB,qBAAqB,EACrB,eAAe,EACf,mBAAmB,EACnB,cAAc,EACd,WAAW,EACX,iBAAiB,EACjB,kBAAkB,EAClB,oBAAoB,GACrB,MAAM,qBAAqB,CAAC;AAC7B,OAAO,EACL,oBAAoB,EACpB,gBAAgB,GACjB,MAAM,YAAY,CAAC;AACpB,YAAY,EACV,WAAW,EACX,kBAAkB,EAClB,eAAe,EACf,cAAc,EACd,kBAAkB,EAClB,cAAc,EACd,eAAe,EACf,iBAAiB,EACjB,cAAc,GACf,MAAM,YAAY,CAAC"}
package/dist/server/index.js
@@ -2,4 +2,5 @@ export { handleChatRequest } from './chat-handler.js';
  export { initializeMetadata, resetMetadataInit } from './metadata-init.js';
  export { errors, corsPreflightResponse, chatError } from './errors.js';
  export { notifyAiChat } from './notify.js';
+ export { writeSearchStatus, writeGeneratingStatus, writeDoneStatus, writeSourceArticles, writeTextChunk, writeFinish, streamLLMResponse, streamMockFallback, streamCachedResponse, } from './stream-helpers.js';
  export { createChatStatusData, isChatStatusData, } from './types.js';
package/dist/server/notify.d.ts.map
@@ -1 +1 @@
- {"version":3,"file":"notify.d.ts","sourceRoot":"","sources":["../../src/server/notify.ts"],"names":[],"mappings":"AAAA,OAAO,EAAiC,KAAK,YAAY,EAAE,KAAK,UAAU,EAAE,KAAK,SAAS,EAAE,KAAK,UAAU,EAAE,KAAK,WAAW,EAAE,MAAM,uBAAuB,CAAC;AAC7J,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,IAAI,CAAC;AAEpC,UAAU,SAAS;IACjB,yBAAyB,CAAC,EAAE,MAAM,CAAC;IACnC,uBAAuB,CAAC,EAAE,MAAM,CAAC;IACjC,kBAAkB,CAAC,EAAE,MAAM,CAAC;IAC5B,qBAAqB,CAAC,EAAE,MAAM,CAAC;IAC/B,kBAAkB,CAAC,EAAE,MAAM,CAAC;IAC5B,gBAAgB,CAAC,EAAE,MAAM,CAAC;IAC1B,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,CAAC,GAAG,EAAE,MAAM,GAAG,OAAO,CAAC;CACxB;AAuCD,MAAM,WAAW,iBAAiB;IAChC,GAAG,EAAE,SAAS,CAAC;IACf,SAAS,EAAE,MAAM,CAAC;IAClB,QAAQ,EAAE,SAAS,EAAE,CAAC;IACtB,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,kBAAkB,CAAC,EAAE,UAAU,EAAE,CAAC;IAClC,KAAK,CAAC,EAAE,SAAS,CAAC;IAClB,KAAK,CAAC,EAAE,UAAU,CAAC;IACnB,MAAM,CAAC,EAAE,WAAW,CAAC;CACtB;AAED,wBAAgB,YAAY,CAAC,OAAO,EAAE,iBAAiB,GAAG,OAAO,CAAC,YAAY,GAAG,IAAI,CAAC,CAgCrF"}
+ {"version":3,"file":"notify.d.ts","sourceRoot":"","sources":["../../src/server/notify.ts"],"names":[],"mappings":"AAAA,OAAO,EAAiC,KAAK,YAAY,EAAE,KAAK,UAAU,EAAE,KAAK,SAAS,EAAE,KAAK,UAAU,EAAE,KAAK,WAAW,EAAE,MAAM,uBAAuB,CAAC;AAC7J,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,IAAI,CAAC;AAEpC,UAAU,SAAS;IACjB,yBAAyB,CAAC,EAAE,MAAM,CAAC;IACnC,uBAAuB,CAAC,EAAE,MAAM,CAAC;IACjC,kBAAkB,CAAC,EAAE,MAAM,CAAC;IAC5B,qBAAqB,CAAC,EAAE,MAAM,CAAC;IAC/B,kBAAkB,CAAC,EAAE,MAAM,CAAC;IAC5B,gBAAgB,CAAC,EAAE,MAAM,CAAC;IAC1B,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,CAAC,GAAG,EAAE,MAAM,GAAG,OAAO,CAAC;CACxB;AAiDD,MAAM,WAAW,iBAAiB;IAChC,GAAG,EAAE,SAAS,CAAC;IACf,SAAS,EAAE,MAAM,CAAC;IAClB,QAAQ,EAAE,SAAS,EAAE,CAAC;IACtB,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,kBAAkB,CAAC,EAAE,UAAU,EAAE,CAAC;IAClC,KAAK,CAAC,EAAE,SAAS,CAAC;IAClB,KAAK,CAAC,EAAE,UAAU,CAAC;IACnB,MAAM,CAAC,EAAE,WAAW,CAAC;CACtB;AAED,wBAAgB,YAAY,CAAC,OAAO,EAAE,iBAAiB,GAAG,OAAO,CAAC,YAAY,GAAG,IAAI,CAAC,CAkCrF"}
package/dist/server/notify.js
@@ -4,8 +4,18 @@ function getNotifier(env) {
      if (notifierInstance)
          return notifierInstance;
      const hasConfig = env.NOTIFY_TELEGRAM_BOT_TOKEN || env.NOTIFY_WEBHOOK_URL || env.NOTIFY_RESEND_API_KEY;
-     if (!hasConfig)
+     if (!hasConfig) {
+         console.warn('[notify] No notification providers configured. Missing environment variables: NOTIFY_TELEGRAM_BOT_TOKEN, NOTIFY_WEBHOOK_URL, or NOTIFY_RESEND_API_KEY');
          return null;
+     }
+     const providers = [];
+     if (env.NOTIFY_TELEGRAM_BOT_TOKEN && env.NOTIFY_TELEGRAM_CHAT_ID)
+         providers.push('telegram');
+     if (env.NOTIFY_WEBHOOK_URL)
+         providers.push('webhook');
+     if (env.NOTIFY_RESEND_API_KEY && env.NOTIFY_RESEND_FROM && env.NOTIFY_RESEND_TO)
+         providers.push('email');
+     console.log(`[notify] Initializing notifier with providers: ${providers.join(', ') || 'none'}`);
      notifierInstance = createNotifier({
          telegram: env.NOTIFY_TELEGRAM_BOT_TOKEN && env.NOTIFY_TELEGRAM_CHAT_ID ? {
              botToken: env.NOTIFY_TELEGRAM_BOT_TOKEN,
@@ -36,11 +46,13 @@ export function notifyAiChat(options) {
      const { env, sessionId, messages, aiResponse, referencedArticles, model, usage, timing } = options;
      const notifier = getNotifier(env);
      if (!notifier) {
+         console.warn('[notify] AI chat notification skipped: no notifier available. Check environment variables.');
          return Promise.resolve(null);
      }
      const userMessages = messages.filter(m => m.role === 'user');
      const lastUserMessage = userMessages[userMessages.length - 1];
      if (!lastUserMessage) {
+         console.warn('[notify] AI chat notification skipped: no user message found in messages array');
          return Promise.resolve(null);
      }
      const userMessage = getMessageText(lastUserMessage);
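The detection logic added to `getNotifier` makes the enablement rules explicit: each channel activates only when its full variable group is present. The same rules as a standalone predicate (the `NotifyEnv` type is ours; the variable names and groupings come from the diff above):

type NotifyEnv = Partial<Record<
  | 'NOTIFY_TELEGRAM_BOT_TOKEN' | 'NOTIFY_TELEGRAM_CHAT_ID'
  | 'NOTIFY_WEBHOOK_URL'
  | 'NOTIFY_RESEND_API_KEY' | 'NOTIFY_RESEND_FROM' | 'NOTIFY_RESEND_TO',
  string
>>;

export function enabledProviders(env: NotifyEnv): string[] {
  const providers: string[] = [];
  if (env.NOTIFY_TELEGRAM_BOT_TOKEN && env.NOTIFY_TELEGRAM_CHAT_ID) providers.push('telegram'); // token + chat id
  if (env.NOTIFY_WEBHOOK_URL) providers.push('webhook');                                        // URL alone suffices
  if (env.NOTIFY_RESEND_API_KEY && env.NOTIFY_RESEND_FROM && env.NOTIFY_RESEND_TO) providers.push('email');
  return providers;
}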
package/dist/server/stream-helpers.d.ts
@@ -0,0 +1,45 @@
+ /**
+  * Stream helper utilities for chat-handler.
+  *
+  * Extracts duplicated stream-writing logic into reusable functions,
+  * eliminating 34+ `as never` casts and reducing chat-handler.ts size.
+  */
+ import { type UIMessage } from 'ai';
+ import type { TokenUsage } from '@astro-minimax/notify';
+ import type { ProviderAdapter } from '../provider-manager/types.js';
+ import type { CachedAIResponse, ResponseCacheConfig } from '../cache/response-cache.js';
+ type MessageStreamWriter = {
+     write: (part: unknown) => void;
+     merge: (stream: ReadableStream) => void;
+ };
+ interface SourceArticle {
+     title: string;
+     url?: string;
+ }
+ interface StreamLLMParams {
+     writer: MessageStreamWriter;
+     adapter: ProviderAdapter;
+     systemPrompt: string;
+     messages: UIMessage[];
+     lang: string;
+     temperature?: number;
+     maxOutputTokens?: number;
+ }
+ interface StreamLLMResult {
+     success: boolean;
+     responseText: string;
+     reasoningText?: string;
+     tokenUsage?: TokenUsage;
+     generationMs: number;
+ }
+ export declare function writeSearchStatus(writer: MessageStreamWriter, count: number, lang: string): void;
+ export declare function writeGeneratingStatus(writer: MessageStreamWriter, lang: string, progress?: number): void;
+ export declare function writeDoneStatus(writer: MessageStreamWriter, lang: string): void;
+ export declare function writeSourceArticles(writer: MessageStreamWriter, articles: SourceArticle[], max?: number): void;
+ export declare function writeTextChunk(writer: MessageStreamWriter, text: string, idPrefix?: string): void;
+ export declare function writeFinish(writer: MessageStreamWriter, reason?: 'stop' | 'error'): void;
+ export declare function streamLLMResponse(params: StreamLLMParams): Promise<StreamLLMResult>;
+ export declare function streamMockFallback(writer: MessageStreamWriter, question: string, lang: string): Promise<string>;
+ export declare function streamCachedResponse(writer: MessageStreamWriter, cachedResponse: CachedAIResponse, config: ResponseCacheConfig, lang: string): Promise<void>;
+ export {};
+ //# sourceMappingURL=stream-helpers.d.ts.map
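Read together with the chat-handler hunks above, the new helpers compose inside a `createUIMessageStream` execute callback roughly like this (a sketch: the deep `dist/` import path and the `declare`d inputs stand in for values the real pipeline derives from the request and search step):

import { createUIMessageStream } from 'ai';
import {
  writeSearchStatus,
  writeGeneratingStatus,
  writeSourceArticles,
  streamMockFallback,
} from '@astro-minimax/ai/dist/server/stream-helpers.js'; // path assumed

declare const articles: { title: string; url?: string }[];
declare const lang: string;
declare const question: string;

const stream = createUIMessageStream({
  execute: async ({ writer }) => {
    writeSearchStatus(writer, articles.length, lang); // 'search' stage metadata part
    writeGeneratingStatus(writer, lang);              // 'answer' stage metadata part
    writeSourceArticles(writer, articles);            // source-url parts, capped by `max`
    await streamMockFallback(writer, question, lang); // text-start/-delta/-end + finish
  },
});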
package/dist/server/stream-helpers.d.ts.map
@@ -0,0 +1 @@
+ {"version":3,"file":"stream-helpers.d.ts","sourceRoot":"","sources":["../../src/server/stream-helpers.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AAEH,OAAO,EACL,KAAK,SAAS,EAGf,MAAM,IAAI,CAAC;AAGZ,OAAO,KAAK,EAAyB,UAAU,EAAe,MAAM,uBAAuB,CAAC;AAC5F,OAAO,KAAK,EAAE,eAAe,EAAE,MAAM,8BAA8B,CAAC;AACpE,OAAO,KAAK,EACV,gBAAgB,EAChB,mBAAmB,EACpB,MAAM,4BAA4B,CAAC;AAKpC,KAAK,mBAAmB,GAAG;IACzB,KAAK,EAAE,CAAC,IAAI,EAAE,OAAO,KAAK,IAAI,CAAC;IAC/B,KAAK,EAAE,CAAC,MAAM,EAAE,cAAc,KAAK,IAAI,CAAC;CACzC,CAAC;AAEF,UAAU,aAAa;IACrB,KAAK,EAAE,MAAM,CAAC;IACd,GAAG,CAAC,EAAE,MAAM,CAAC;CACd;AAED,UAAU,eAAe;IACvB,MAAM,EAAE,mBAAmB,CAAC;IAC5B,OAAO,EAAE,eAAe,CAAC;IACzB,YAAY,EAAE,MAAM,CAAC;IACrB,QAAQ,EAAE,SAAS,EAAE,CAAC;IACtB,IAAI,EAAE,MAAM,CAAC;IACb,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,eAAe,CAAC,EAAE,MAAM,CAAC;CAC1B;AAED,UAAU,eAAe;IACvB,OAAO,EAAE,OAAO,CAAC;IACjB,YAAY,EAAE,MAAM,CAAC;IACrB,aAAa,CAAC,EAAE,MAAM,CAAC;IACvB,UAAU,CAAC,EAAE,UAAU,CAAC;IACxB,YAAY,EAAE,MAAM,CAAC;CACtB;AAID,wBAAgB,iBAAiB,CAC/B,MAAM,EAAE,mBAAmB,EAC3B,KAAK,EAAE,MAAM,EACb,IAAI,EAAE,MAAM,GACX,IAAI,CASN;AAED,wBAAgB,qBAAqB,CACnC,MAAM,EAAE,mBAAmB,EAC3B,IAAI,EAAE,MAAM,EACZ,QAAQ,SAAK,GACZ,IAAI,CASN;AAED,wBAAgB,eAAe,CAC7B,MAAM,EAAE,mBAAmB,EAC3B,IAAI,EAAE,MAAM,GACX,IAAI,CAUN;AAED,wBAAgB,mBAAmB,CACjC,MAAM,EAAE,mBAAmB,EAC3B,QAAQ,EAAE,aAAa,EAAE,EACzB,GAAG,SAAI,GACN,IAAI,CAWN;AAED,wBAAgB,cAAc,CAC5B,MAAM,EAAE,mBAAmB,EAC3B,IAAI,EAAE,MAAM,EACZ,QAAQ,SAAS,GAChB,IAAI,CAKN;AAED,wBAAgB,WAAW,CACzB,MAAM,EAAE,mBAAmB,EAC3B,MAAM,GAAE,MAAM,GAAG,OAAgB,GAChC,IAAI,CAEN;AAID,wBAAsB,iBAAiB,CACrC,MAAM,EAAE,eAAe,GACtB,OAAO,CAAC,eAAe,CAAC,CA6F1B;AAID,wBAAsB,kBAAkB,CACtC,MAAM,EAAE,mBAAmB,EAC3B,QAAQ,EAAE,MAAM,EAChB,IAAI,EAAE,MAAM,GACX,OAAO,CAAC,MAAM,CAAC,CAgBjB;AAID,wBAAsB,oBAAoB,CACxC,MAAM,EAAE,mBAAmB,EAC3B,cAAc,EAAE,gBAAgB,EAChC,MAAM,EAAE,mBAAmB,EAC3B,IAAI,EAAE,MAAM,GACX,OAAO,CAAC,IAAI,CAAC,CAyCf"}