@anyul/koishi-plugin-rss 5.2.1 → 5.2.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (38)
  1. package/lib/commands/error-handler.js +2 -5
  2. package/lib/commands/index.d.ts +17 -1
  3. package/lib/commands/index.js +388 -2
  4. package/lib/commands/subscription-edit.d.ts +7 -0
  5. package/lib/commands/subscription-edit.js +177 -0
  6. package/lib/commands/subscription-management.d.ts +12 -0
  7. package/lib/commands/subscription-management.js +176 -0
  8. package/lib/commands/utils.d.ts +13 -1
  9. package/lib/commands/utils.js +43 -2
  10. package/lib/config.js +19 -0
  11. package/lib/core/ai.d.ts +16 -2
  12. package/lib/core/ai.js +73 -6
  13. package/lib/core/feeder.d.ts +1 -1
  14. package/lib/core/feeder.js +238 -125
  15. package/lib/core/item-processor.d.ts +5 -0
  16. package/lib/core/item-processor.js +66 -136
  17. package/lib/core/notification-queue.d.ts +2 -0
  18. package/lib/core/notification-queue.js +80 -33
  19. package/lib/core/parser.js +12 -0
  20. package/lib/core/renderer.d.ts +15 -0
  21. package/lib/core/renderer.js +105 -16
  22. package/lib/index.js +28 -784
  23. package/lib/tsconfig.tsbuildinfo +1 -1
  24. package/lib/types.d.ts +24 -0
  25. package/lib/utils/common.js +52 -3
  26. package/lib/utils/error-handler.d.ts +8 -0
  27. package/lib/utils/error-handler.js +27 -0
  28. package/lib/utils/error-tracker.js +24 -8
  29. package/lib/utils/fetcher.js +68 -9
  30. package/lib/utils/logger.d.ts +4 -2
  31. package/lib/utils/logger.js +144 -6
  32. package/lib/utils/media.js +3 -6
  33. package/lib/utils/sanitizer.d.ts +58 -0
  34. package/lib/utils/sanitizer.js +227 -0
  35. package/lib/utils/security.d.ts +75 -0
  36. package/lib/utils/security.js +312 -0
  37. package/lib/utils/structured-logger.js +3 -20
  38. package/package.json +2 -1
@@ -41,12 +41,27 @@ exports.feeder = feeder;
41
41
  exports.startFeeder = startFeeder;
42
42
  exports.stopFeeder = stopFeeder;
43
43
  const koishi_1 = require("koishi");
44
+ const error_handler_1 = require("../utils/error-handler");
45
+ const error_tracker_1 = require("../utils/error-tracker");
44
46
  const logger_1 = require("../utils/logger");
45
47
  const common_1 = require("../utils/common");
46
48
  const parser_1 = require("./parser");
47
49
  const constants_1 = require("../constants");
48
50
  let interval = null;
49
51
  let queueInterval = null;
52
+ function buildFeedLogContext(rssItem) {
53
+ return {
54
+ subscribeId: String(rssItem.id),
55
+ rssId: rssItem.rssId || rssItem.title,
56
+ rssTitle: rssItem.title,
57
+ url: rssItem.url,
58
+ guildId: rssItem.guildId,
59
+ platform: rssItem.platform,
60
+ };
61
+ }
62
+ function createFeedDebug(config, rssItem) {
63
+ return (0, logger_1.createDebugWithContext)(config, buildFeedLogContext(rssItem));
64
+ }
50
65
  function findRssItem(rssList, keyword) {
51
66
  // 优先匹配列表索引(用户看到的序号 1, 2, 3...)
52
67
  if (typeof keyword === 'number' || /^\d+$/.test(String(keyword))) {
@@ -56,10 +71,14 @@ function findRssItem(rssList, keyword) {
56
71
  }
57
72
  }
58
73
  // 其他匹配方式:按 rssId、url、title 等
59
- let index = ((rssList.findIndex(i => i.rssId === +keyword) + 1) ||
74
+ const index = ((rssList.findIndex(i => i.rssId === +keyword) + 1) ||
60
75
  (rssList.findIndex(i => i.url == keyword) + 1) ||
61
76
  (rssList.findIndex(i => i.url.indexOf(keyword) + 1) + 1) ||
62
77
  (rssList.findIndex(i => i.title.indexOf(keyword) + 1) + 1)) - 1;
78
+ // 边界检查:确保索引有效
79
+ if (index < 0 || index >= rssList.length) {
80
+ return undefined;
81
+ }
63
82
  return rssList[index];
64
83
  }
65
84
  function getLastContent(item, config) {
@@ -69,6 +88,12 @@ function getLastContent(item, config) {
69
88
  }
70
89
  function formatArg(options, config) {
71
90
  let { arg, template, auth } = options;
91
+ const parseArrayArg = (value) => {
92
+ return value
93
+ .split('/')
94
+ .map(item => item.trim())
95
+ .filter(Boolean);
96
+ };
72
97
  // 特殊处理:提取完整的 proxyAgent URL
73
98
  let proxyAgentUrl;
74
99
  if (arg && arg.includes('proxyAgent:')) {
@@ -95,9 +120,9 @@ function formatArg(options, config) {
95
120
  json.forceLength = parseInt(json.forceLength);
96
121
  // Array conversions
97
122
  if (json.filter && typeof json.filter === 'string')
98
- json.filter = json.filter.split("/");
123
+ json.filter = parseArrayArg(json.filter);
99
124
  if (json.block && typeof json.block === 'string')
100
- json.block = json.block.split("/");
125
+ json.block = parseArrayArg(json.block);
101
126
  // Proxy Argument Parsing (使用提取的完整 URL)
102
127
  if (proxyAgentUrl) {
103
128
  if (['false', 'none', ''].includes(String(proxyAgentUrl))) {
@@ -199,6 +224,142 @@ function mixinArg(arg, config) {
199
224
  (0, logger_1.debug)(config, `[DEBUG_PROXY] mixinArg return: ${JSON.stringify(res.proxyAgent)}`, 'mixin', 'details');
200
225
  return res;
201
226
  }
227
+ // ============ 拆分出的辅助函数 ============
228
+ /**
229
+ * 1. 抓取 RSS 数据
230
+ */
231
+ async function fetchRssItems(ctx, config, $http, rssItem, arg, feedDebug) {
232
+ const rssHubUrl = config.msg?.rssHubUrl || 'https://hub.slarker.me';
233
+ try {
234
+ const urls = rssItem.url.split("|").map((u) => (0, common_1.parseQuickUrl)(u, rssHubUrl, constants_1.quickList));
235
+ const fetchPromises = urls.map((url) => (0, parser_1.getRssData)(ctx, config, $http, url, arg));
236
+ const results = await Promise.all(fetchPromises);
237
+ return results.flat(1);
238
+ }
239
+ catch (err) {
240
+ const normalizedError = (0, error_handler_1.normalizeError)(err);
241
+ feedDebug(`Fetch failed for ${rssItem.title}: ${normalizedError.message}`, 'feeder', 'error', {
242
+ stage: 'fetch',
243
+ });
244
+ (0, error_tracker_1.trackError)(normalizedError, {
245
+ ...buildFeedLogContext(rssItem),
246
+ stage: 'fetch',
247
+ });
248
+ return [];
249
+ }
250
+ }
251
+ /**
252
+ * 2. 过滤关键字
253
+ */
254
+ function filterItems(config, items, arg, feedDebug) {
255
+ return items.filter(item => {
256
+ const matchKeyword = arg.filter?.find((keyword) => new RegExp(keyword, 'im').test(item.title) || new RegExp(keyword, 'im').test(item.description));
257
+ if (matchKeyword) {
258
+ feedDebug(`filter:${matchKeyword}`, 'feeder', 'info', { matchedKeyword: matchKeyword });
259
+ feedDebug(item, 'filter rss item', 'info', { matchedKeyword: matchKeyword });
260
+ }
261
+ return !matchKeyword;
262
+ });
263
+ }
264
+ /**
265
+ * 3. 检查更新(时间+内容)
266
+ */
267
+ function checkForUpdates(config, rssItem, items, arg, feedDebug) {
268
+ // 按时间排序
269
+ let itemArray = items
270
+ .sort((a, b) => (0, common_1.parsePubDate)(config, b.pubDate).getTime() - (0, common_1.parsePubDate)(config, a.pubDate).getTime());
271
+ if (itemArray.length === 0) {
272
+ return { newItems: [], latestPubDate: new Date(), currentContent: [] };
273
+ }
274
+ const latestItem = itemArray[0];
275
+ const lastPubDate = (0, common_1.parsePubDate)(config, latestItem.pubDate);
276
+ feedDebug(`${rssItem.title}: Latest item date=${lastPubDate.toISOString()}, DB date=${rssItem.lastPubDate ? new Date(rssItem.lastPubDate).toISOString() : 'none'}`, 'feeder', 'details');
277
+ // 准备去重内容
278
+ const currentContent = config.basic?.resendUpdataContent === 'all'
279
+ ? itemArray.map((i) => getLastContent(i, config))
280
+ : [getLastContent(latestItem, config)];
281
+ // 反转顺序(发送顺序:最早的先发)
282
+ if (arg.reverse) {
283
+ itemArray = itemArray.reverse();
284
+ }
285
+ let rssItemArray = [];
286
+ if (rssItem.arg.forceLength) {
287
+ // 强制长度模式:忽略时间,只取 N 条
288
+ rssItemArray = itemArray.slice(0, rssItem.arg.forceLength);
289
+ feedDebug(`${rssItem.title}: Force length mode, taking ${rssItemArray.length} items`, 'feeder', 'details');
290
+ }
291
+ else {
292
+ // 标准模式:时间 + 内容检查
293
+ feedDebug(`${rssItem.title}: Checking ${itemArray.length} items for updates`, 'feeder', 'details');
294
+ rssItemArray = itemArray.filter((v, i) => {
295
+ const currentItemTime = (0, common_1.parsePubDate)(config, v.pubDate).getTime();
296
+ const lastTime = rssItem.lastPubDate ? (0, common_1.parsePubDate)(config, rssItem.lastPubDate).getTime() : 0;
297
+ feedDebug(`[${i}] ${v.title?.substring(0, 30)}: time=${new Date(currentItemTime).toISOString()} > last=${new Date(lastTime).toISOString()} ? ${currentItemTime > lastTime}`, 'feeder', 'details');
298
+ // 严格时间检查
299
+ if (currentItemTime > lastTime) {
300
+ feedDebug(`[${i}] ✓ Item is new (time check)`, 'feeder', 'details');
301
+ return true;
302
+ }
303
+ // 内容哈希检查(时间相同但内容变化)
304
+ if (config.basic?.resendUpdataContent !== 'disable') {
305
+ const newItemContent = getLastContent(v, config);
306
+ const oldItemMatch = rssItem.lastContent?.itemArray?.find((old) => (newItemContent.guid && old.guid === newItemContent.guid) ||
307
+ (old.link === newItemContent.link && old.title === newItemContent.title));
308
+ if (oldItemMatch) {
309
+ const descriptionChanged = JSON.stringify(oldItemMatch.description) !== JSON.stringify(newItemContent.description);
310
+ if (descriptionChanged) {
311
+ feedDebug(`[${i}] ✓ Item is updated (content changed)`, 'feeder', 'details');
312
+ }
313
+ else {
314
+ feedDebug(`[${i}] ✗ Item filtered (already sent)`, 'feeder', 'details');
315
+ }
316
+ return descriptionChanged;
317
+ }
318
+ else {
319
+ feedDebug(`[${i}] ✗ Item filtered (no match in lastContent)`, 'feeder', 'details');
320
+ }
321
+ }
322
+ feedDebug(`[${i}] ✗ Item filtered (failed all checks)`, 'feeder', 'details');
323
+ return false;
324
+ });
325
+ // 应用最大条目限制
326
+ if (arg.maxRssItem) {
327
+ rssItemArray = rssItemArray.slice(0, arg.maxRssItem);
328
+ }
329
+ }
330
+ return { newItems: rssItemArray, latestPubDate: lastPubDate, currentContent };
331
+ }
332
+ /**
333
+ * 4. 生成消息
334
+ */
335
+ async function generateMessages(processor, items, rssItem, arg) {
336
+ const itemsToSend = [...items].reverse();
337
+ // 生成所有消息
338
+ const messageList = (await Promise.all(itemsToSend.map(async (i) => await processor.parseRssItem(i, { ...rssItem, ...arg }, rssItem.author)))).filter(m => m);
339
+ return { messageList, itemsToSend };
340
+ }
341
+ /**
342
+ * 5. 构建最终消息
343
+ */
344
+ function buildFinalMessage(config, messageList, rssItem, arg) {
345
+ let message = "";
346
+ const shouldMerge = arg.merge === true || config.basic?.merge === '一直合并' || (config.basic?.merge === '有多条更新时合并' && messageList.length > 1);
347
+ // 检查是否需要合并视频
348
+ const hasVideo = config.basic?.margeVideo && messageList.some(msg => /<video/.test(msg));
349
+ if (shouldMerge || hasVideo) {
350
+ message = `<message forward><author id="${rssItem.author}"/>${messageList.map(m => `<message>${m}</message>`).join("")}</message>`;
351
+ }
352
+ else {
353
+ message = messageList.join("");
354
+ }
355
+ // 添加提及
356
+ if (rssItem.followers && rssItem.followers.length > 0) {
357
+ const mentions = rssItem.followers.map((id) => `<at ${id === 'all' ? 'type="all"' : `id="${id}"`}/>`).join(" ");
358
+ message += `<message>${mentions}</message>`;
359
+ }
360
+ return message;
361
+ }
362
+ // ============ 主函数 ============
202
363
  /**
203
364
  * 生产者:抓取 RSS,发现新消息,存入队列
204
365
  */
@@ -210,9 +371,10 @@ async function feeder(deps, processor) {
210
371
  return;
211
372
  for (const rssItem of rssList) {
212
373
  try {
374
+ const feedDebug = createFeedDebug(config, rssItem);
213
375
  // 1. Prepare Arguments
214
376
  let arg = mixinArg(rssItem.arg || {}, config);
215
- (0, logger_1.debug)(config, `[DEBUG_PROXY] feeder mixinArg result proxyAgent: ${JSON.stringify(arg.proxyAgent)}`, 'feeder', 'details');
377
+ feedDebug(`[DEBUG_PROXY] feeder mixinArg result proxyAgent: ${JSON.stringify(arg.proxyAgent)}`, 'feeder', 'details');
216
378
  let originalArg = (0, koishi_1.clone)(rssItem.arg || {});
217
379
  // 2. Interval Check
218
380
  if (rssItem.arg.interval) {
@@ -229,132 +391,52 @@ async function feeder(deps, processor) {
229
391
  }
230
392
  }
231
393
  // 3. Fetch RSS Data
232
- // Use config.msg.rssHubUrl for quick url parsing
233
- const rssHubUrl = config.msg?.rssHubUrl || 'https://hub.slarker.me';
234
- let rssItemList = [];
235
- try {
236
- const urls = rssItem.url.split("|").map((u) => (0, common_1.parseQuickUrl)(u, rssHubUrl, constants_1.quickList));
237
- const fetchPromises = urls.map((url) => (0, parser_1.getRssData)(ctx, config, $http, url, arg));
238
- const results = await Promise.all(fetchPromises);
239
- rssItemList = results.flat(1);
240
- }
241
- catch (err) {
242
- (0, logger_1.debug)(config, `Fetch failed for ${rssItem.title}: ${err.message}`, 'feeder', 'info');
243
- continue;
244
- }
245
- if (rssItemList.length === 0)
246
- continue;
247
- // 4. Sort and Filter
248
- let itemArray = rssItemList
249
- .sort((a, b) => (0, common_1.parsePubDate)(config, b.pubDate).getTime() - (0, common_1.parsePubDate)(config, a.pubDate).getTime())
250
- .filter(item => {
251
- // Keyword filter
252
- const matchKeyword = arg.filter?.find((keyword) => new RegExp(keyword, 'im').test(item.title) || new RegExp(keyword, 'im').test(item.description));
253
- if (matchKeyword) {
254
- (0, logger_1.debug)(config, `filter:${matchKeyword}`, '', 'info');
255
- (0, logger_1.debug)(config, item, 'filter rss item', 'info');
256
- }
257
- return !matchKeyword;
258
- });
259
- if (itemArray.length === 0)
394
+ const rssItemList = await fetchRssItems(ctx, config, $http, rssItem, arg, feedDebug);
395
+ if (rssItemList.length === 0) {
396
+ await ctx.database.set('rssOwl', { id: rssItem.id }, {
397
+ lastPubDate: rssItem.lastPubDate,
398
+ arg: originalArg,
399
+ lastContent: rssItem.lastContent || { itemArray: [] }
400
+ });
260
401
  continue;
261
- // 5. Check for Updates
262
- const latestItem = itemArray[0];
263
- const lastPubDate = (0, common_1.parsePubDate)(config, latestItem.pubDate);
264
- (0, logger_1.debug)(config, `${rssItem.title}: Latest item date=${lastPubDate.toISOString()}, DB date=${rssItem.lastPubDate ? new Date(rssItem.lastPubDate).toISOString() : 'none'}`, 'feeder', 'details');
265
- // Prepare content for deduplication
266
- const currentContent = config.basic?.resendUpdataContent === 'all'
267
- ? itemArray.map((i) => getLastContent(i, config))
268
- : [getLastContent(latestItem, config)];
269
- // Reverse if needed for sending order (oldest first usually)
270
- if (arg.reverse) {
271
- itemArray = itemArray.reverse();
272
402
  }
273
- let rssItemArray = [];
274
- if (rssItem.arg.forceLength) {
275
- // Force length mode: ignore time, just take N items
276
- rssItemArray = itemArray.slice(0, arg.forceLength);
277
- (0, logger_1.debug)(config, `${rssItem.title}: Force length mode, taking ${rssItemArray.length} items`, 'feeder', 'details');
278
- }
279
- else {
280
- // Standard mode: Time & Content check
281
- (0, logger_1.debug)(config, `${rssItem.title}: Checking ${itemArray.length} items for updates`, 'feeder', 'details');
282
- rssItemArray = itemArray.filter((v, i) => {
283
- const currentItemTime = (0, common_1.parsePubDate)(config, v.pubDate).getTime();
284
- const lastTime = rssItem.lastPubDate ? (0, common_1.parsePubDate)(config, rssItem.lastPubDate).getTime() : 0;
285
- (0, logger_1.debug)(config, `[${i}] ${v.title?.substring(0, 30)}: time=${new Date(currentItemTime).toISOString()} > last=${new Date(lastTime).toISOString()} ? ${currentItemTime > lastTime}`, 'feeder', 'details');
286
- // Strict time check
287
- if (currentItemTime > lastTime) {
288
- (0, logger_1.debug)(config, `[${i}] ✓ Item is new (time check)`, 'feeder', 'details');
289
- return true;
290
- }
291
- // Content hash check (if time is same but content changed)
292
- if (config.basic?.resendUpdataContent !== 'disable') {
293
- const newItemContent = getLastContent(v, config);
294
- const oldItemMatch = rssItem.lastContent?.itemArray?.find((old) => (newItemContent.guid && old.guid === newItemContent.guid) ||
295
- (old.link === newItemContent.link && old.title === newItemContent.title));
296
- if (oldItemMatch) {
297
- // If description changed, it's an update
298
- const descriptionChanged = JSON.stringify(oldItemMatch.description) !== JSON.stringify(newItemContent.description);
299
- if (descriptionChanged) {
300
- (0, logger_1.debug)(config, `[${i}] ✓ Item is updated (content changed)`, 'feeder', 'details');
301
- }
302
- else {
303
- (0, logger_1.debug)(config, `[${i}] ✗ Item filtered (already sent)`, 'feeder', 'details');
304
- }
305
- return descriptionChanged;
306
- }
307
- else {
308
- (0, logger_1.debug)(config, `[${i}] ✗ Item filtered (no match in lastContent)`, 'feeder', 'details');
309
- }
310
- }
311
- (0, logger_1.debug)(config, `[${i}] ✗ Item filtered (failed all checks)`, 'feeder', 'details');
312
- return false;
403
+ // 4. Filter Items
404
+ const filteredItems = filterItems(config, rssItemList, arg, feedDebug);
405
+ if (filteredItems.length === 0) {
406
+ const latestItem = [...rssItemList]
407
+ .sort((a, b) => (0, common_1.parsePubDate)(config, b.pubDate).getTime() - (0, common_1.parsePubDate)(config, a.pubDate).getTime())[0];
408
+ await ctx.database.set('rssOwl', { id: rssItem.id }, {
409
+ lastPubDate: latestItem ? (0, common_1.parsePubDate)(config, latestItem.pubDate) : rssItem.lastPubDate,
410
+ arg: originalArg,
411
+ lastContent: latestItem
412
+ ? { itemArray: [getLastContent(latestItem, config)] }
413
+ : (rssItem.lastContent || { itemArray: [] })
313
414
  });
314
- // Apply Max Item Limit
315
- if (arg.maxRssItem) {
316
- rssItemArray = rssItemArray.slice(0, arg.maxRssItem);
317
- }
415
+ continue;
318
416
  }
319
- if (rssItemArray.length === 0) {
320
- (0, logger_1.debug)(config, `${rssItem.title}: No new items found after filtering`, 'feeder', 'info');
321
- // No new items, but we should still update 'lastContent' to latest state to prevent future drifts
417
+ // 5. Check for Updates
418
+ const { newItems, latestPubDate, currentContent } = checkForUpdates(config, rssItem, filteredItems, arg, feedDebug);
419
+ if (newItems.length === 0) {
420
+ feedDebug(`${rssItem.title}: No new items found after filtering`, 'feeder', 'info', { newItemCount: 0 });
322
421
  await ctx.database.set('rssOwl', { id: rssItem.id }, {
323
- lastPubDate,
422
+ lastPubDate: latestPubDate,
324
423
  arg: originalArg,
325
424
  lastContent: { itemArray: currentContent }
326
425
  });
327
426
  continue;
328
427
  }
329
- (0, logger_1.debug)(config, `${rssItem.title}: Found ${rssItemArray.length} new items`, 'feeder', 'info');
330
- (0, logger_1.debug)(config, rssItemArray.map(i => i.title), '', 'info');
331
- // 6. 生成消息并添加到队列(生产者核心逻辑)
332
- const itemsToSend = [...rssItemArray].reverse();
333
- // 生成所有消息
334
- const messageList = (await Promise.all(itemsToSend.map(async (i) => await processor.parseRssItem(i, { ...rssItem, ...arg }, rssItem.author)))).filter(m => m); // Filter empty messages
428
+ feedDebug(`${rssItem.title}: Found ${newItems.length} new items`, 'feeder', 'info', { newItemCount: newItems.length });
429
+ feedDebug(newItems.map(i => i.title), 'feeder', 'info', { newItemCount: newItems.length });
430
+ // 6. Generate Messages
431
+ const { messageList, itemsToSend } = await generateMessages(processor, newItems, rssItem, arg);
335
432
  if (messageList.length === 0) {
336
- (0, logger_1.debug)(config, `${rssItem.title}: Items found but parsed to empty messages`, 'feeder', 'info');
337
- // Items found but parsed to empty (e.g. filtered by video mode)
338
- await ctx.database.set('rssOwl', { id: rssItem.id }, { lastPubDate, arg: originalArg, lastContent: { itemArray: currentContent } });
433
+ feedDebug(`${rssItem.title}: Items found but parsed to empty messages`, 'feeder', 'info', { newItemCount: newItems.length });
434
+ await ctx.database.set('rssOwl', { id: rssItem.id }, { lastPubDate: latestPubDate, arg: originalArg, lastContent: { itemArray: currentContent } });
339
435
  continue;
340
436
  }
341
- // 7. 构建最终消息
342
- let message = "";
343
- const shouldMerge = arg.merge === true || config.basic?.merge === '一直合并' || (config.basic?.merge === '有多条更新时合并' && messageList.length > 1);
344
- // Check for video merge requirement
345
- const hasVideo = config.basic?.margeVideo && messageList.some(msg => /<video/.test(msg));
346
- if (shouldMerge || hasVideo) {
347
- message = `<message forward><author id="${rssItem.author}"/>${messageList.map(m => `<message>${m}</message>`).join("")}</message>`;
348
- }
349
- else {
350
- message = messageList.join("");
351
- }
352
- // Add mentions
353
- if (rssItem.followers && rssItem.followers.length > 0) {
354
- const mentions = rssItem.followers.map((id) => `<at ${id === 'all' ? 'type="all"' : `id="${id}"`}/>`).join(" ");
355
- message += `<message>${mentions}</message>`;
356
- }
357
- // 8. 添加任务到队列(关键变更:不再直接发送)
437
+ // 7. Build Final Message
438
+ const message = buildFinalMessage(config, messageList, rssItem, arg);
439
+ // 8. Add to Queue
358
440
  const taskContent = {
359
441
  message,
360
442
  originalItem: itemsToSend[0],
@@ -373,24 +455,38 @@ async function feeder(deps, processor) {
373
455
  platform: rssItem.platform,
374
456
  content: taskContent
375
457
  });
376
- (0, logger_1.debug)(config, `✓ 已添加到发送队列: ${rssItem.title}`, 'feeder', 'info');
377
- // 9. 更新数据库状态(关键:无论发送是否成功,都更新 lastPubDate)
378
- // 这样即使 Bot 掉线,重启后也不会重复发送旧消息
458
+ feedDebug(`✓ 已添加到发送队列: ${rssItem.title}`, 'feeder', 'info', {
459
+ queuedItemTitle: itemsToSend[0]?.title,
460
+ });
461
+ // 9. Update Database State
379
462
  await ctx.database.set('rssOwl', { id: rssItem.id }, {
380
- lastPubDate,
463
+ lastPubDate: latestPubDate,
381
464
  arg: originalArg,
382
465
  lastContent: { itemArray: currentContent }
383
466
  });
384
467
  }
385
468
  catch (err) {
386
- (0, logger_1.debug)(config, `Feeder error for ${rssItem.url}: ${err.message}`, 'feeder', 'error');
469
+ const normalizedError = (0, error_handler_1.normalizeError)(err);
470
+ const feedContext = buildFeedLogContext(rssItem);
471
+ (0, logger_1.debug)(config, `Feeder error for ${rssItem.url}: ${normalizedError.message}`, 'feeder', 'error', feedContext);
472
+ (0, error_tracker_1.trackError)(normalizedError, feedContext);
387
473
  }
388
474
  }
389
475
  }
390
476
  function startFeeder(ctx, config, $http, processor, queueManager) {
391
477
  const deps = { ctx, config, $http, queueManager };
478
+ const lifecycleDebug = (0, logger_1.createDebugWithContext)(config, { lifecycle: 'feeder' });
392
479
  // Initial run
393
- feeder(deps, processor).catch(err => console.error("Initial feeder run failed:", err));
480
+ feeder(deps, processor).catch(err => {
481
+ const normalizedError = (0, error_handler_1.normalizeError)(err);
482
+ lifecycleDebug(`Initial feeder run failed: ${normalizedError.message}`, 'feeder', 'error', {
483
+ operation: 'initial-feeder-run',
484
+ });
485
+ (0, error_tracker_1.trackError)(normalizedError, {
486
+ lifecycle: 'feeder',
487
+ operation: 'initial-feeder-run',
488
+ });
489
+ });
394
490
  // 启动生产者定时器(抓取 RSS)
395
491
  const refreshInterval = (config.basic?.refresh || 600) * 1000;
396
492
  interval = setInterval(async () => {
@@ -407,9 +503,22 @@ function startFeeder(ctx, config, $http, processor, queueManager) {
407
503
  await queueManager.processQueue();
408
504
  }, queueProcessInterval);
409
505
  // 立即处理一次队列(启动时)
410
- queueManager.processQueue().catch(err => console.error("Initial queue processing failed:", err));
506
+ queueManager.processQueue().catch(err => {
507
+ const normalizedError = (0, error_handler_1.normalizeError)(err);
508
+ lifecycleDebug(`Initial queue processing failed: ${normalizedError.message}`, 'queue', 'error', {
509
+ operation: 'initial-queue-processing',
510
+ });
511
+ (0, error_tracker_1.trackError)(normalizedError, {
512
+ lifecycle: 'feeder',
513
+ operation: 'initial-queue-processing',
514
+ });
515
+ });
516
+ lifecycleDebug('Feeder started', 'feeder', 'info', {
517
+ refreshInterval,
518
+ queueProcessInterval,
519
+ });
411
520
  }
412
- function stopFeeder() {
521
+ function stopFeeder(config) {
413
522
  if (interval) {
414
523
  clearInterval(interval);
415
524
  interval = null;
@@ -418,4 +527,8 @@ function stopFeeder() {
418
527
  clearInterval(queueInterval);
419
528
  queueInterval = null;
420
529
  }
530
+ if (config) {
531
+ const lifecycleDebug = (0, logger_1.createDebugWithContext)(config, { lifecycle: 'feeder' });
532
+ lifecycleDebug('Feeder stopped', 'feeder', 'info');
533
+ }
421
534
  }
@@ -16,4 +16,9 @@ export declare class RssItemProcessor {
16
16
  private processLinkTemplate;
17
17
  private processVideos;
18
18
  private formatVideoList;
19
+ /**
20
+ * 统一的图片渲染方法
21
+ * 提取了 custom、default、only description、link 模板中重复的图片渲染逻辑
22
+ */
23
+ private renderImage;
19
24
  }