@anyul/koishi-plugin-rss 5.0.2 → 5.0.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,15 @@
+ import { Context } from 'koishi';
+ import { Config, rssArg } from '../types';
+ import { RssItemProcessor } from './item-processor';
+ export interface FeederDependencies {
+     ctx: Context;
+     config: Config;
+     $http: any;
+ }
+ export declare function findRssItem(rssList: any[], keyword: number | string): any;
+ export declare function getLastContent(item: any, config: Config): any;
+ export declare function formatArg(options: any, config: Config): rssArg;
+ export declare function mixinArg(arg: any, config: Config): rssArg;
+ export declare function feeder(deps: FeederDependencies, processor: RssItemProcessor): Promise<void>;
+ export declare function startFeeder(ctx: Context, config: Config, $http: any, processor: RssItemProcessor): void;
+ export declare function stopFeeder(): void;
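
For orientation, a minimal wiring sketch of the declarations in this hunk (an assumption, not taken from the package source: the processor construction and the use of ctx.http as the $http client are placeholders, and RssItemProcessor's constructor is not shown in this diff):

import { Context } from 'koishi';
import { Config } from '../types';
import { RssItemProcessor } from './item-processor';
import { startFeeder, stopFeeder } from './feeder';

export function apply(ctx: Context, config: Config) {
    // Hypothetical wiring: constructor arguments are not shown in this diff
    const processor = new RssItemProcessor(/* ... */);
    // Assumption: Koishi's built-in HTTP service stands in for $http
    startFeeder(ctx, config, ctx.http, processor);
    // Clear the polling interval when the plugin is unloaded
    ctx.on('dispose', () => stopFeeder());
}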
@@ -0,0 +1,403 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.findRssItem = findRssItem;
+ exports.getLastContent = getLastContent;
+ exports.formatArg = formatArg;
+ exports.mixinArg = mixinArg;
+ exports.feeder = feeder;
+ exports.startFeeder = startFeeder;
+ exports.stopFeeder = stopFeeder;
+ const koishi_1 = require("koishi");
+ const logger_1 = require("../utils/logger");
+ const common_1 = require("../utils/common");
+ const media_1 = require("../utils/media");
+ const parser_1 = require("./parser");
+ const constants_1 = require("../constants");
+ const message_cache_1 = require("../utils/message-cache");
+ let interval = null;
+ function findRssItem(rssList, keyword) {
+     let index = ((rssList.findIndex(i => i.rssId === +keyword) + 1) ||
+         (rssList.findIndex(i => i.url == keyword) + 1) ||
+         (rssList.findIndex(i => i.url.indexOf(keyword) + 1) + 1) ||
+         (rssList.findIndex(i => i.title.indexOf(keyword) + 1) + 1)) - 1;
+     return rssList[index];
+ }
+ function getLastContent(item, config) {
+     let arr = ['title', 'description', 'link', 'guid'];
+     let obj = Object.assign({}, ...arr.map(i => (0, koishi_1.clone)(item?.[i]) ? { [i]: item[i] } : {}));
+     return { ...obj, description: String(obj?.description).replaceAll(/\s/g, '') };
+ }
+ function formatArg(options, config) {
+     let { arg, template } = options;
+     let json = Object.assign({}, ...(arg?.split(',')?.map((i) => ({ [i.split(":")[0]]: i.split(":")[1] })) || []));
+     let key = ["forceLength", "reverse", "timeout", "interval", "merge", "maxRssItem", "firstLoad", "bodyWidth", "bodyPadding", "proxyAgent", "auth"];
+     let booleanKey = ['firstLoad', "reverse", 'merge'];
+     let numberKey = ['forceLength', "timeout", 'interval', 'maxRssItem', 'bodyWidth', 'bodyPadding'];
+     let falseContent = ['false', 'null', ''];
+     json = Object.assign({}, ...Object.keys(json).filter((i) => key.some((key) => key == i)).map((key) => ({ [key]: booleanKey.some((bkey) => bkey == key) ? falseContent.some((c) => c == json[key]) : numberKey.some((nkey) => nkey == key) ? (+json[key]) : json[key] })));
+     if (template && config.template) {
+         json['template'] = template;
+     }
+     // Date/Number conversions
+     if (json.interval)
+         json.interval = parseInt(json.interval) * 1000;
+     if (json.forceLength)
+         json.forceLength = parseInt(json.forceLength);
+     // Array conversions
+     if (json.filter && typeof json.filter === 'string')
+         json.filter = json.filter.split("/");
+     if (json.block && typeof json.block === 'string')
+         json.block = json.block.split("/");
+     // Proxy Argument Parsing
+     if (json.proxyAgent) {
+         if (['false', 'none', ''].includes(String(json.proxyAgent))) {
+             json.proxyAgent = { enabled: false };
+         }
+         else if (typeof json.proxyAgent === 'string') {
+             // Parse string proxy: socks5://127.0.0.1:7890
+             let protocolMatch = json.proxyAgent.match(/^(http|https|socks5)/);
+             let protocol = protocolMatch ? protocolMatch[1] : 'http';
+             let hostMatch = json.proxyAgent.match(/:\/\/([^:\/]+)/);
+             let host = hostMatch ? hostMatch[1] : '';
+             let portMatch = json.proxyAgent.match(/:(\d+)/);
+             let port = portMatch ? parseInt(portMatch[1]) : 7890;
+             let proxyAgent = { enabled: true, protocol, host, port };
+             if (json.auth) {
+                 let [username, password] = json.auth.split("/");
+                 proxyAgent.auth = { username, password };
+             }
+             json.proxyAgent = proxyAgent;
+         }
+     }
+     return json;
+ }
+ const mergeProxyAgent = (argProxy, configProxy, config) => {
+     // Log debug info
+     (0, logger_1.debug)(config, `合并代理配置 - argProxy: ${JSON.stringify(argProxy)}, configProxy.enabled: ${configProxy?.enabled}`, 'proxy merge debug', 'details');
+     // 1. Explicit disable in args (enabled must be explicitly set to false)
+     if (argProxy?.enabled === false) {
+         (0, logger_1.debug)(config, `订阅明确禁用代理`, 'proxy merge', 'details');
+         return { enabled: false };
+     }
+     // 2. Arg has a complete proxy config (enabled=true and a host) -> use the arg
+     if (argProxy?.enabled === true && argProxy?.host) {
+         (0, logger_1.debug)(config, `使用订阅的代理配置`, 'proxy merge', 'details');
+         return argProxy;
+     }
+     // 3. Arg is an empty object, undefined, null, or has no enabled field -> use the global config
+     // Key point: if the subscription has no proxy of its own, fall back to the global config
+     const shouldUseConfigProxy = !argProxy || Object.keys(argProxy || {}).length === 0 || argProxy?.enabled === undefined || argProxy?.enabled === null;
+     if (shouldUseConfigProxy) {
+         if (configProxy?.enabled) {
+             const result = {
+                 enabled: true,
+                 protocol: configProxy.protocol,
+                 host: configProxy.host,
+                 port: configProxy.port,
+                 auth: configProxy.auth?.enabled ? configProxy.auth : undefined
+             };
+             (0, logger_1.debug)(config, `使用全局代理: ${result.protocol}://${result.host}:${result.port}`, 'proxy merge', 'info');
+             return result;
+         }
+         else {
+             (0, logger_1.debug)(config, `全局代理未启用`, 'proxy merge', 'details');
+         }
+     }
+     // 4. Arg has enabled=true but no host -> fill in the missing fields from the global config
+     if (argProxy?.enabled === true && !argProxy?.host) {
+         const result = {
+             ...configProxy,
+             ...argProxy,
+             auth: configProxy?.auth?.enabled ? configProxy.auth : undefined
+         };
+         (0, logger_1.debug)(config, `订阅代理配置不完整,补充全局配置`, 'proxy merge', 'details');
+         return result;
+     }
+     // 5. Default disabled
+     (0, logger_1.debug)(config, `代理未配置,使用默认(禁用)`, 'proxy merge', 'details');
+     return { enabled: false };
+ };
+ const mergeProxyAgentWithLog = (argProxy, configProxy, config) => {
+     const result = mergeProxyAgent(argProxy, configProxy, config);
+     (0, logger_1.debug)(config, `[DEBUG_PROXY] mergeProxyAgent input: arg=${JSON.stringify(argProxy)} conf=${JSON.stringify(configProxy)} output=${JSON.stringify(result)}`, 'proxy merge', 'details');
+     return result;
+ };
+ function mixinArg(arg, config) {
+     const mergedProxy = mergeProxyAgentWithLog(arg?.proxyAgent, config.net?.proxyAgent, config);
+     // Log the merged proxy config (handy for debugging)
+     if (mergedProxy?.enabled) {
+         (0, logger_1.debug)(config, `使用代理: ${mergedProxy.protocol}://${mergedProxy.host}:${mergedProxy.port}`, 'proxy merge', 'details');
+     }
+     else {
+         (0, logger_1.debug)(config, `代理未启用`, 'proxy merge', 'details');
+     }
+     // Flatten config into base object, prioritizing Config values
+     // We explicitly take known safe config sections
+     const baseConfig = {
+         ...config.basic,
+         // Add other flat config sections if necessary
+     };
+     const res = {
+         ...baseConfig,
+         ...arg, // Args override basic config
+         filter: [...(config.msg?.keywordFilter || []), ...(arg?.filter || [])],
+         block: [...(config.msg?.keywordBlock || []), ...(arg?.block || [])],
+         template: arg.template ?? config.basic?.defaultTemplate,
+         proxyAgent: mergedProxy
+     };
+     (0, logger_1.debug)(config, `[DEBUG_PROXY] mixinArg return: ${JSON.stringify(res.proxyAgent)}`, 'mixin', 'details');
+     return res;
+ }
+ async function feeder(deps, processor) {
+     const { ctx, config, $http } = deps;
+     // debug(config, "feeder run", 'debug');
+     // Use type assertion for custom table
+     const rssList = await ctx.database.get('rssOwl', {});
+     if (!rssList || rssList.length === 0)
+         return;
+     for (const rssItem of rssList) {
+         try {
+             // 1. Prepare Arguments
+             let arg = mixinArg(rssItem.arg || {}, config);
+             (0, logger_1.debug)(config, `[DEBUG_PROXY] feeder mixinArg result proxyAgent: ${JSON.stringify(arg.proxyAgent)}`, 'feeder', 'details');
+             let originalArg = (0, koishi_1.clone)(rssItem.arg || {});
+             // 2. Interval Check
+             if (rssItem.arg.interval) {
+                 const now = Date.now();
+                 if (arg.nextUpdataTime && arg.nextUpdataTime > now)
+                     continue;
+                 // Calculate next update time
+                 if (arg.nextUpdataTime) {
+                     const missed = Math.ceil((now - arg.nextUpdataTime) / arg.interval);
+                     originalArg.nextUpdataTime = arg.nextUpdataTime + (arg.interval * (missed || 1));
+                 }
+                 else {
+                     originalArg.nextUpdataTime = now + arg.interval;
+                 }
+             }
+             // 3. Fetch RSS Data
+             // Use config.msg.rssHubUrl for quick url parsing
+             const rssHubUrl = config.msg?.rssHubUrl || 'https://hub.slarker.me';
+             let rssItemList = [];
+             try {
+                 const urls = rssItem.url.split("|").map((u) => (0, common_1.parseQuickUrl)(u, rssHubUrl, constants_1.quickList));
+                 const fetchPromises = urls.map((url) => (0, parser_1.getRssData)(ctx, config, $http, url, arg));
+                 const results = await Promise.all(fetchPromises);
+                 rssItemList = results.flat(1);
+             }
+             catch (err) {
+                 (0, logger_1.debug)(config, `Fetch failed for ${rssItem.title}: ${err.message}`, 'feeder', 'info');
+                 continue;
+             }
+             if (rssItemList.length === 0)
+                 continue;
+             // 4. Sort and Filter
+             let itemArray = rssItemList
+                 .sort((a, b) => (0, common_1.parsePubDate)(config, b.pubDate).getTime() - (0, common_1.parsePubDate)(config, a.pubDate).getTime())
+                 .filter(item => {
+                     // Keyword filter
+                     const matchKeyword = arg.filter?.find((keyword) => new RegExp(keyword, 'im').test(item.title) || new RegExp(keyword, 'im').test(item.description));
+                     if (matchKeyword) {
+                         (0, logger_1.debug)(config, `filter:${matchKeyword}`, '', 'info');
+                         (0, logger_1.debug)(config, item, 'filter rss item', 'info');
+                     }
+                     return !matchKeyword;
+                 });
+             if (itemArray.length === 0)
+                 continue;
+             // 5. Check for Updates
+             const latestItem = itemArray[0];
+             const lastPubDate = (0, common_1.parsePubDate)(config, latestItem.pubDate);
+             (0, logger_1.debug)(config, `${rssItem.title}: Latest item date=${lastPubDate.toISOString()}, DB date=${rssItem.lastPubDate ? new Date(rssItem.lastPubDate).toISOString() : 'none'}`, 'feeder', 'details');
+             // Prepare content for deduplication
+             const currentContent = config.basic?.resendUpdataContent === 'all'
+                 ? itemArray.map((i) => getLastContent(i, config))
+                 : [getLastContent(latestItem, config)];
+             // Reverse if needed for sending order (oldest first usually)
+             if (arg.reverse) {
+                 itemArray = itemArray.reverse();
+             }
+             let rssItemArray = [];
+             if (rssItem.arg.forceLength) {
+                 // Force length mode: ignore time, just take N items
+                 rssItemArray = itemArray.slice(0, arg.forceLength);
+                 (0, logger_1.debug)(config, `${rssItem.title}: Force length mode, taking ${rssItemArray.length} items`, 'feeder', 'details');
+             }
+             else {
+                 // Standard mode: Time & Content check
+                 (0, logger_1.debug)(config, `${rssItem.title}: Checking ${itemArray.length} items for updates`, 'feeder', 'details');
+                 rssItemArray = itemArray.filter((v, i) => {
+                     const currentItemTime = (0, common_1.parsePubDate)(config, v.pubDate).getTime();
+                     const lastTime = rssItem.lastPubDate ? (0, common_1.parsePubDate)(config, rssItem.lastPubDate).getTime() : 0;
+                     (0, logger_1.debug)(config, `[${i}] ${v.title?.substring(0, 30)}: time=${new Date(currentItemTime).toISOString()} > last=${new Date(lastTime).toISOString()} ? ${currentItemTime > lastTime}`, 'feeder', 'details');
+                     // Strict time check
+                     if (currentItemTime > lastTime) {
+                         (0, logger_1.debug)(config, `[${i}] ✓ Item is new (time check)`, 'feeder', 'details');
+                         return true;
+                     }
+                     // Content hash check (if time is same but content changed)
+                     if (config.basic?.resendUpdataContent !== 'disable') {
+                         const newItemContent = getLastContent(v, config);
+                         const oldItemMatch = rssItem.lastContent?.itemArray?.find((old) => (newItemContent.guid && old.guid === newItemContent.guid) ||
+                             (old.link === newItemContent.link && old.title === newItemContent.title));
+                         if (oldItemMatch) {
+                             // If description changed, it's an update
+                             const descriptionChanged = JSON.stringify(oldItemMatch.description) !== JSON.stringify(newItemContent.description);
+                             if (descriptionChanged) {
+                                 (0, logger_1.debug)(config, `[${i}] ✓ Item is updated (content changed)`, 'feeder', 'details');
+                             }
+                             else {
+                                 (0, logger_1.debug)(config, `[${i}] ✗ Item filtered (already sent)`, 'feeder', 'details');
+                             }
+                             return descriptionChanged;
+                         }
+                         else {
+                             (0, logger_1.debug)(config, `[${i}] ✗ Item filtered (no match in lastContent)`, 'feeder', 'details');
+                         }
+                     }
+                     (0, logger_1.debug)(config, `[${i}] ✗ Item filtered (failed all checks)`, 'feeder', 'details');
+                     return false;
+                 });
+                 // Apply Max Item Limit
+                 if (arg.maxRssItem) {
+                     rssItemArray = rssItemArray.slice(0, arg.maxRssItem);
+                 }
+             }
+             if (rssItemArray.length === 0) {
+                 (0, logger_1.debug)(config, `${rssItem.title}: No new items found after filtering`, 'feeder', 'info');
+                 // No new items, but we should still update 'lastContent' to latest state to prevent future drifts
+                 await ctx.database.set('rssOwl', { id: rssItem.id }, {
+                     lastPubDate,
+                     arg: originalArg,
+                     lastContent: { itemArray: currentContent }
+                 });
+                 continue;
+             }
+             (0, logger_1.debug)(config, `${rssItem.title}: Found ${rssItemArray.length} new items`, 'feeder', 'info');
+             (0, logger_1.debug)(config, rssItemArray.map(i => i.title), '', 'info');
+             // 6. Process Items (Generate Messages)
+             const itemsToSend = [...rssItemArray].reverse();
+             const messageList = (await Promise.all(itemsToSend.map(async (i) => await processor.parseRssItem(i, { ...rssItem, ...arg }, rssItem.author)))).filter(m => m); // Filter empty messages
+             if (messageList.length === 0) {
+                 (0, logger_1.debug)(config, `${rssItem.title}: Items found but parsed to empty messages`, 'feeder', 'info');
+                 // Items found but parsed to empty (e.g. filtered by video mode)
+                 await ctx.database.set('rssOwl', { id: rssItem.id }, { lastPubDate, arg: originalArg, lastContent: { itemArray: currentContent } });
+                 continue;
+             }
+             // 7. Construct Final Message
+             let message = "";
+             const shouldMerge = arg.merge === true || config.basic?.merge === '一直合并' || (config.basic?.merge === '有多条更新时合并' && messageList.length > 1);
+             // Check for video merge requirement
+             const hasVideo = config.basic?.margeVideo && messageList.some(msg => /<video/.test(msg));
+             if (shouldMerge || hasVideo) {
+                 message = `<message forward><author id="${rssItem.author}"/>${messageList.map(m => `<message>${m}</message>`).join("")}</message>`;
+             }
+             else {
+                 message = messageList.join("");
+             }
+             // Add mentions
+             if (rssItem.followers && rssItem.followers.length > 0) {
+                 const mentions = rssItem.followers.map((id) => `<at ${id === 'all' ? 'type="all"' : `id="${id}"`}/>`).join(" ");
+                 message += `<message>${mentions}</message>`;
+             }
+             // 8. Send Broadcast
+             try {
+                 (0, logger_1.debug)(config, `Sending update for ${rssItem.title} to ${rssItem.platform}:${rssItem.guildId}`, 'feeder', 'details');
+                 // Koishi's broadcast picks an available bot automatically; no manual check needed
+                 // The author field accepts both a user ID and a bot selfId
+                 // Send the message
+                 try {
+                     await ctx.broadcast([`${rssItem.platform}:${rssItem.guildId}`], message);
+                     (0, logger_1.debug)(config, `更新成功:${rssItem.title}`, '', 'info');
+                 }
+                 catch (sendError) {
+                     // OneBot retcode 1200: unsupported message format (usually video)
+                     if (sendError.code?.toString?.() === '1200' || sendError.message?.includes('1200')) {
+                         (0, logger_1.debug)(config, `消息格式不被支持,尝试清理视频元素后重试: ${rssItem.title}`, 'feeder', 'info');
+                         // Strip <video> elements but keep the video link
+                         const fallbackMessage = message
+                             .replace(/<video[^>]*>.*?<\/video>/gis, (match) => {
+                                 // Extract the video URL
+                                 const srcMatch = match.match(/src=["']([^"']+)["']/);
+                                 if (srcMatch) {
+                                     return `\n🎬 视频: ${srcMatch[1]}\n`;
+                                 }
+                                 return '\n[视频不支持]\n';
+                             });
+                         try {
+                             await ctx.broadcast([`${rssItem.platform}:${rssItem.guildId}`], fallbackMessage);
+                             (0, logger_1.debug)(config, `降级发送成功:${rssItem.title}`, '', 'info');
+                         }
+                         catch (retryError) {
+                             (0, logger_1.debug)(config, `降级发送也失败: ${retryError.message}`, 'feeder', 'error');
+                             throw retryError;
+                         }
+                     }
+                     else {
+                         throw sendError;
+                     }
+                 }
+                 // Cache the final message that was sent
+                 if (config.cache?.enabled && messageList.length > 0) {
+                     const cache = (0, message_cache_1.getMessageCache)();
+                     if (cache) {
+                         // Cache the final form of each message
+                         for (let i = 0; i < itemsToSend.length && i < messageList.length; i++) {
+                             const item = itemsToSend[i];
+                             const finalMsg = messageList[i];
+                             try {
+                                 await cache.addMessage({
+                                     rssId: rssItem.rssId.toString(),
+                                     guildId: rssItem.guildId,
+                                     platform: rssItem.platform,
+                                     title: item.title || '',
+                                     content: item.description || '',
+                                     link: item.link || '',
+                                     pubDate: (0, common_1.parsePubDate)(config, item.pubDate),
+                                     imageUrl: item.enclosure?.url || '',
+                                     videoUrl: '',
+                                     finalMessage: finalMsg // the final message as sent
+                                 });
+                             }
+                             catch (err) {
+                                 (0, logger_1.debug)(config, `缓存消息失败: ${err.message}`, 'cache', 'info');
+                             }
+                         }
+                     }
+                 }
+             }
+             catch (err) {
+                 (0, logger_1.debug)(config, `RSS推送失败 [${rssItem.title}]: ${err.message}`, 'feeder', 'error');
+                 console.error(`RSS推送失败 [${rssItem.title}]: ${err.message}`);
+                 // Even if sending fails, update the database state to avoid endless retries
+             }
+             // 9. Update Database State
+             await ctx.database.set('rssOwl', { id: rssItem.id }, {
+                 lastPubDate,
+                 arg: originalArg,
+                 lastContent: { itemArray: currentContent }
+             });
+         }
+         catch (err) {
+             (0, logger_1.debug)(config, `Feeder error for ${rssItem.url}: ${err.message}`, 'feeder', 'error');
+         }
+     }
+ }
+ function startFeeder(ctx, config, $http, processor) {
+     const deps = { ctx, config, $http };
+     // Initial run
+     feeder(deps, processor).catch(err => console.error("Initial feeder run failed:", err));
+     const refreshInterval = (config.basic?.refresh || 600) * 1000;
+     interval = setInterval(async () => {
+         if (config.basic?.imageMode === 'File') {
+             await (0, media_1.delCache)(config);
+         }
+         await feeder(deps, processor);
+     }, refreshInterval);
+ }
+ function stopFeeder() {
+     if (interval) {
+         clearInterval(interval);
+         interval = null;
+     }
+ }
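
For reference, the subscription argument string consumed by formatArg above is a comma-separated list of key:value pairs. A small traced example (the input string and the config value are illustrative; only whitelisted keys survive the parse):

// Traced against formatArg as published: 'interval' and 'maxRssItem' are numeric keys,
// and a proxyAgent value of 'false', 'none' or '' disables the proxy for the subscription.
const parsed = formatArg({ arg: 'interval:600,maxRssItem:5,proxyAgent:false' }, config);
// parsed => { interval: 600000, maxRssItem: 5, proxyAgent: { enabled: false } }
//   - interval is converted from seconds to milliseconds (600 -> 600000)
//   - keys outside the whitelist are dropped before the type conversions run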
@@ -1,15 +1,20 @@
  import { Context } from 'koishi';
  import { Config, rssArg } from '../types';
  import { RssItemProcessor } from './item-processor';
+ import { NotificationQueueManager } from './notification-queue';
  export interface FeederDependencies {
      ctx: Context;
      config: Config;
      $http: any;
+     queueManager: NotificationQueueManager;
  }
  export declare function findRssItem(rssList: any[], keyword: number | string): any;
  export declare function getLastContent(item: any, config: Config): any;
  export declare function formatArg(options: any, config: Config): rssArg;
  export declare function mixinArg(arg: any, config: Config): rssArg;
+ /**
+  * Producer: fetch RSS feeds, detect new items, and push them into the queue
+  */
  export declare function feeder(deps: FeederDependencies, processor: RssItemProcessor): Promise<void>;
- export declare function startFeeder(ctx: Context, config: Config, $http: any, processor: RssItemProcessor): void;
+ export declare function startFeeder(ctx: Context, config: Config, $http: any, processor: RssItemProcessor, queueManager: NotificationQueueManager): void;
  export declare function stopFeeder(): void;
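
In 5.0.3 this declaration adds a NotificationQueueManager to FeederDependencies and to startFeeder, so feeder acts as the producer side of a queue. A sketch of the updated call site, assuming the queue manager is constructed elsewhere (its own API is not part of this diff; ctx, config and processor are as in the earlier wiring sketch):

import { NotificationQueueManager } from './notification-queue';
import { startFeeder, stopFeeder } from './feeder';

// Hypothetical: queueManager comes from wherever the plugin builds its queue (not shown here)
declare const queueManager: NotificationQueueManager;
startFeeder(ctx, config, ctx.http, processor, queueManager);
ctx.on('dispose', () => stopFeeder());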