cross-seed 6.0.0-8 → 6.0.0

This diff shows the changes between publicly available package versions as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (70)
  1. package/dist/action.js +177 -71
  2. package/dist/action.js.map +1 -1
  3. package/dist/arr.js +62 -54
  4. package/dist/arr.js.map +1 -1
  5. package/dist/clients/Deluge.js +70 -46
  6. package/dist/clients/Deluge.js.map +1 -1
  7. package/dist/clients/QBittorrent.js +110 -68
  8. package/dist/clients/QBittorrent.js.map +1 -1
  9. package/dist/clients/RTorrent.js +46 -23
  10. package/dist/clients/RTorrent.js.map +1 -1
  11. package/dist/clients/TorrentClient.js +14 -1
  12. package/dist/clients/TorrentClient.js.map +1 -1
  13. package/dist/clients/Transmission.js +30 -10
  14. package/dist/clients/Transmission.js.map +1 -1
  15. package/dist/cmd.js +46 -23
  16. package/dist/cmd.js.map +1 -1
  17. package/dist/config.template.cjs +59 -59
  18. package/dist/config.template.cjs.map +1 -1
  19. package/dist/configSchema.js +90 -26
  20. package/dist/configSchema.js.map +1 -1
  21. package/dist/configuration.js +4 -1
  22. package/dist/configuration.js.map +1 -1
  23. package/dist/constants.js +77 -9
  24. package/dist/constants.js.map +1 -1
  25. package/dist/dataFiles.js +4 -5
  26. package/dist/dataFiles.js.map +1 -1
  27. package/dist/db.js +2 -1
  28. package/dist/db.js.map +1 -1
  29. package/dist/decide.js +279 -169
  30. package/dist/decide.js.map +1 -1
  31. package/dist/diff.js +13 -3
  32. package/dist/diff.js.map +1 -1
  33. package/dist/errors.js.map +1 -1
  34. package/dist/indexers.js +94 -33
  35. package/dist/indexers.js.map +1 -1
  36. package/dist/inject.js +448 -0
  37. package/dist/inject.js.map +1 -0
  38. package/dist/jobs.js +13 -6
  39. package/dist/jobs.js.map +1 -1
  40. package/dist/logger.js +27 -9
  41. package/dist/logger.js.map +1 -1
  42. package/dist/migrations/00-initialSchema.js.map +1 -1
  43. package/dist/migrations/05-caps.js.map +1 -1
  44. package/dist/migrations/06-uniqueDecisions.js +29 -0
  45. package/dist/migrations/06-uniqueDecisions.js.map +1 -0
  46. package/dist/migrations/07-limits.js +12 -0
  47. package/dist/migrations/07-limits.js.map +1 -0
  48. package/dist/migrations/migrations.js +4 -0
  49. package/dist/migrations/migrations.js.map +1 -1
  50. package/dist/parseTorrent.js +6 -0
  51. package/dist/parseTorrent.js.map +1 -1
  52. package/dist/pipeline.js +224 -112
  53. package/dist/pipeline.js.map +1 -1
  54. package/dist/preFilter.js +122 -55
  55. package/dist/preFilter.js.map +1 -1
  56. package/dist/pushNotifier.js +7 -5
  57. package/dist/pushNotifier.js.map +1 -1
  58. package/dist/searchee.js +198 -17
  59. package/dist/searchee.js.map +1 -1
  60. package/dist/server.js +106 -54
  61. package/dist/server.js.map +1 -1
  62. package/dist/startup.js +16 -7
  63. package/dist/startup.js.map +1 -1
  64. package/dist/torrent.js +116 -50
  65. package/dist/torrent.js.map +1 -1
  66. package/dist/torznab.js +323 -153
  67. package/dist/torznab.js.map +1 -1
  68. package/dist/utils.js +229 -44
  69. package/dist/utils.js.map +1 -1
  70. package/package.json +11 -6
package/dist/torznab.js CHANGED
@@ -1,13 +1,15 @@
+ import chalk from "chalk";
  import ms from "ms";
+ import { inspect } from "util";
  import xml2js from "xml2js";
- import { getAvailableArrIds, getRelevantArrIds } from "./arr.js";
- import { EP_REGEX, SEASON_REGEX, USER_AGENT } from "./constants.js";
+ import { arrIdsEqual, formatFoundIds, getRelevantArrIds, scanAllArrsForMedia, } from "./arr.js";
+ import { CALIBRE_INDEXNUM_REGEX, EP_REGEX, SEASON_REGEX, UNKNOWN_TRACKER, USER_AGENT, } from "./constants.js";
  import { db } from "./db.js";
  import { CrossSeedError } from "./errors.js";
- import { getAllIndexers, getEnabledIndexers, IndexerStatus, updateIndexerStatus, } from "./indexers.js";
- import { Label, logger } from "./logger.js";
+ import { ALL_CAPS, getAllIndexers, getEnabledIndexers, IndexerStatus, updateIndexerCapsById, updateIndexerStatus, } from "./indexers.js";
+ import { Label, logger, logOnce } from "./logger.js";
  import { getRuntimeConfig } from "./runtimeConfig.js";
- import { cleanseSeparators, getAnimeQueries, getApikey, getMediaType, MediaType, nMsAgo, reformatTitleForSearching, sanitizeUrl, stripExtension, } from "./utils.js";
+ import { cleanTitle, combineAsyncIterables, extractInt, formatAsList, getAnimeQueries, getApikey, getLogString, getMediaType, isTruthy, MediaType, nMsAgo, reformatTitleForSearching, sanitizeUrl, stripExtension, stripMetaFromName, } from "./utils.js";
  function parseTorznabResults(xml) {
  const items = xml?.rss?.channel?.[0]?.item;
  if (!items || !Array.isArray(items)) {
@@ -19,13 +21,17 @@ function parseTorznabResults(xml) {
  tracker: item?.prowlarrindexer?.[0]?._ ??
  item?.jackettindexer?.[0]?._ ??
  item?.indexer?.[0]?._ ??
- "Unknown tracker",
+ UNKNOWN_TRACKER,
  link: item.link[0],
  size: Number(item.size[0]),
  pubDate: new Date(item.pubDate[0]).getTime(),
  }));
  }
  function parseTorznabCaps(xml) {
+ const limits = xml?.caps?.limits?.map((limit) => ({
+ default: parseInt(limit.$.default),
+ max: parseInt(limit.$.max),
+ }))[0] ?? { default: 100, max: 100 };
  const searchingSection = xml?.caps?.searching?.[0];
  const isAvailable = (searchTechnique) => searchTechnique?.[0]?.$?.available === "yes";
  function getSupportedIds(searchTechnique) {
@@ -37,21 +43,39 @@ function parseTorznabCaps(xml) {
  tvdbId: supportedIds.includes("tvdbid"),
  tmdbId: supportedIds.includes("tmdbid"),
  imdbId: supportedIds.includes("imdbid"),
+ tvMazeId: supportedIds.includes("tvmazeid"),
  };
  }
  const categoryCaps = xml?.caps?.categories?.[0]?.category;
  function getCatCaps(item) {
- const categoryNames = (item ?? []).map((category) => category.$.name);
- function indexerDoesSupportCat(category) {
- return categoryNames.some((cat) => cat.toLowerCase().includes(category));
- }
- return {
- movie: indexerDoesSupportCat("movie"),
- tv: indexerDoesSupportCat("tv"),
- anime: indexerDoesSupportCat("anime"),
- audio: indexerDoesSupportCat("audio"),
- book: indexerDoesSupportCat("book"),
+ const categories = (item ?? []).map((cat) => ({
+ id: parseInt(cat.$.id),
+ name: cat.$.name.toLowerCase(),
+ }));
+ const caps = {
+ movie: false,
+ tv: false,
+ anime: false,
+ xxx: false,
+ audio: false,
+ book: false,
+ additional: false,
  };
+ const keys = Object.keys(caps);
+ keys.splice(keys.indexOf("additional"), 1);
+ for (const { id, name } of categories) {
+ let isAdditional = true;
+ for (const cap of keys) {
+ if (name.includes(cap)) {
+ caps[cap] = true;
+ isAdditional = false;
+ }
+ }
+ if (isAdditional && id < 100000 && (id < 8000 || id > 8999)) {
+ caps.additional = true;
+ }
+ }
+ return caps;
  }
  return {
  search: Boolean(isAvailable(searchingSection?.search)),
@@ -60,43 +84,38 @@ function parseTorznabCaps(xml) {
  movieIdSearch: getSupportedIds(searchingSection?.["movie-search"]),
  tvIdSearch: getSupportedIds(searchingSection?.["tv-search"]),
  categories: getCatCaps(categoryCaps),
+ limits,
  };
  }
- async function createTorznabSearchQueries(searchee, ids, caps) {
- const nameWithoutExtension = stripExtension(searchee.name);
- const extractNumber = (str) => parseInt(str.match(/\d+/)[0]);
- const relevantIds = await getRelevantArrIds(searchee, ids, caps);
- const shouldUseIdSearch = Object.values(relevantIds).some((id) => id);
- const mediaType = getMediaType(searchee);
+ async function createTorznabSearchQueries(searchee, mediaType, caps, parsedMedia) {
+ const stem = stripExtension(searchee.title);
+ const relevantIds = parsedMedia
+ ? await getRelevantArrIds(caps, parsedMedia)
+ : {};
+ const useIds = Object.values(relevantIds).some(isTruthy);
  if (mediaType === MediaType.EPISODE && caps.tvSearch) {
- const match = nameWithoutExtension.match(EP_REGEX);
+ const match = stem.match(EP_REGEX);
  const groups = match.groups;
  return [
  {
  t: "tvsearch",
- q: shouldUseIdSearch
- ? undefined
- : cleanseSeparators(groups.title),
- season: groups.season
- ? extractNumber(groups.season)
- : groups.year,
+ q: useIds ? undefined : reformatTitleForSearching(stem),
+ season: groups.season ? extractInt(groups.season) : groups.year,
  ep: groups.episode
- ? extractNumber(groups.episode)
+ ? extractInt(groups.episode)
  : `${groups.month}/${groups.day}`,
  ...relevantIds,
  },
  ];
  }
  else if (mediaType === MediaType.SEASON && caps.tvSearch) {
- const match = nameWithoutExtension.match(SEASON_REGEX);
+ const match = stem.match(SEASON_REGEX);
  const groups = match.groups;
  return [
  {
  t: "tvsearch",
- q: shouldUseIdSearch
- ? undefined
- : cleanseSeparators(groups.title),
- season: extractNumber(groups.season),
+ q: useIds ? undefined : reformatTitleForSearching(stem),
+ season: extractInt(groups.season),
  ...relevantIds,
  },
  ];
@@ -105,28 +124,76 @@ async function createTorznabSearchQueries(searchee, ids, caps) {
  return [
  {
  t: "movie",
- q: shouldUseIdSearch
- ? undefined
- : reformatTitleForSearching(nameWithoutExtension),
+ q: useIds ? undefined : reformatTitleForSearching(stem),
  ...relevantIds,
  },
  ];
  }
+ if (useIds && caps.tvSearch && parsedMedia?.series) {
+ const eps = parsedMedia.episodes;
+ const season = eps.length > 0 ? eps[0].seasonNumber : undefined;
+ const ep = eps.length === 1 ? eps[0].episodeNumber : undefined;
+ return [
+ { t: "tvsearch", q: undefined, season, ep, ...relevantIds },
+ ];
+ }
+ else if (useIds && caps.movieSearch && parsedMedia?.movie) {
+ return [{ t: "movie", q: undefined, ...relevantIds }];
+ }
  else if (mediaType === MediaType.ANIME) {
- const animeQueries = getAnimeQueries(nameWithoutExtension);
- return animeQueries.map((animeQuery) => ({
+ return getAnimeQueries(stem).map((animeQuery) => ({
  t: "search",
  q: animeQuery,
  }));
  }
- else {
+ else if (mediaType === MediaType.VIDEO) {
  return [
  {
  t: "search",
- q: reformatTitleForSearching(nameWithoutExtension),
+ q: cleanTitle(stripMetaFromName(stem)),
  },
  ];
  }
+ else if (mediaType === MediaType.BOOK && searchee.path) {
+ return [
+ {
+ t: "search",
+ q: cleanTitle(stem.replace(CALIBRE_INDEXNUM_REGEX, "")),
+ },
+ ];
+ }
+ return [
+ {
+ t: "search",
+ q: cleanTitle(stem),
+ },
+ ];
+ }
+ export async function getSearchString(searchee) {
+ const mediaType = getMediaType(searchee);
+ const params = (await createTorznabSearchQueries(searchee, mediaType, ALL_CAPS))[0];
+ const season = params.season !== undefined ? `.S${params.season}` : "";
+ const ep = params.ep !== undefined ? `.E${params.ep}` : "";
+ return `${params.q}${season}${ep}`.toLowerCase();
+ }
+ /**
+ * Only for testing purposes. (createTorznabSearchQueries now accepts searchee
+ * instead of stem (title))
+ *
+ * Logs the queries that would be sent to indexers for id and non-id searches.
+ * Ensure that item exists in your arr for the id search example.
+ * Ensure mediaType is what cross-seed would actually parse the item as.
+ */
+ export async function logQueries(searcheeTitle, mediaType) {
+ const stem = stripExtension(searcheeTitle);
+ logger.info(
+ // @ts-expect-error needs conversion to use searchee instead of stem
+ `RAW: ${inspect(await createTorznabSearchQueries(stem, mediaType, ALL_CAPS))}`);
+ const res = await scanAllArrsForMedia(searcheeTitle, mediaType);
+ const parsedMedia = res.isOk() ? res.unwrap() : undefined;
+ logger.info(
+ // @ts-expect-error needs conversion to use searchee instead of stem
+ `ID: ${inspect(await createTorznabSearchQueries(stem, mediaType, ALL_CAPS, parsedMedia))}`);
  }
  export function indexerDoesSupportMediaType(mediaType, caps) {
  switch (mediaType) {
@@ -136,85 +203,98 @@ export function indexerDoesSupportMediaType(mediaType, caps) {
  case MediaType.MOVIE:
  return caps.movie;
  case MediaType.ANIME:
- return caps.anime;
+ case MediaType.VIDEO:
+ return caps.movie || caps.tv || caps.anime || caps.xxx;
  case MediaType.AUDIO:
  return caps.audio;
  case MediaType.BOOK:
  return caps.book;
  case MediaType.OTHER:
- return true;
+ return caps.additional;
  }
  }
- export async function queryRssFeeds() {
- const candidatesByUrl = await makeRequests(await getEnabledIndexers(), async () => [{ t: "search", q: "" }]);
- return candidatesByUrl.flatMap((e) => e.candidates);
+ export async function* rssPager(indexer, pageBackUntil) {
+ let earliestSeen = Infinity;
+ const limit = indexer.limits.max;
+ for (let i = 0; i < 10; i++) {
+ let currentPageCandidates;
+ try {
+ currentPageCandidates = await makeRequest({
+ indexerId: indexer.id,
+ baseUrl: indexer.url,
+ apikey: indexer.apikey,
+ query: { t: "search", q: "", limit, offset: i * limit },
+ });
+ }
+ catch (e) {
+ logger.error({
+ label: Label.TORZNAB,
+ message: `Paging indexer ${indexer.id} stopped: request failed for page ${i + 1}`,
+ });
+ logger.debug(e);
+ return;
+ }
+ const allNewPubDates = currentPageCandidates.map((c) => c.pubDate);
+ const currentPageEarliest = Math.min(...allNewPubDates);
+ const currentPageLatest = Math.max(...allNewPubDates);
+ const newCandidates = currentPageCandidates.filter((c) => c.pubDate < earliestSeen && c.pubDate >= pageBackUntil);
+ if (currentPageLatest > Date.now() + ms("10 minutes")) {
+ logOnce(`timezone-issues-${indexer.id}`, () => void logger.warn(`Indexer ${indexer.url} reported releases in the future. Its timezone may be misconfigured.`), ms("10 minutes"));
+ }
+ if (!newCandidates.length) {
+ logger.verbose({
+ label: Label.TORZNAB,
+ message: `Paging indexer ${indexer.id} stopped: nothing new in page ${i + 1}`,
+ });
+ return;
+ }
+ logger.verbose({
+ label: Label.TORZNAB,
+ message: `${newCandidates.length} new candidates on indexer ${indexer.id} page ${i + 1}`,
+ });
+ // yield each new candidate
+ yield* newCandidates;
+ earliestSeen = Math.min(earliestSeen, currentPageEarliest);
+ }
+ logger.verbose({
+ label: Label.TORZNAB,
+ message: `Paging indexer ${indexer.url} stopped: reached 10 pages`,
+ });
+ }
+ export async function* queryRssFeeds(previousRunTime) {
+ const indexers = await getEnabledIndexers();
+ // offset -5m for delayed RSS -> publishing time
+ const timeWithOffset = previousRunTime - 300000;
+ yield* combineAsyncIterables(indexers.map((indexer) => rssPager(indexer, timeWithOffset)));
  }
- export async function searchTorznab(searchee) {
- const { excludeRecentSearch, excludeOlder, torznab } = getRuntimeConfig();
+ export async function searchTorznab(searchee, cachedSearch, progress) {
+ const { torznab } = getRuntimeConfig();
  if (torznab.length === 0) {
- throw new Error("no indexers are available");
+ logger.warn({
+ label: Label.SEARCH,
+ message: "no indexers are available, skipping search",
+ });
+ return [];
  }
- const enabledIndexers = await getEnabledIndexers();
- const name = searchee.name;
  const mediaType = getMediaType(searchee);
- // search history for name across all indexers
- const timestampDataSql = await db("searchee")
- .join("timestamp", "searchee.id", "timestamp.searchee_id")
- .join("indexer", "timestamp.indexer_id", "indexer.id")
- .whereIn("indexer.id", enabledIndexers.map((i) => i.id))
- .andWhere({ name })
- .select({
- indexerId: "indexer.id",
- firstSearched: "timestamp.first_searched",
- lastSearched: "timestamp.last_searched",
- });
- const indexersToUse = enabledIndexers.filter((indexer) => {
- const entry = timestampDataSql.find((entry) => entry.indexerId === indexer.id);
- return (indexerDoesSupportMediaType(mediaType, JSON.parse(indexer.categories)) &&
- (!entry ||
- ((!excludeOlder ||
- entry.firstSearched > nMsAgo(excludeOlder)) &&
- (!excludeRecentSearch ||
- entry.lastSearched < nMsAgo(excludeRecentSearch)))));
- });
- const timeOrCatCallout = " (filtered by category/timestamps)";
- logger.info({
- label: Label.TORZNAB,
- message: `(${mediaType.toUpperCase()}) Searching ${indexersToUse.length} indexers for ${name}${indexersToUse.length < enabledIndexers.length
- ? timeOrCatCallout
- : ""}`,
- });
- const searcheeIds = indexersToUse.length > 0 ? await getAvailableArrIds(searchee) : {};
- return await makeRequests(indexersToUse, async (indexer) => {
+ const { indexersToSearch, parsedMedia } = await getAndLogIndexers(searchee, cachedSearch, mediaType, progress);
+ const indexerCandidates = await makeRequests(indexersToSearch, async (indexer) => {
  const caps = {
  search: indexer.searchCap,
  tvSearch: indexer.tvSearchCap,
  movieSearch: indexer.movieSearchCap,
- tvIdSearch: JSON.parse(indexer.tvIdCaps),
- movieIdSearch: JSON.parse(indexer.movieIdCaps),
- categories: JSON.parse(indexer.categories),
+ tvIdSearch: indexer.tvIdCaps,
+ movieIdSearch: indexer.movieIdCaps,
+ categories: indexer.categories,
+ limits: indexer.limits,
  };
- return await createTorznabSearchQueries(searchee, searcheeIds, caps);
+ return await createTorznabSearchQueries(searchee, mediaType, caps, parsedMedia);
  });
+ return [...cachedSearch.indexerCandidates, ...indexerCandidates];
  }
  export async function syncWithDb() {
  const { torznab } = getRuntimeConfig();
- const dbIndexers = await db("indexer")
- .where({ active: true })
- .select({
- id: "id",
- url: "url",
- apikey: "apikey",
- active: "active",
- status: "status",
- retryAfter: "retry_after",
- searchCap: "search_cap",
- tvSearchCap: "tv_search_cap",
- tvIdCaps: "tv_id_caps",
- movieSearchCap: "movie_search_cap",
- movieIdCaps: "movie_id_caps",
- categories: "cat_caps",
- });
+ const dbIndexers = await getAllIndexers();
  const inConfigButNotInDb = torznab.filter((configIndexer) => !dbIndexers.some((dbIndexer) => dbIndexer.url === sanitizeUrl(configIndexer)));
  const inDbButNotInConfig = dbIndexers.filter((dbIndexer) => !torznab.some((configIndexer) => sanitizeUrl(configIndexer) === dbIndexer.url));
  const apikeyUpdates = dbIndexers.reduce((acc, dbIndexer) => {
@@ -255,8 +335,8 @@ export async function syncWithDb() {
  .update({ status: IndexerStatus.OK });
  });
  }
- export function assembleUrl(urlStr, apikey, params) {
- const url = new URL(urlStr);
+ export function assembleUrl(baseUrl, apikey, params) {
+ const url = new URL(baseUrl);
  const searchParams = new URLSearchParams();
  searchParams.set("apikey", apikey);
  for (const [key, value] of Object.entries(params)) {
@@ -269,7 +349,7 @@ export function assembleUrl(urlStr, apikey, params) {
  async function fetchCaps(indexer) {
  let response;
  try {
- response = await fetch(assembleUrl(indexer.url, indexer.apikey, { t: "caps" }));
+ response = await fetch(assembleUrl(indexer.url, indexer.apikey, { t: "caps" }), { signal: AbortSignal.timeout(ms("10 seconds")) });
  }
  catch (e) {
  const error = new Error(`Indexer ${indexer.url} failed to respond, check verbose logs`);
@@ -306,20 +386,12 @@ function collateOutcomes(correlators, outcomes) {
  return { rejected, fulfilled };
  }, { rejected: [], fulfilled: [] });
  }
- async function updateCaps(indexers) {
+ export async function updateCaps() {
+ const indexers = await getAllIndexers();
  const outcomes = await Promise.allSettled(indexers.map((indexer) => fetchCaps(indexer)));
  const { fulfilled } = collateOutcomes(indexers.map((i) => i.id), outcomes);
  for (const [indexerId, caps] of fulfilled) {
- await db("indexer")
- .where({ id: indexerId })
- .update({
- search_cap: caps.search,
- tv_search_cap: caps.tvSearch,
- movie_search_cap: caps.movieSearch,
- movie_id_caps: JSON.stringify(caps.movieIdSearch),
- tv_id_caps: JSON.stringify(caps.tvIdSearch),
- cat_caps: JSON.stringify(caps.categories),
- });
+ await updateIndexerCapsById(indexerId, caps);
  }
  }
  export async function validateTorznabUrls() {
@@ -336,8 +408,7 @@ export async function validateTorznabUrls() {
  }
  }
  await syncWithDb();
- const allIndexers = await getAllIndexers();
- await updateCaps(allIndexers);
+ await updateCaps();
  const indexersWithoutSearch = await db("indexer")
  .where({ search_cap: false, active: true })
  .select({ id: "id", url: "url" });
@@ -349,44 +420,56 @@ export async function validateTorznabUrls() {
  logger.warn("no working indexers available");
  }
  }
- async function makeRequests(indexers, getQueries) {
+ /**
+ * Snooze indexers based on the response headers and status code.
+ * specifically for a search, probably not applicable to a caps fetch.
+ */
+ async function onResponseNotOk(response, indexerId) {
+ const retryAfterSeconds = Number(response.headers.get("Retry-After"));
+ const retryAfter = !Number.isNaN(retryAfterSeconds)
+ ? Date.now() + ms(`${retryAfterSeconds} seconds`)
+ : response.status === 429
+ ? Date.now() + ms("1 hour")
+ : Date.now() + ms("10 minutes");
+ await updateIndexerStatus(response.status === 429
+ ? IndexerStatus.RATE_LIMITED
+ : IndexerStatus.UNKNOWN_ERROR, retryAfter, [indexerId]);
+ }
+ async function makeRequest(request) {
  const { searchTimeout } = getRuntimeConfig();
- const searchUrls = await Promise.all(indexers.flatMap(async (indexer) => (await getQueries(indexer)).map((query) => assembleUrl(indexer.url, indexer.apikey, query)))).then((urls) => urls.flat());
- searchUrls.forEach((message) => void logger.verbose({ label: Label.TORZNAB, message }));
- const abortControllers = searchUrls.map(() => new AbortController());
- if (typeof searchTimeout === "number") {
- setTimeout(() => {
- for (const abortController of abortControllers) {
- abortController.abort();
- }
- }, searchTimeout).unref();
- }
- const outcomes = await Promise.allSettled(searchUrls.map((url, i) => fetch(url, {
+ const url = assembleUrl(request.baseUrl, request.apikey, request.query);
+ const abortSignal = typeof searchTimeout === "number"
+ ? AbortSignal.timeout(searchTimeout)
+ : undefined;
+ logger.verbose({
+ label: Label.TORZNAB,
+ message: `Querying indexer ${request.indexerId} at ${request.baseUrl} with ${inspect(request.query)}`,
+ });
+ const response = await fetch(url, {
  headers: { "User-Agent": USER_AGENT },
- signal: abortControllers[i].signal,
- })
- .then((response) => {
- if (!response.ok) {
- const retryAfterSeconds = Number(response.headers.get("Retry-After"));
- if (!Number.isNaN(retryAfterSeconds)) {
- updateIndexerStatus(response.status === 429
- ? IndexerStatus.RATE_LIMITED
- : IndexerStatus.UNKNOWN_ERROR, Date.now() + ms(`${retryAfterSeconds} seconds`), [indexers[i].id]);
- }
- else {
- updateIndexerStatus(response.status === 429
- ? IndexerStatus.RATE_LIMITED
- : IndexerStatus.UNKNOWN_ERROR, response.status === 429
- ? Date.now() + ms("1 hour")
- : Date.now() + ms("10 minutes"), [indexers[i].id]);
- }
- throw new Error(`request failed with code: ${response.status}`);
- }
- return response.text();
- })
- .then(xml2js.parseStringPromise)
- .then(parseTorznabResults)));
- const { rejected, fulfilled } = collateOutcomes(indexers.map((indexer) => indexer.id), outcomes);
+ signal: abortSignal,
+ });
+ if (!response.ok) {
+ await onResponseNotOk(response, request.indexerId);
+ throw new Error(`request failed with code: ${response.status}`);
+ }
+ const xml = await response.text();
+ const torznabResults = await xml2js.parseStringPromise(xml);
+ return parseTorznabResults(torznabResults);
+ }
+ async function makeRequests(indexers, getQueriesForIndexer) {
+ const requests = [];
+ for (const indexer of indexers) {
+ const queries = await getQueriesForIndexer(indexer);
+ requests.push(...queries.map((query) => ({
+ indexerId: indexer.id,
+ baseUrl: indexer.url,
+ apikey: indexer.apikey,
+ query,
+ })));
+ }
+ const outcomes = await Promise.allSettled(requests.map(makeRequest));
+ const { rejected, fulfilled } = collateOutcomes(requests.map((request) => request.indexerId), outcomes);
  for (const [indexerId, reason] of rejected) {
  logger.warn(`Failed to reach ${indexers.find((i) => i.id === indexerId).url}`);
  logger.debug(reason);
@@ -396,4 +479,91 @@ async function makeRequests(indexers, getQueries) {
  candidates: results,
  }));
  }
+ async function getAndLogIndexers(searchee, cachedSearch, mediaType, progress) {
+ const { excludeRecentSearch, excludeOlder } = getRuntimeConfig();
+ const searcheeLog = getLogString(searchee, chalk.bold.white);
+ const mediaTypeLog = chalk.white(mediaType.toUpperCase());
+ const enabledIndexers = await getEnabledIndexers();
+ // search history for name across all indexers
+ const name = searchee.title;
+ const timestampDataSql = await db("searchee")
+ .join("timestamp", "searchee.id", "timestamp.searchee_id")
+ .join("indexer", "timestamp.indexer_id", "indexer.id")
+ .whereIn("indexer.id", enabledIndexers.map((i) => i.id))
+ .andWhere({ name })
+ .select({
+ indexerId: "indexer.id",
+ firstSearched: "timestamp.first_searched",
+ lastSearched: "timestamp.last_searched",
+ });
+ const skipBefore = searchee.label !== Label.WEBHOOK && excludeOlder
+ ? nMsAgo(excludeOlder)
+ : Number.NEGATIVE_INFINITY;
+ const skipAfter = searchee.label !== Label.WEBHOOK && excludeRecentSearch
+ ? nMsAgo(excludeRecentSearch)
+ : Number.POSITIVE_INFINITY;
+ const timeFilteredIndexers = enabledIndexers.filter((indexer) => {
+ const entry = timestampDataSql.find((entry) => entry.indexerId === indexer.id);
+ if (!entry)
+ return true;
+ if (entry.firstSearched && entry.firstSearched < skipBefore) {
+ return false;
+ }
+ if (entry.lastSearched && entry.lastSearched > skipAfter) {
+ return false;
+ }
+ return true;
+ });
+ const indexersToUse = timeFilteredIndexers.filter((indexer) => {
+ return indexerDoesSupportMediaType(mediaType, indexer.categories);
+ });
+ // Invalidate cache if searchStr or ids is different
+ let shouldScanArr = true;
+ let parsedMedia;
+ const searchStr = await getSearchString(searchee);
+ if (cachedSearch.q === searchStr) {
+ shouldScanArr = false;
+ const res = await scanAllArrsForMedia(name, mediaType);
+ parsedMedia = res.isOk() ? res.unwrap() : undefined;
+ const ids = parsedMedia?.movie ?? parsedMedia?.series;
+ if (!arrIdsEqual(ids, cachedSearch.ids)) {
+ cachedSearch.indexerCandidates.length = 0;
+ cachedSearch.ids = ids;
+ }
+ }
+ else {
+ cachedSearch.q = searchStr;
+ cachedSearch.indexerCandidates.length = 0;
+ cachedSearch.ids = undefined; // Don't prematurely get ids if skipping
+ }
+ const indexersToSearch = indexersToUse.filter((indexer) => {
+ return !cachedSearch.indexerCandidates.some((candidates) => candidates.indexerId === indexer.id);
+ });
+ const filteringCauses = [
+ enabledIndexers.length > timeFilteredIndexers.length && "timestamps",
+ timeFilteredIndexers.length > indexersToUse.length && "category",
+ ].filter(isTruthy);
+ const reasonStr = filteringCauses.length
+ ? ` (filtered by ${formatAsList(filteringCauses, { sort: true })})`
+ : "";
+ if (!indexersToSearch.length && !cachedSearch.indexerCandidates.length) {
+ cachedSearch.q = null; // Won't scan arrs for multiple skips in a row
+ logger.info({
+ label: searchee.label,
+ message: `${progress}Skipped searching on indexers for ${searcheeLog}${reasonStr} | MediaType: ${mediaTypeLog} | IDs: N/A`,
+ });
+ return { indexersToSearch };
+ }
+ if (shouldScanArr) {
+ const res = await scanAllArrsForMedia(name, mediaType);
+ parsedMedia = res.isOk() ? res.unwrap() : undefined;
+ cachedSearch.ids = parsedMedia?.movie ?? parsedMedia?.series;
+ }
+ const idsStr = cachedSearch.ids ? formatFoundIds(cachedSearch.ids) : "NONE";
+ logger.info({
+ label: searchee.label,
+ message: `${progress}Searching for ${searcheeLog} | MediaType: ${mediaTypeLog} | IDs: ${idsStr}`,
+ });
+ return { indexersToSearch, parsedMedia };
+ }
  //# sourceMappingURL=torznab.js.map