better-ani-scraped 1.5.1 → 1.5.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/DOCUMENTATION.md CHANGED
@@ -27,7 +27,7 @@ const crunchyroll = new AnimeScraper('crunchyroll') //for Crunchyroll

  ## `AnimeScraper("animesama")` methods

- - [searchAnime](#animesamasearchanimequery-limit--10-wantedlanguages--vostfr-vf-vastfr-wantedtypes--anime-film)
+ - [searchAnime](#animesamasearchanimequery-limit--10-wantedlanguages--vostfr-vf-vastfr-wantedtypes--anime-film-page--null)
  - [getSeasons](#animesamagetseasonsanimeurl-language--vostfr)
  - [getEpisodeTitles](#animesamagetepisodetitlesseasonurl-customchromiumpath)
  - [getEmbed](#animesamagetembedseasonurl-hostpriority--sibnet-vidmoly)
@@ -37,7 +37,7 @@ const crunchyroll = new AnimeScraper('crunchyroll') //for Crunchyroll
  - [getLatestEpisodes](#animesamagetlatestepisodeslanguagefilter--null)
  - [getRandomAnime](#animesamagetrandomanimewantedlanguages--vostfr-vf-vastfr-wantedtypes--anime-film-maxattempts--null-attempt--0)

- ### `animesama.searchAnime(query, limit = 10, wantedLanguages = ["vostfr", "vf", "vastfr"], wantedTypes = ["Anime", "Film"])`
+ ### `animesama.searchAnime(query, limit = 10, wantedLanguages = ["vostfr", "vf", "vastfr"], wantedTypes = ["Anime", "Film"], page = null)`
  Searches for anime titles that match the given query.

  - **Parameters:**
@@ -45,6 +45,7 @@ Searches for anime titles that match the given query.
  - `limit` *(number)*: Maximum number of results to return (default: 10).
  - `wantedLanguages` *(string[])*: Array of wanted languages.
  - `wantedTypes` *(string[])*: Array of wanted types.
+ - `page` *(number)*: The catalog page number.
  - **Returns:**
  An array of anime objects:
  ```js
@@ -3,7 +3,7 @@ import { AnimeScraper } from "../../index.js"; // REPLACE BY "from 'better-ani-s
  const main = async () => {
    const animesama = new AnimeScraper('animesama');

-   const search = await animesama.searchAnime("86", 3, ["vostfr", "vf", "vastfr"], ["Anime", "Film"]);
+   const search = await animesama.searchAnime("a", 100, ["vostfr", "vf", "vastfr"], ["Anime", "Film"], 2);
    console.log("Search Results:", search);
  };

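Taken together, the DOCUMENTATION.md and example changes above add a trailing `page` argument to `animesama.searchAnime`. The following is a minimal, hypothetical usage sketch (not taken from the package itself); it assumes better-ani-scraped 1.5.2 is installed and reflects the behaviour visible in the implementation diff below: a page number fetches only that catalogue page, while the default `null` crawls pages until `limit` results are collected.

```js
// Hypothetical usage sketch for the new `page` argument (query strings and limits invented for illustration).
import { AnimeScraper } from "better-ani-scraped";

const main = async () => {
  const animesama = new AnimeScraper("animesama");

  // Fetch only catalogue page 2 (new in 1.5.2: the trailing `page` argument).
  const pageTwo = await animesama.searchAnime("a", 100, ["vostfr", "vf", "vastfr"], ["Anime", "Film"], 2);

  // Omit `page` (defaults to null): pages are crawled until `limit` results are collected.
  const firstTen = await animesama.searchAnime("naruto", 10);

  console.log(pageTwo.length, firstTen.length);
};

main();
```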
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "better-ani-scraped",
-   "version": "1.5.1",
+   "version": "1.5.2",
    "description": "Scrape anime data from different sources (only anime-sama.fr for the moment)",
    "main": "index.js",
    "scripts": {
@@ -47,67 +47,73 @@ export async function searchAnime(
    query,
    limit = 10,
    wantedLanguages = ["vostfr", "vf", "vastfr"],
-   wantedTypes = ["Anime", "Film"]
+   wantedTypes = ["Anime", "Film"],
+   page = null
  ) {
-   const url = `${CATALOGUE_URL}/?search=${encodeURIComponent(
-     query
-   )}`;
    const isWanted = (text, list) =>
      list.some(item => text.toLowerCase().includes(item.toLowerCase()));
-   const res = await axios.get(url, { headers: getHeaders(CATALOGUE_URL) });
-   const $ = cheerio.load(res.data);
+
    const results = [];

-   $("a.flex.divide-x").each((i, el) => {
-     if (i >= limit) return false;
+   const fetchPage = async (pageNum) => {
+     const url =
+       pageNum === 1
+         ? `${CATALOGUE_URL}/?search=${encodeURIComponent(query)}`
+         : `${CATALOGUE_URL}/?search=${encodeURIComponent(query)}&page=${pageNum}`;

-     const anchor = $(el);
-     const link = anchor.attr("href");
-     const title = anchor.find("h1").first().text().trim();
-     const altRaw = anchor
-       .find("p.text-xs.opacity-40.italic")
-       .first()
-       .text()
-       .trim();
-     const cover = anchor.find("img").first().attr("src");
+     const res = await axios.get(url, { headers: getHeaders(CATALOGUE_URL) });
+     const $ = cheerio.load(res.data);

-     const tagText = anchor.find("p").filter((_, p) =>
-       isWanted($(p).text(), wantedTypes)
-     ).first().text();
+     const containers = $("a.flex.divide-x");

-     const languageText = anchor.find("p").filter((_, p) =>
-       isWanted($(p).text(), wantedLanguages)
-     ).first().text();
+     containers.each((_, el) => {
+       if (results.length >= limit) return false;

-     const altTitles = altRaw
-       ? altRaw
-           .split(",")
-           .map((t) => t.trim())
-           .filter(Boolean)
-       : [];
+       const anchor = $(el);
+       const link = anchor.attr("href");
+       const title = anchor.find("h1").first().text().trim();
+       const altRaw = anchor.find("p.text-xs.opacity-40.italic").first().text().trim();
+       const cover = anchor.find("img").first().attr("src");

-     const genreRaw = anchor
-       .find("p.text-xs.font-medium.text-gray-300")
-       .first()
-       .text()
-       .trim();
-     const genres = genreRaw
-       ? genreRaw
-           .split(",")
-           .map((g) => g.trim())
-           .filter(Boolean)
-       : [];
+       const tagText = anchor.find("p").filter((_, p) =>
+         isWanted($(p).text(), wantedTypes)
+       ).first().text();

-     if (title && link && tagText && languageText) {
-       results.push({
-         title,
-         altTitles,
-         genres,
-         url: link.startsWith("http") ? link : `${CATALOGUE_URL}${link}`,
-         cover,
-       });
+       const languageText = anchor.find("p").filter((_, p) =>
+         isWanted($(p).text(), wantedLanguages)
+       ).first().text();
+
+       const altTitles = altRaw
+         ? altRaw.split(",").map((t) => t.trim()).filter(Boolean)
+         : [];
+
+       const genreRaw = anchor.find("p.text-xs.font-medium.text-gray-300").first().text().trim();
+       const genres = genreRaw
+         ? genreRaw.split(",").map((g) => g.trim()).filter(Boolean)
+         : [];
+
+       if (title && link && tagText && languageText) {
+         results.push({
+           title,
+           altTitles,
+           genres,
+           url: link.startsWith("http") ? link : `${CATALOGUE_URL}${link}`,
+           cover,
+         });
+       }
+     });
+
+     return containers.length > 0;
+   };
+
+   if (page) {
+     await fetchPage(page);
+   } else {
+     let currentPage = 1;
+     while (await fetchPage(currentPage++) && results.length < limit) {
+       await new Promise((res) => setTimeout(res, 300));
      }
-   });
+   }

    return results;
  }
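Behaviourally, the hunk above replaces the single-page scrape with a `fetchPage(pageNum)` helper: an explicit `page` fetches exactly that catalogue page and skips the crawl loop, while the default `page = null` walks pages 1, 2, 3, … with a 300 ms pause between requests, stopping once `limit` results are collected or a page contains no `a.flex.divide-x` result containers (`fetchPage` returns whether any were found). Below is a self-contained sketch of that stop condition, with a stubbed `fetchPage` whose page contents are invented purely for illustration.

```js
// Illustration only: the same stop conditions as the new auto-pagination loop,
// with a stubbed fetchPage so the control flow can be followed in isolation.
// Run as an ES module (uses top-level await).
const results = [];
const limit = 5;

// Stub: pretend catalogue pages 1-3 each hold two results and page 4 is empty.
const fetchPage = async (pageNum) => {
  const items = pageNum <= 3 ? [`item-${pageNum}a`, `item-${pageNum}b`] : [];
  for (const item of items) {
    if (results.length >= limit) break;
    results.push(item);
  }
  return items.length > 0; // i.e. "did this page contain any result containers?"
};

let currentPage = 1;
// Keep crawling while the previous page was non-empty and the limit is not yet reached,
// pausing 300 ms between requests, as the updated searchAnime does.
while ((await fetchPage(currentPage++)) && results.length < limit) {
  await new Promise((res) => setTimeout(res, 300));
}

console.log(results); // at most `limit` items, gathered across catalogue pages
```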