better-ani-scraped 1.0.0 → 1.1.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -5,8 +5,6 @@ Better-Ani-Scraped
  A set of utility functions for scraping anime data from multiple sources. This tool allows you to search for anime, retrieve information, get episodes, and more.
  <p>
  <p align="center">
- <a href="https://www.npmjs.com/package/ani-scraped"><img src="https://img.shields.io/npm/v/ani-scraped"></a>
- <a href="https://www.npmjs.com/package/ani-scraped"><img src="https://img.shields.io/npm/dw/ani-scraped"></a>
  <p>
 
  <p align="center">
@@ -45,4 +43,4 @@ Please make sure you review the websites' terms of service and obtain permission
 
 
  ### Special thanks to
- - [Hxpe Dev](https://github.com/hxpe_dev)
+ - [Hxpe Dev](https://github.com/hxpe-dev)
@@ -16,7 +16,7 @@ const main = async () => {
  ]);
  console.log("Embed Links:", embeds);
 
- const videoUrl = await getVideoUrlFromEmbed("sibnet", embeds[11])
+ const videoUrl = await getVideoUrlFromEmbed("sibnet", embeds[11].url)
  console.log("Video URL:", videoUrl);
  };
 
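The switch from `embeds[11]` to `embeds[11].url` in the README example tracks the new return shape of `getEmbed` (see the source hunk further down): each entry is now an object pairing an episode title with a single embed link, so the link has to be read off the `url` field before being handed to `getVideoUrlFromEmbed`. A minimal sketch of the 1.1.1 usage, assuming both functions are exported from the package entry point and using a hypothetical catalogue URL:

```js
import { getEmbed, getVideoUrlFromEmbed } from "better-ani-scraped";

// Hypothetical anime-sama.fr season URL, used only for illustration.
const animeUrl = "https://anime-sama.fr/catalogue/example-anime/saison1/vostfr";

const embeds = await getEmbed(animeUrl, ["sibnet", "vidmoly"]);
// Each entry now looks like { title, url } instead of a bare link.
const videoUrl = await getVideoUrlFromEmbed("sibnet", embeds[11].url);
console.log("Video URL:", videoUrl);
```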
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "better-ani-scraped",
- "version": "1.0.0",
+ "version": "1.1.1",
  "description": "Scrape anime data from different sources (only anime-sama.fr for the moment)",
  "main": "index.js",
  "scripts": {
@@ -17,6 +17,7 @@
  "license": "MIT",
  "type": "module",
  "dependencies": {
+ "ani-scraped": "^1.2.8",
  "axios": "^1.8.4",
  "cheerio": "^1.0.0",
  "puppeteer": "^24.6.1"
@@ -124,7 +124,7 @@ export async function getSeasons(animeUrl, language = "vostfr") {
  return seasons;
  }
 
- async function getEpisodeTitles(animeUrl) {
+ export async function getEpisodeTitles(animeUrl) {
  let browser;
  try {
  browser = await puppeteer.launch({
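Adding `export` here promotes `getEpisodeTitles` from an internal helper to part of the module's public surface, which is what the new `AnimeScraper.getEpisodeTitles` wrapper further down delegates to. A minimal sketch, assuming the function is re-exported through the package entry point (this diff does not show the re-export itself) and using a hypothetical URL:

```js
import { getEpisodeTitles } from "better-ani-scraped";

// Launches Puppeteer under the hood and returns the scraped episode titles.
const titles = await getEpisodeTitles(
  "https://anime-sama.fr/catalogue/example-anime/saison1/vostfr" // hypothetical URL
);
console.log(titles);
```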
@@ -156,51 +156,56 @@ async function getEpisodeTitles(animeUrl) {
  }
  }
 
- export async function getEmbed(animeUrl, hostPriority = ["sibnet", "vidmoly"]) {
- let res, episodesJs;
- try {
- res = await axios.get(animeUrl, {
- headers: getHeaders(animeUrl.split("/").slice(0, 5).join("/")),
- });
+ export async function getEmbed(animeUrl, hostPriority = ["vidmoly"]) {
+ const res = await axios.get(animeUrl, {
+ headers: getHeaders(animeUrl.split("/").slice(0, 5).join("/")),
+ });
+ const $ = cheerio.load(res.data);
 
- const $ = cheerio.load(res.data);
- const scriptTag = $('script[src*="episodes.js"]').attr("src");
+ const scriptTag = $('script[src*="episodes.js"]').attr("src");
+ if (!scriptTag) throw new Error("No episodes script found");
 
- if (!scriptTag) throw new Error("No episodes script found");
- const scriptUrl = animeUrl.endsWith("/") ? animeUrl + scriptTag : animeUrl + "/" + scriptTag;
-
- episodesJs = await axios.get(scriptUrl, { headers: getHeaders(animeUrl) }).then(r => r.data);
+ const scriptUrl = animeUrl.endsWith("/")
+ ? animeUrl + scriptTag
+ : animeUrl + "/" + scriptTag;
 
- const match = episodesJs.match(/var\s+eps\d+\s*=\s*(\[[^\]]+\])/);
- if (!match) throw new Error("No episode array found");
+ const episodesJs = await axios
+ .get(scriptUrl, { headers: getHeaders(animeUrl) })
+ .then((r) => r.data);
 
- const arrayString = match[1];
- let links = [];
+ const matches = [
+ ...episodesJs.matchAll(/var\s+(eps\d+)\s*=\s*(\[[^\]]+\])/g),
+ ];
+ if (!matches.length) throw new Error("No episode arrays found");
 
- try {
- links = eval(arrayString);
- } catch (e) {
- console.warn("Could not parse episode links array:", e);
- }
- const titles = await getEpisodeTitles(animeUrl);
-
- const results = titles.slice(0, links.length).map((title, i) => ({
- title,
- url: [links[i]]
- }));
- for (const host of hostPriority) {
- const filtered = results.filter(ep =>
- ep.url.some(link => link.includes(host))
- );
- if (filtered.length) return filtered;
- }
- return results;
- } catch (error) {
- console.error('Erreur lors de la récupération des données d\'épisodes:', error);
- return [];
+ let allEmbeds = [];
+
+ for (const [, , arrayString] of matches) {
+ try {
+ const links = eval(arrayString);
+ allEmbeds.push(...links);
+ } catch (e) {
+ console.warn("Could not parse embed array:", e);
+ }
+ }
+ for (const host of hostPriority) {
+ const filtered = allEmbeds.filter((url) => url.includes(host));
+ if (filtered.length) {
+ const titles = await getEpisodeTitles(animeUrl);
+ return titles.slice(0, filtered.length).map((title, i) => ({
+ title,
+ url: filtered[i]
+ }));
  }
+ }
+ const titles = await getEpisodeTitles(animeUrl);
+ return titles.slice(0, allEmbeds.length).map((title, i) => ({
+ title,
+ url: allEmbeds[i]
+ }));
  }
 
+
  export async function getAnimeInfo(animeUrl) {
  const res = await axios.get(animeUrl, { headers: getHeaders(CATALOGUE_URL) });
  const $ = cheerio.load(res.data);
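Behaviourally, the rewritten `getEmbed` scans every `var epsN = [...]` array in episodes.js via `matchAll` instead of only the first match, flattens the links into `allEmbeds`, and applies `hostPriority` to that flat list before pairing links with episode titles. It also drops the outer try/catch, so a failed request or a missing episodes script now rejects the returned promise instead of resolving to `[]`. A consuming sketch under those assumptions, with a hypothetical URL:

```js
import { getEmbed } from "better-ani-scraped";

// Hypothetical season URL, for illustration only.
const animeUrl = "https://anime-sama.fr/catalogue/example-anime/saison1/vostfr";

try {
  // Prefer sibnet links, then fall back to vidmoly; the new default is ["vidmoly"] only.
  const episodes = await getEmbed(animeUrl, ["sibnet", "vidmoly"]);
  for (const { title, url } of episodes) {
    console.log(title, "->", url);
  }
} catch (err) {
  // Failed requests and missing episode arrays now surface here instead of returning [].
  console.error("getEmbed failed:", err);
}
```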
@@ -83,4 +83,13 @@ export class AnimeScraper {
  return null;
  }
  }
+
+ async getEpisodeTitles(animeUrl) {
+ try {
+ return await this.source.getEpisodeTitles(animeUrl);
+ } catch (error) {
+ console.error(`This scraper does not have the getRandomAnime function implemented or an error happened -> ${error}`);
+ return null;
+ }
+ }
  }
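Finally, the `AnimeScraper` wrapper gains a matching `getEpisodeTitles` method that delegates to the configured source and resolves to `null` when the source does not implement it or the scrape fails. A minimal sketch, assuming `AnimeScraper` is exported from the package root and constructed the same way as for its other wrapped methods (the constructor signature is not shown in this diff):

```js
import { AnimeScraper } from "better-ani-scraped";

const scraper = new AnimeScraper("anime-sama"); // assumed source identifier
const titles = await scraper.getEpisodeTitles(
  "https://anime-sama.fr/catalogue/example-anime/saison1/vostfr" // hypothetical URL
);

if (titles === null) {
  console.log("Episode titles unavailable for this source.");
} else {
  console.log(titles);
}
```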