nhpdf 1.7.tar.gz → 1.8.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: nhpdf
3
- Version: 1.7
3
+ Version: 1.8
4
4
  Summary: Download NHentai doujins as PDF
5
5
  Project-URL: Homepage, https://github.com/Aze543/NhentaiDoujinToPDFMaker
6
6
  Project-URL: Issues, https://github.com/Aze543/NhentaiDoujinToPDFMaker/issues
@@ -12,6 +12,7 @@ Classifier: Development Status :: 4 - Beta
12
12
  Classifier: Programming Language :: Python
13
13
  Requires-Python: >=3.8
14
14
  Requires-Dist: beautifulsoup4
15
+ Requires-Dist: cloudscraper
15
16
  Requires-Dist: img2pdf
16
17
  Requires-Dist: requests
17
18
  Description-Content-Type: text/markdown
@@ -4,11 +4,12 @@ build-backend = "hatchling.build"
4
4
 
5
5
  [project]
6
6
  name = "nhpdf"
7
- version = "1.7"
7
+ version = "1.8"
8
8
  dependencies = [
9
9
  "beautifulsoup4",
10
10
  "img2pdf",
11
11
  "requests",
12
+ "cloudscraper",
12
13
  ]
13
14
  requires-python = ">=3.8"
14
15
  authors = [
@@ -1,5 +1,5 @@
1
- import img2pdf # type: ignore
2
- import requests
1
+ import img2pdf
2
+ import cloudscraper
3
3
  from typing import NoReturn
4
4
  from bs4 import BeautifulSoup
5
5
  from concurrent.futures import ThreadPoolExecutor
@@ -15,6 +15,13 @@ from PIL import Image
15
15
  nhpdf_dir = Path.home()/"Documents"/"nhpdf"
16
16
  nhpdf_dir.mkdir(parents=True, exist_ok=True)
17
17
 
18
+ scraper = cloudscraper.create_scraper(
19
+ browser={
20
+ "browser": "chrome",
21
+ "platform": "windows",
22
+ }
23
+ )
24
+
18
25
  def loading_animation() -> NoReturn:
19
26
  global la, pages, page
20
27
  spinner = ['|', '/', '-', '\\']
@@ -33,7 +40,7 @@ def download_image(raw_url: str) -> bytes:
33
40
  page += 1
34
41
  img_code, f_type = re.search(r'/(\d+)', raw_url['data-src']).group(), re.search(r'\b(.(jpg|jpeg|png|webp|gif|tiff|svg))\b', raw_url['data-src']).group()
35
42
  url = f'https://i3.nhentai.net/galleries{img_code}/{page}{f_type}'
36
- response = requests.get(url)
43
+ response = scraper.get(url)
37
44
  if response.status_code == 200:
38
45
  content = check_alpha(response.content)
39
46
  return content
@@ -51,7 +58,7 @@ def compile_images(raw: list, name: str) -> NoReturn:
51
58
  def check_alpha(image: bytes) -> bytes:
52
59
  try:
53
60
  img2pdf.convert(image)
54
- except img2pdf.AlphaChannelError as alphaError:
61
+ except img2pdf.AlphaChannelError:
55
62
  buffered = io.BytesIO(image)
56
63
  img = Image.open(buffered)
57
64
  converted: Image = img.convert('RGB')
@@ -79,7 +86,7 @@ def start_nhpdf():
79
86
  print("\nThe code needs to be numbers (Ex: 177013) :) \n\ngoing to the next code...")
80
87
  continue
81
88
  url = f'https://nhentai.net/g/{code}/'
82
- response = requests.get(url)
89
+ response = scraper.get(url)
83
90
  try:
84
91
  soup = BeautifulSoup(response.text, "html.parser")
85
92
  pages = len(soup.find_all(class_='gallerythumb'))
@@ -1,24 +0,0 @@
1
- {
2
- "terminal.external.osxExec": "iTerm.app",
3
-
4
- "background.fullscreen": {
5
-
6
-
7
- // Local images can be dragged into the browser to quickly get the file protocol address from the address bar
8
- "images": ["file:///Users/eizen/Documents/papelnapader/+9999Aura.png"],
9
- "opacity": 0.1,
10
- "size": "cover",
11
- "position": "center",
12
- "interval": 0,
13
- "random": false
14
- },
15
- // `sidebar` and `panel` have the same config as `fullscreen`
16
- "background.sidebar": {},
17
- "background.panel": {},
18
- "background.enabled": true,
19
- "workbench.colorTheme": "dragan",
20
- "editor.fontFamily": "JetBrains Mono",
21
- "editor.fontLigatures": true,
22
- "terminal.integrated.fontLigatures.enabled": true,
23
- "terminal.integrated.fontFamily": "monospace",
24
- }
File without changes
File without changes
File without changes
File without changes
File without changes