nhpdf 0.1.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- nhpdf-0.1.0/.gitignore +2 -0
- nhpdf-0.1.0/LICENSE +0 -0
- nhpdf-0.1.0/PKG-INFO +16 -0
- nhpdf-0.1.0/README.md +0 -0
- nhpdf-0.1.0/pyproject.toml +35 -0
- nhpdf-0.1.0/requirements.txt +15 -0
- nhpdf-0.1.0/src/nhpdf/__init__.py +0 -0
- nhpdf-0.1.0/src/nhpdf/nhpdf.py +80 -0
- nhpdf-0.1.0/test.py +5 -0
nhpdf-0.1.0/.gitignore
ADDED
nhpdf-0.1.0/LICENSE
ADDED
File without changes
|
nhpdf-0.1.0/PKG-INFO
ADDED
@@ -0,0 +1,16 @@
|
|
1
|
+
Metadata-Version: 2.4
|
2
|
+
Name: nhpdf
|
3
|
+
Version: 0.1.0
|
4
|
+
Summary: Download NHentai doujins as PDF
|
5
|
+
Project-URL: Homepage, https://github.com/Aze543/NhentaiDoujinToPDFMaker
|
6
|
+
Project-URL: Issues, https://github.com/Aze543/NhentaiDoujinToPDFMaker/issues
|
7
|
+
Author-email: Aze543 <eizen.9258437663@gmail.com>
|
8
|
+
Maintainer-email: Aze543 <eizen.9258437663@gmail.com>
|
9
|
+
License-Expression: MIT
|
10
|
+
Keywords: goon,pdf,scraper
|
11
|
+
Classifier: Development Status :: 4 - Beta
|
12
|
+
Classifier: Programming Language :: Python
|
13
|
+
Requires-Python: >=3.8
|
14
|
+
Requires-Dist: beautifulsoup4
|
15
|
+
Requires-Dist: img2pdf
|
16
|
+
Requires-Dist: requests
|
nhpdf-0.1.0/README.md
ADDED
File without changes
|
@@ -0,0 +1,35 @@
|
|
1
|
+
[build-system]
|
2
|
+
requires = ["hatchling"]
|
3
|
+
build-backend = "hatchling.build"
|
4
|
+
|
5
|
+
[project]
|
6
|
+
name = "nhpdf"
|
7
|
+
version = "0.1.0"
|
8
|
+
dependencies = [
|
9
|
+
"beautifulsoup4",
|
10
|
+
"img2pdf",
|
11
|
+
"requests",
|
12
|
+
]
|
13
|
+
requires-python = ">=3.8"
|
14
|
+
authors = [
|
15
|
+
{name = "Aze543", email = "eizen.9258437663@gmail.com"},
|
16
|
+
]
|
17
|
+
maintainers = [
|
18
|
+
{name = "Aze543", email = "eizen.9258437663@gmail.com"},
|
19
|
+
]
|
20
|
+
description = "Download NHentai doujins as PDF"
|
21
|
+
readme = "README.md"
|
22
|
+
license = "MIT"
|
23
|
+
license-files = ["LICEN[CS]E.*"]
|
24
|
+
keywords = ["pdf", "scraper", "goon"]
|
25
|
+
classifiers = [
|
26
|
+
"Development Status :: 4 - Beta",
|
27
|
+
"Programming Language :: Python"
|
28
|
+
]
|
29
|
+
|
30
|
+
[project.urls]
|
31
|
+
Homepage = "https://github.com/Aze543/NhentaiDoujinToPDFMaker"
|
32
|
+
Issues = "https://github.com/Aze543/NhentaiDoujinToPDFMaker/issues"
|
33
|
+
|
34
|
+
[project.scripts]
|
35
|
+
nhpdf = "nhpdf.nhpdf:start_nhpdf"
|
@@ -0,0 +1,15 @@
|
|
1
|
+
beautifulsoup4==4.13.3
|
2
|
+
certifi==2025.1.31
|
3
|
+
charset-normalizer==3.4.1
|
4
|
+
Deprecated==1.2.18
|
5
|
+
idna==3.10
|
6
|
+
img2pdf==0.6.0
|
7
|
+
lxml==5.3.2
|
8
|
+
packaging==24.2
|
9
|
+
pikepdf==9.7.0
|
10
|
+
pillow==11.1.0
|
11
|
+
requests==2.32.3
|
12
|
+
soupsieve==2.6
|
13
|
+
typing_extensions==4.13.1
|
14
|
+
urllib3==2.3.0
|
15
|
+
wrapt==1.17.2
|
File without changes
|
@@ -0,0 +1,80 @@
|
|
1
|
+
import img2pdf # type: ignore
|
2
|
+
import requests
|
3
|
+
from typing import NoReturn
|
4
|
+
from bs4 import BeautifulSoup
|
5
|
+
import re
|
6
|
+
import sys
|
7
|
+
import time
|
8
|
+
import threading
|
9
|
+
from pathlib import Path
|
10
|
+
|
11
|
+
# Output folder for generated PDFs: ~/Documents/nhpdf (created at import time).
nhpdf_dir = Path.home() / "Documents" / "nhpdf"
nhpdf_dir.mkdir(parents=True, exist_ok=True)
13
|
+
|
14
|
+
def loading_animation():
    """Spin a progress indicator on stdout until the global flag `la` is set.

    Reads the module globals `la` (stop flag, set by start_nhpdf), and
    `page` / `pages` (progress counters maintained by compile_images).
    Intended to run on its own thread alongside the download loop.
    """
    global la, pages, page
    frames = ('|', '/', '-', '\\')
    while not la:
        for glyph in frames:
            sys.stdout.write(f'\rDownloading the pages ({page}/{pages})...{glyph}')
            sys.stdout.flush()
            time.sleep(0.1)
            # Bail out mid-cycle as soon as the download finishes.
            if la:
                break
24
|
+
|
25
|
+
def compile_images(raw: list, name: str) -> None:
    """Download every page image referenced in *raw* and bundle them into one PDF.

    Parameters
    ----------
    raw : list
        BeautifulSoup tags whose ``data-src`` attribute holds a thumbnail URL
        (assumed shape: ``.../galleries/<id>/<n>t.<ext>`` — TODO confirm against
        the live site).
    name : str
        Base file name; the result is written to ``<nhpdf_dir>/<name>.pdf``.

    Side effects: increments the module global ``page`` per image (read by the
    spinner thread) and stops once ``page`` reaches the module global ``pages``.
    """
    global pages, page
    raw_images = []
    for item in raw:
        page += 1
        src = item['data-src']
        code_match = re.search(r'/(\d+)', src)
        ext_match = re.search(r'\.([a-zA-Z0-9]+)$', src)
        if code_match is None or ext_match is None:
            # Unexpected thumbnail URL layout — skip this page instead of
            # crashing with AttributeError on .group().
            continue
        img_code, f_type = code_match.group(), ext_match.group()
        url = f'https://i3.nhentai.net/galleries{img_code}/{page}{f_type}'
        # Timeout so one dead connection cannot hang the whole download;
        # raise_for_status so an HTTP error page is never embedded in the PDF.
        response = requests.get(url, timeout=30)
        response.raise_for_status()
        raw_images.append(response.content)
        if page == pages:
            break
    nhpdf = nhpdf_dir / f"{name}.pdf"
    with open(nhpdf, "wb") as file:
        file.write(img2pdf.convert(raw_images))
39
|
+
|
40
|
+
def start_nhpdf():
    """Console-script entry point: download each doujin code on argv as a PDF.

    Usage: ``nhpdf <code> [<code> ...]`` where each code is a numeric gallery
    ID. For every code it scrapes the gallery page, then runs the download in
    the main thread while a spinner thread animates progress. Sets the module
    globals ``la``, ``page`` and ``pages`` shared with loading_animation() and
    compile_images().
    """
    global la, page, pages
    if len(sys.argv) < 2:
        print("Usage: nhpdf <doujin-code>\nExamples:\n1. nhpdf 566212\n2. nhpdf 566212 563102")
        return
    codes = sys.argv[1:]
    items = len(codes)
    item = 0
    for code in codes:
        la = False   # spinner stop flag for this gallery
        page = 0     # progress counter read by the spinner thread
        item += 1
        try:
            code = int(code)
        except ValueError:  # only a non-numeric argument is expected here
            print("\nThe code needs to be numbers (Ex: 177013) :) \n\ngoing to the next code...")
            continue
        url = f'https://nhentai.net/g/{code}/'
        # Timeout so a dead connection cannot hang the tool indefinitely.
        response = requests.get(url, timeout=30)
        try:
            soup = BeautifulSoup(response.text, "html.parser")
            pages = len(soup.find_all(class_='gallerythumb'))
            name = soup.find(class_='pretty').text
            author = soup.find(class_='before').text
            raw_data = soup.find_all(class_='lazyload')
            if not author:
                author = '[NAME-MISSING]: They forgot to put the author name in the website.'
        except AttributeError:
            # soup.find(...) returned None -> the gallery page does not exist.
            print("\n[ERROR]: The code cannot be found in the website.\n\ngoing to the next code...")
            continue
        print(f"\nH-Doujin Details:\nname: {name}\nauthor: {author}\npages: {pages}\n")
        function_thread = threading.Thread(target=loading_animation)
        function_thread.start()
        try:
            compile_images(raw_data, name)
        finally:
            # Always release the spinner thread, even if the download raised;
            # otherwise the non-daemon thread would animate forever and keep
            # the process alive after the error.
            la = True
            function_thread.join()
        if item != items:
            print(f"\n\n{name}.pdf was successfully downloaded\n\ngoing to the next code...")

    print(f"\n\nOperation was success, the pdf's was saved into the nhpdf folder in your documents folder.\n")
80
|
+
|