LuminaScan 1.0.6 (luminascan-1.0.6.tar.gz)
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- luminascan-1.0.6/LICENSE +21 -0
- luminascan-1.0.6/LuminaScan/__init__.py +0 -0
- luminascan-1.0.6/LuminaScan/main.py +298 -0
- luminascan-1.0.6/LuminaScan.egg-info/PKG-INFO +29 -0
- luminascan-1.0.6/LuminaScan.egg-info/SOURCES.txt +11 -0
- luminascan-1.0.6/LuminaScan.egg-info/dependency_links.txt +1 -0
- luminascan-1.0.6/LuminaScan.egg-info/entry_points.txt +2 -0
- luminascan-1.0.6/LuminaScan.egg-info/requires.txt +3 -0
- luminascan-1.0.6/LuminaScan.egg-info/top_level.txt +1 -0
- luminascan-1.0.6/PKG-INFO +29 -0
- luminascan-1.0.6/README.md +6 -0
- luminascan-1.0.6/setup.cfg +4 -0
- luminascan-1.0.6/setup.py +25 -0
luminascan-1.0.6/LICENSE
ADDED
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2025 marlenelopestolfo2018-cmd
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
luminascan-1.0.6/LuminaScan/__init__.py
File without changes
luminascan-1.0.6/LuminaScan/main.py
ADDED
@@ -0,0 +1,298 @@
+#!/usr/bin/env python3
+import sys
+import requests
+import pyfiglet
+import colorama
+from colorama import Fore, Style
+import json
+import argparse
+import time
+from requests.exceptions import (
+    MissingSchema,
+    InvalidSchema,
+    InvalidURL,
+    ConnectionError,
+    Timeout,
+    RequestException
+)
+
+# fingerprint detection function
+def detectar_fingerprint(headers):
+    h = {k.lower(): v.lower() for k, v in headers.items()}
+    resultado = []
+
+    fingerprints = {
+        "Cloudflare": ["cf-ray", "cf-cache-status", ("server", "cloudflare"), "cf-connecting-ip"],
+        "AWS (CloudFront / S3)": ["x-amz-cf-id", "x-amz-cf-pop", ("via", "amazon"), ("server", "amazons3")],
+        "Google Cloud": ["x-cloud-trace-context", ("server", "google"), ("via", "google")],
+        "Azure": ["x-azure-ref", "arr-cookie", ("server", "microsoft-iis")],
+        "Fastly": ["x-served-by", ("via", "fastly"), ("x-cache", "hit")],
+        "Vercel": ["x-vercel-id", ("server", "vercel")],
+        "Netlify": ["x-nf-request-id", ("server", "netlify")]
+    }
+
+    for nome, sinais in fingerprints.items():
+        score = 0
+        for sinal in sinais:
+            if isinstance(sinal, tuple):
+                chave, valor = sinal
+                if chave in h and valor in h[chave]:
+                    score += 1
+            else:
+                if sinal in h:
+                    score += 1
+        if score >= 2:
+            confianca = "High"
+        elif score == 1:
+            confianca = "Average"
+        else:
+            continue
+        resultado.append(f"{nome} (Trust {confianca})")
+
+    return resultado if resultado else ["Unidentified"]
+
+
+def analisar_security_headers(headers):
+    security_headers = {
+        "content-security-policy", "strict-transport-security", "x-content-type-options",
+        "x-frame-options", "x-xss-protection", "referrer-policy", "permissions-policy",
+        "cross-origin-resource-policy", "cross-origin-opener-policy", "cross-origin-embedder-policy"
+    }
+    encontrados = []
+    headers_lower = {k.lower(): v for k, v in headers.items()}
+    for sec in security_headers:
+        if sec in headers_lower:
+            encontrados.append(sec)
+    total = len(encontrados)
+    if total <= 1:
+        nivel = "Low"
+    elif total <= 3:
+        nivel = "Medium"
+    else:
+        nivel = "High"
+    return {"Found": encontrados, "Total": total, "Security Level": nivel}
+
+# Main function
+def main():
+    parser = argparse.ArgumentParser(
+        description="LuminaScan - Simple HTTP Analyzer"
+    )
+
+
+    parser.add_argument("url", help="Target URL")
+    parser.add_argument("--cookies", action="store_true", help="Show cookies")
+    parser.add_argument("--headers", action="store_true", help="Show headers")
+    parser.add_argument("--fingerprint", action="store_true", help="Detect CDN/Fingerprint")
+    parser.add_argument("--security", action="store_true", help="Analyze security headers")
+    parser.add_argument("--json", action="store_true", help="Show JSON response")
+    parser.add_argument("--redirects", action="store_true", help="Show redirects info")
+    parser.add_argument("--status", action="store_true", help="Show status code")
+    parser.add_argument("--content", action="store_true", help="Determine if API or Website")
+    parser.add_argument("--all", action="store_true", help="Show all information")
+    parser.add_argument("--method", choices=["get", "post", "put", "delete"], default="get", help="HTTP method to use (default: get)")
+    parser.add_argument("--timeout", default="easy", help="Set delay between requests: easy (1s), medium (3s), hard (5s)")
+    parser.add_argument("--data", help="JSON payload for POST/PUT requests")
+    parser.add_argument("--http-complete", action="store_true", help="Show status for all payloads")
+    parser.add_argument("--banner", action="store_true", help="Show LuminaScan Banner")
+    args = parser.parse_args()
+
+    # delay selection
+    val = args.timeout.lower()
+    try:
+        if val == "easy":
+            delay = 1
+        elif val == "medium":
+            delay = 3
+        elif val == "hard":
+            delay = 5
+        else:
+            delay = 1
+    except:
+        print("Invalid timeout value, using default of 1 second.")
+        delay = 1
+
+    url = args.url
+    if not url.startswith(("http://", "https://")):
+        url = "http://" + url
+
+    # User-Agent
+    ninja = {
+        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64)",
+        "Accept": "*/*",
+        "Accept-Language": "en-US,en;q=0.5",
+        "Accept-Encoding": "gzip, deflate",
+        "Connection": "keep-alive"
+    }
+    # Banner
+    if args.banner:
+        alien = pyfiglet.figlet_format("LuminaScan", font="smslant")
+        print(Fore.GREEN + alien + Style.RESET_ALL)
+
+    # All flags
+    if args.all:
+        args.cookies = args.headers = args.fingerprint = args.json = args.redirects = args.status = args.security = args.content = True
+
+    # HTTP Method with payload input
+    payload = {}
+
+    if args.method.lower() in ["post", "put"]:
+        if args.data:
+            try:
+                payload = json.loads(args.data)
+            except json.JSONDecodeError:
+                print("Invalid JSON in --data, using empty payload {}")
+                payload = {}
+        else:
+            user_input = input('Enter JSON payload (e.g. {"key":"value"}): ').strip()
+            if user_input:
+                try:
+                    payload = json.loads(user_input)
+                except json.JSONDecodeError:
+                    print("Invalid JSON, using empty payload {}")
+                    payload = {}
+
+    # Requests
+    try:
+        time.sleep(delay)
+
+        if args.method.lower() == "get":
+            response = requests.get(url, headers=ninja, timeout=4)
+        elif args.method.lower() == "post":
+            response = requests.post(url, json=payload, headers=ninja, timeout=4)
+        elif args.method.lower() == "put":
+            response = requests.put(url, json=payload, headers=ninja, timeout=4)
+        elif args.method.lower() == "delete":
+            response = requests.delete(url, headers=ninja, timeout=4)
+
+    except MissingSchema:
+        print("[ERROR] Invalid URL format. Please include http:// or https://")
+        exit(1)
+    except InvalidURL:
+        print("[ERROR] Malformed URL. Check the target address.")
+        exit(1)
+    except ConnectionError:
+        print("[ERROR] Connection failed. Host unreachable or offline.")
+        exit(1)
+    except Timeout:
+        print("[ERROR] Request timeout. The server did not respond in time.")
+        exit(1)
+    except RequestException as e:
+        print(f"[ERROR] Unexpected request error: {e}")
+        exit(1)
+
+    print(f"\n-------HTTP METHOD SUMMARY: {args.method.upper()}-------")
+    print(f"Status: {response.status_code}")
+    print(f"Response Time: {response.elapsed.total_seconds()}s\n")
+
+    # Payload loop guru-friendly
+    results = []
+    try:
+        payloads = payload if isinstance(payload, list) else [payload]
+
+        for i, pay in enumerate(payloads, start=1):
+            try:
+                r = requests.post(url, json=pay, timeout=delay)
+                status = r.status_code
+            except requests.exceptions.Timeout:
+                status = "TIMEOUT"
+            except requests.exceptions.RequestException:
+                status = "ERROR"
+
+            results.append((i, status))
+
+            if args.http_complete:
+                print(f"Payload {i}: Status {status}")
+    except KeyboardInterrupt:
+        print("\n[!] KeyboardInterrupt detected. Exiting...")
+        sys.exit(0)
+
+    # Cookies
+    if args.cookies:
+        time.sleep(delay)
+        print(f"\n---------COOKIES---------")
+        for c in response.cookies:
+            print(f"{c.name} = {c.value}")
+        print("\nAll cookies:", response.cookies)
+        print(f"[*] Number of cookies: {len(response.cookies)}")
+        print(f"[!] Time to receive cookies: {response.elapsed.total_seconds()} seconds")
+
+    # Status
+    if args.status:
+        time.sleep(delay)
+        print("\n---------STATUS---------")
+        try:
+            if response.status_code == 200:
+                print("[+] Active site/API - 200")
+            elif response.status_code == 404:
+                print("[!] Site/API Not Found - 404")
+            elif response.status_code == 429:
+                print("[-] Too Many Requests - 429")
+            elif response.status_code == 403:
+                print("[$] Site Blocked - 403")
+            else:
+                print("Unknown status error!!")
+        except Exception as e:
+            print(f"Error checking status: {e}")
+        print(f"[-] Time to receive response: {response.elapsed.total_seconds()} seconds")
+
+    # Headers
+    if args.headers:
+        time.sleep(delay)
+        print("\n------------HEADERS------------")
+        for chave, valor in response.headers.items():
+            print(f"[*] Key: {chave}")
+            print(f"[!] Value: {valor}")
+        print(f"[#] Number of Headers: {len(response.headers)}")
+
+    # Fingerprint
+    if args.fingerprint:
+        time.sleep(delay)
+        print("\n---------FINGERPRINT---------")
+        print(f"Possible CDN/Fingerprint: {', '.join(detectar_fingerprint(response.headers))}")
+
+    # JSON
+    if args.json:
+        time.sleep(delay)
+        print("\n--------------JSON-----------------")
+        try:
+            print(json.dumps(response.json(), indent=4))
+        except ValueError:
+            print("No JSON response found.")
+
+    # Redirects
+    if args.redirects:
+        time.sleep(delay)
+        print("\n-----------REDIRECT---------------")
+        print(f"Final URL after redirects: {response.url}")
+        print(f"Number of redirects: {len(response.history)}")
+        for resp in response.history:
+            print(f"Redirected from {resp.url} with status {resp.status_code}")
+
+    # Content type
+    if args.content:
+        time.sleep(delay)
+        print("\n----------API OR SITE---------------")
+        content_type = response.headers.get("Content-Type", "").lower()
+        if "application/json" in content_type:
+            print("[+] API detected (JSON response)")
+        elif "text/html" in content_type:
+            print("[+] Website detected (HTML)")
+        else:
+            print("[?] Unknown content type")
+
+    # Security headers
+    if args.security:
+        time.sleep(delay)
+        print("\n-------SECURITY HEADERS-------")
+        time.sleep(delay)
+        print(analisar_security_headers(response.headers))
+
+    print(f"\n[~] Avg response time: {response.elapsed.total_seconds()}s")
+    print("-----------------------------------")
+
+
+
+
+# Entry point
+if __name__ == "__main__":
+    main()
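The two helper functions above drive the --fingerprint and --security output: detectar_fingerprint scores known CDN header signals per provider (two or more matches gives "High" trust, exactly one gives "Average"), while analisar_security_headers counts recognized security headers (0-1 found rates "Low", 2-3 "Medium", 4 or more "High"). A minimal sketch of calling them directly, assuming the package is installed and using an invented example_headers dict:

# Illustrative only; the headers below are hypothetical values.
from LuminaScan.main import detectar_fingerprint, analisar_security_headers

example_headers = {
    "Server": "cloudflare",
    "CF-RAY": "8a1b2c3d4e5f-GRU",
    "Strict-Transport-Security": "max-age=31536000",
    "X-Content-Type-Options": "nosniff",
}

print(detectar_fingerprint(example_headers))
# e.g. ['Cloudflare (Trust High)']   # "cf-ray" plus ("server", "cloudflare") = 2 signals
print(analisar_security_headers(example_headers))
# e.g. {'Found': [...], 'Total': 2, 'Security Level': 'Medium'}

Because both helpers only read a plain header mapping, they can be exercised without making any HTTP request.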
luminascan-1.0.6/LuminaScan.egg-info/PKG-INFO
ADDED
@@ -0,0 +1,29 @@
+Metadata-Version: 2.4
+Name: LuminaScan
+Version: 1.0.6
+Summary: Tool to analyze website and APIs via HTTP requests
+Home-page: https://github.com/PixelPirate-bit/LuminaScan
+Author: PixelPirate-bit-2
+License: MIT
+Requires-Python: >=3.8
+Description-Content-Type: text/markdown
+License-File: LICENSE
+Requires-Dist: requests>=2.25.1
+Requires-Dist: pyfiglet>=0.8.post1
+Requires-Dist: colorama>=0.4.4
+Dynamic: author
+Dynamic: description
+Dynamic: description-content-type
+Dynamic: home-page
+Dynamic: license
+Dynamic: license-file
+Dynamic: requires-dist
+Dynamic: requires-python
+Dynamic: summary
+
+LuminaScan
+
+LuminaScan is a Python tool created to analyze websites and APIs in a fast and practical way. It allows you to check headers, status codes, cookies, JSON responses, and much more, all directly from the terminal.
+
+Installation
+pip install luminascan
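Assuming the console-script entry point declared in setup.py (shown later in this diff), installation exposes a luminascan command whose flags mirror the argparse options in main.py; the targets below are placeholders:

pip install luminascan
luminascan example.com --status --headers --fingerprint --security
luminascan https://api.example.com --method post --data '{"key": "value"}' --json
luminascan example.com --all --banner --timeout medium

Per main.py, --timeout accepts easy, medium, or hard (1s, 3s, or 5s delays between output sections), and a URL given without a scheme is prefixed with http://.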
luminascan-1.0.6/LuminaScan.egg-info/SOURCES.txt
ADDED
@@ -0,0 +1,11 @@
+LICENSE
+README.md
+setup.py
+LuminaScan/__init__.py
+LuminaScan/main.py
+LuminaScan.egg-info/PKG-INFO
+LuminaScan.egg-info/SOURCES.txt
+LuminaScan.egg-info/dependency_links.txt
+LuminaScan.egg-info/entry_points.txt
+LuminaScan.egg-info/requires.txt
+LuminaScan.egg-info/top_level.txt
luminascan-1.0.6/LuminaScan.egg-info/dependency_links.txt
ADDED
@@ -0,0 +1 @@
+
luminascan-1.0.6/LuminaScan.egg-info/top_level.txt
ADDED
@@ -0,0 +1 @@
+LuminaScan
luminascan-1.0.6/PKG-INFO
ADDED
@@ -0,0 +1,29 @@
+Metadata-Version: 2.4
+Name: LuminaScan
+Version: 1.0.6
+Summary: Tool to analyze website and APIs via HTTP requests
+Home-page: https://github.com/PixelPirate-bit/LuminaScan
+Author: PixelPirate-bit-2
+License: MIT
+Requires-Python: >=3.8
+Description-Content-Type: text/markdown
+License-File: LICENSE
+Requires-Dist: requests>=2.25.1
+Requires-Dist: pyfiglet>=0.8.post1
+Requires-Dist: colorama>=0.4.4
+Dynamic: author
+Dynamic: description
+Dynamic: description-content-type
+Dynamic: home-page
+Dynamic: license
+Dynamic: license-file
+Dynamic: requires-dist
+Dynamic: requires-python
+Dynamic: summary
+
+LuminaScan
+
+LuminaScan is a Python tool created to analyze websites and APIs in a fast and practical way. It allows you to check headers, status codes, cookies, JSON responses, and much more, all directly from the terminal.
+
+Installation
+pip install luminascan
luminascan-1.0.6/setup.py
ADDED
@@ -0,0 +1,25 @@
+from setuptools import setup, find_packages
+
+setup(
+    name="LuminaScan",
+    version="1.0.6",
+    author="PixelPirate-bit-2",
+    description="Tool to analyze website and APIs via HTTP requests",
+    long_description=open("README.md", encoding="utf-8").read(),
+    long_description_content_type="text/markdown",
+    url="https://github.com/PixelPirate-bit/LuminaScan",
+    license="MIT",
+    packages=find_packages(),
+    install_requires=[
+        "requests>=2.25.1",
+        "pyfiglet>=0.8.post1",
+        "colorama>=0.4.4"
+    ],
+    entry_points={
+        "console_scripts": [
+            "luminascan=LuminaScan.main:main"
+        ]
+    },
+    python_requires='>=3.8',
+
+)