cs2tracker 2.1.8__py3-none-any.whl → 2.1.10__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of cs2tracker might be problematic.
- cs2tracker/_version.py +2 -2
- cs2tracker/application.py +349 -86
- cs2tracker/background_task.py +109 -0
- cs2tracker/constants.py +32 -55
- cs2tracker/data/config.ini +155 -156
- cs2tracker/discord_notifier.py +87 -0
- cs2tracker/price_logs.py +100 -0
- cs2tracker/scraper.py +105 -355
- cs2tracker/validated_config.py +117 -0
- {cs2tracker-2.1.8.dist-info → cs2tracker-2.1.10.dist-info}/METADATA +7 -4
- cs2tracker-2.1.10.dist-info/RECORD +20 -0
- cs2tracker-2.1.8.dist-info/RECORD +0 -16
- {cs2tracker-2.1.8.dist-info → cs2tracker-2.1.10.dist-info}/WHEEL +0 -0
- {cs2tracker-2.1.8.dist-info → cs2tracker-2.1.10.dist-info}/entry_points.txt +0 -0
- {cs2tracker-2.1.8.dist-info → cs2tracker-2.1.10.dist-info}/licenses/LICENSE.md +0 -0
- {cs2tracker-2.1.8.dist-info → cs2tracker-2.1.10.dist-info}/top_level.txt +0 -0
cs2tracker/scraper.py (CHANGED)
```diff
@@ -1,9 +1,4 @@
-import csv
-import os
 import time
-from configparser import ConfigParser
-from datetime import datetime
-from subprocess import DEVNULL, call
 from urllib.parse import unquote
 
 from bs4 import BeautifulSoup
@@ -13,22 +8,11 @@ from requests import RequestException, Session
 from requests.adapters import HTTPAdapter, Retry
 from tenacity import RetryError, retry, stop_after_attempt
 
-from cs2tracker.constants import (
-
-    BANNER,
-    BATCH_FILE,
-    CAPSULE_INFO,
-    CASE_HREFS,
-    CASE_PAGES,
-    CONFIG_FILE,
-    OS,
-    OUTPUT_FILE,
-    PROJECT_DIR,
-    PYTHON_EXECUTABLE,
-    RUNNING_IN_EXE,
-    OSType,
-)
+from cs2tracker.constants import AUTHOR_STRING, BANNER, CAPSULE_INFO, CASE_HREFS
+from cs2tracker.discord_notifier import DiscordNotifier
 from cs2tracker.padded_console import PaddedConsole
+from cs2tracker.price_logs import PriceLogs
+from cs2tracker.validated_config import ValidatedConfig
 
 MAX_LINE_LEN = 72
 SEPARATOR = "-"
@@ -37,32 +21,21 @@ PRICE_INFO = "Owned: {:<10} Steam market price: ${:<10} Total: ${:<10}\n"
 HTTP_PROXY_URL = "http://{}:@smartproxy.crawlbase.com:8012"
 HTTPS_PROXY_URL = "http://{}:@smartproxy.crawlbase.com:8012"
 
-
-DC_WEBHOOK_AVATAR_URL = "https://img.icons8.com/?size=100&id=uWQJp2tLXUH6&format=png&color=000000"
-DC_RECENT_HISTORY_LIMIT = 5
-
-WIN_BACKGROUND_TASK_NAME = "CS2Tracker Daily Calculation"
-WIN_BACKGROUND_TASK_SCHEDULE = "DAILY"
-WIN_BACKGROUND_TASK_TIME = "12:00"
-WIN_BACKGROUND_TASK_CMD = (
-    f"powershell -WindowStyle Hidden -Command \"Start-Process '{BATCH_FILE}' -WindowStyle Hidden\""
-)
+console = PaddedConsole()
 
 
 class Scraper:
     def __init__(self):
         """Initialize the Scraper class."""
-        self.console = PaddedConsole()
-        self.parse_config()
+        self.load_config()
         self._start_session()
 
         self.usd_total = 0
         self.eur_total = 0
 
-    def parse_config(self):
-        """Parse the configuration file."""
-        self.config = ConfigParser()
-        self.config.read(CONFIG_FILE)
+    def load_config(self):
+        """Load the configuration file and validate its contents."""
+        self.config = ValidatedConfig()
 
     def _start_session(self):
         """Start a requests session with custom headers and retry logic."""
@@ -80,29 +53,15 @@ class Scraper:
         """Scrape prices for capsules and cases, calculate totals in USD and EUR, and
         print/save the results.
         """
-        capsule_usd_total = 0
-        try:
-            capsule_usd_total = self._scrape_all_capsule_prices()
-        except (RequestException, AttributeError, RetryError, ValueError):
-            self.console.print(
-                "[bold red][!] Failed to scrape capsule prices. (Consider using proxies to prevent rate limiting)\n"
-            )
-
-        case_usd_total = 0
-        try:
-            case_usd_total = self._scrape_case_prices()
-        except (RequestException, AttributeError, RetryError, ValueError):
-            self.console.print(
-                "[bold red][!] Failed to scrape case prices. (Consider using proxies to prevent rate limiting)\n"
+        if not self.config.valid:
+            console.print(
+                "[bold red][!] Invalid configuration. Please fix the config file before running."
             )
+            return
 
-        custom_item_usd_total = 0
-        try:
-            custom_item_usd_total = self._scrape_custom_item_prices()
-        except (RequestException, AttributeError, RetryError, ValueError):
-            self.console.print(
-                "[bold red][!] Failed to scrape custom item prices. (Consider using proxies to prevent rate limiting)\n"
-            )
+        capsule_usd_total = self._scrape_capsule_section_prices()
+        case_usd_total = self._scrape_case_prices()
+        custom_item_usd_total = self._scrape_custom_item_prices()
 
         self.usd_total += capsule_usd_total
         self.usd_total += case_usd_total
```
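`scrape_prices` now bails out early when the configuration fails validation instead of wrapping each scrape step in its own try/except. The new `cs2tracker/validated_config.py` (+117 lines) is not part of this file's diff, so the following is only a minimal sketch of the interface the scraper relies on (`valid`, the usual `ConfigParser` getters, and the `toggle_*` helpers); the class body and validation rules here are assumptions:

```python
# Hypothetical sketch -- the real cs2tracker/validated_config.py is not shown in this diff.
from configparser import ConfigParser

CONFIG_FILE = "config.ini"  # assumption: the real path comes from cs2tracker.constants


class ValidatedConfig(ConfigParser):
    """Config wrapper that reads the file once and records whether it is usable."""

    def __init__(self):
        super().__init__()
        read_ok = bool(self.read(CONFIG_FILE))
        self.valid = read_ok and self._validate()

    def _validate(self) -> bool:
        # Minimal checks: the sections the scraper queries must exist, and every
        # owned-count value must parse as an integer.
        if not self.has_section("App Settings") or not self.has_section("User Settings"):
            return False
        for section in self.sections():
            if section in ("App Settings", "User Settings"):
                continue
            if not all(owned.isdigit() for _, owned in self.items(section)):
                return False
        return True

    def _persist(self) -> None:
        with open(CONFIG_FILE, "w", encoding="utf-8") as config_file:
            self.write(config_file)

    def toggle_use_proxy(self, enabled: bool) -> None:
        self.set("App Settings", "use_proxy", str(enabled))
        self._persist()

    def toggle_discord_webhook(self, enabled: bool) -> None:
        self.set("App Settings", "discord_notifications", str(enabled))
        self._persist()
```

Subclassing `ConfigParser` would keep `getboolean`, `getint`, `get`, and `items` working unchanged at the call sites in this file.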
```diff
@@ -110,7 +69,7 @@
         self.eur_total = CurrencyConverter().convert(self.usd_total, "USD", "EUR")
 
         self._print_total()
-        self._save_price_log()
+        PriceLogs.save(self.usd_total, self.eur_total)
         self._send_discord_notification()
 
         # Reset totals for next run
@@ -121,125 +80,15 @@
         separators.
         """
         usd_title = "USD Total".center(MAX_LINE_LEN, SEPARATOR)
-        self.console.print(f"[bold green]{usd_title}")
-        self.console.print(f"${self.usd_total:.2f}")
+        console.print(f"[bold green]{usd_title}")
+        console.print(f"${self.usd_total:.2f}")
 
         eur_title = "EUR Total".center(MAX_LINE_LEN, SEPARATOR)
-        self.console.print(f"[bold green]{eur_title}")
-        self.console.print(f"€{self.eur_total:.2f}")
+        console.print(f"[bold green]{eur_title}")
+        console.print(f"€{self.eur_total:.2f}")
 
         end_string = SEPARATOR * MAX_LINE_LEN
-        self.console.print(f"[bold green]{end_string}\n")
-
-    def _save_price_log(self):
-        """
-        Save the current date and total prices in USD and EUR to a CSV file.
-
-        This will append a new entry to the output file if no entry has been made for
-        today.
-
-        :raises FileNotFoundError: If the output file does not exist.
-        :raises IOError: If there is an error writing to the output file.
-        """
-        with open(OUTPUT_FILE, "r", encoding="utf-8") as price_logs:
-            price_logs_reader = csv.reader(price_logs)
-            rows = list(price_logs_reader)
-            last_log_date, _, _ = rows[-1] if rows else ("", "", "")
-
-        today = datetime.now().strftime("%Y-%m-%d")
-        if last_log_date != today:
-            # Append first price calculation of the day
-            with open(OUTPUT_FILE, "a", newline="", encoding="utf-8") as price_logs:
-                price_logs_writer = csv.writer(price_logs)
-                price_logs_writer.writerow(
-                    [today, f"{self.usd_total:.2f}$", f"{self.eur_total:.2f}€"]
-                )
-        else:
-            # Replace the last calculation of today with the most recent one of today
-            with open(OUTPUT_FILE, "r+", newline="", encoding="utf-8") as price_logs:
-                price_logs_reader = csv.reader(price_logs)
-                rows = list(price_logs_reader)
-                rows_without_today = rows[:-1]
-                price_logs.seek(0)
-                price_logs.truncate()
-
-                price_logs_writer = csv.writer(price_logs)
-                price_logs_writer.writerows(rows_without_today)
-                price_logs_writer.writerow(
-                    [today, f"{self.usd_total:.2f}$", f"{self.eur_total:.2f}€"]
-                )
-
-    def read_price_log(self):
-        """
-        Parse the output file to extract dates, dollar prices, and euro prices. This
-        data is used for drawing the plot of past prices.
-
-        :return: A tuple containing three lists: dates, dollar prices, and euro prices.
-        :raises FileNotFoundError: If the output file does not exist.
-        :raises IOError: If there is an error reading the output file.
-        """
-        dates, dollars, euros = [], [], []
-        with open(OUTPUT_FILE, "r", encoding="utf-8") as price_logs:
-            price_logs_reader = csv.reader(price_logs)
-            for row in price_logs_reader:
-                date, price_usd, price_eur = row
-                date = datetime.strptime(date, "%Y-%m-%d")
-                price_usd = float(price_usd.rstrip("$"))
-                price_eur = float(price_eur.rstrip("€"))
-
-                dates.append(date)
-                dollars.append(price_usd)
-                euros.append(price_eur)
-
-        return dates, dollars, euros
-
-    def _construct_recent_calculations_embeds(self):
-        """
-        Construct the embeds for the Discord message that will be sent after a price
-        calculation has been made.
-
-        :return: A list of embeds for the Discord message.
-        """
-        dates, usd_logs, eur_logs = self.read_price_log()
-        dates, usd_logs, eur_logs = reversed(dates), reversed(usd_logs), reversed(eur_logs)
-
-        date_history, usd_history, eur_history = [], [], []
-        for date, usd_log, eur_log in zip(dates, usd_logs, eur_logs):
-            if len(date_history) >= DC_RECENT_HISTORY_LIMIT:
-                break
-            date_history.append(date.strftime("%Y-%m-%d"))
-            usd_history.append(f"${usd_log:.2f}")
-            eur_history.append(f"€{eur_log:.2f}")
-
-        date_history = "\n".join(date_history)
-        usd_history = "\n".join(usd_history)
-        eur_history = "\n".join(eur_history)
-
-        embeds = [
-            {
-                "title": "📊 Recent Price History",
-                "color": 5814783,
-                "fields": [
-                    {
-                        "name": "Date",
-                        "value": date_history,
-                        "inline": True,
-                    },
-                    {
-                        "name": "USD Total",
-                        "value": usd_history,
-                        "inline": True,
-                    },
-                    {
-                        "name": "EUR Total",
-                        "value": eur_history,
-                        "inline": True,
-                    },
-                ],
-            }
-        ]
-
-        return embeds
+        console.print(f"[bold green]{end_string}\n")
 
     def _send_discord_notification(self):
         """Send a message to a Discord webhook if notifications are enabled in the
```
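The CSV bookkeeping deleted above (append one row per day, overwrite today's row on repeat runs) moves into the new `cs2tracker/price_logs.py` (+100 lines), which this diff does not show. A minimal sketch of a `PriceLogs.save` with the same semantics as the removed `_save_price_log`; `OUTPUT_FILE` is assumed to still come from `cs2tracker.constants`:

```python
# Hypothetical sketch based on the removed _save_price_log; the real module is not shown.
import csv
from datetime import datetime

OUTPUT_FILE = "output.csv"  # assumption: the real path comes from cs2tracker.constants


class PriceLogs:
    @classmethod
    def save(cls, usd_total: float, eur_total: float) -> None:
        """Append today's totals, or replace today's earlier entry on repeat runs."""
        with open(OUTPUT_FILE, "r", encoding="utf-8") as price_logs:
            rows = list(csv.reader(price_logs))

        today = datetime.now().strftime("%Y-%m-%d")
        if rows and rows[-1][0] == today:
            rows = rows[:-1]  # keep only the most recent calculation per day
        rows.append([today, f"{usd_total:.2f}$", f"{eur_total:.2f}€"])

        with open(OUTPUT_FILE, "w", newline="", encoding="utf-8") as price_logs:
            csv.writer(price_logs).writerows(rows)
```

Rewriting the whole file is a simplification in this sketch; the removed code kept separate append and seek/truncate paths.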
```diff
@@ -249,25 +98,9 @@ class Scraper:
             "App Settings", "discord_notifications", fallback=False
         )
         webhook_url = self.config.get("User Settings", "discord_webhook_url", fallback=None)
-        webhook_url = None if webhook_url in ("None", "") else webhook_url
 
         if discord_notifications and webhook_url:
-            embeds = self._construct_recent_calculations_embeds()
-            try:
-                response = self.session.post(
-                    url=webhook_url,
-                    json={
-                        "embeds": embeds,
-                        "username": DC_WEBHOOK_USERNAME,
-                        "avatar_url": DC_WEBHOOK_AVATAR_URL,
-                    },
-                )
-                response.raise_for_status()
-                self.console.print("[bold steel_blue3][+] Discord notification sent.\n")
-            except RequestException as error:
-                self.console.print(f"[bold red][!] Failed to send Discord notification: {error}\n")
-            except Exception as error:
-                self.console.print(f"[bold red][!] An unexpected error occurred: {error}\n")
+            DiscordNotifier.notify(webhook_url)
 
     @retry(stop=stop_after_attempt(10))
     def _get_page(self, url):
```
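Webhook delivery moves to the new `cs2tracker/discord_notifier.py` (+87 lines), not shown in this diff. Judging by the call site, `DiscordNotifier.notify(webhook_url)` presumably rebuilds the recent-history embeds (the job of the removed `_construct_recent_calculations_embeds`) and posts them. A rough sketch, with constants carried over from the deleted code where visible; `DC_WEBHOOK_USERNAME`'s value never appears in this diff, so it is a placeholder:

```python
# Hypothetical sketch -- the real cs2tracker/discord_notifier.py is not shown here.
import requests

DC_WEBHOOK_USERNAME = "CS2Tracker"  # placeholder: the real value is not visible in this diff
DC_WEBHOOK_AVATAR_URL = "https://img.icons8.com/?size=100&id=uWQJp2tLXUH6&format=png&color=000000"


class DiscordNotifier:
    @classmethod
    def notify(cls, webhook_url: str) -> None:
        response = requests.post(
            url=webhook_url,
            json={
                "embeds": cls._build_embeds(),
                "username": DC_WEBHOOK_USERNAME,
                "avatar_url": DC_WEBHOOK_AVATAR_URL,
            },
            timeout=10,
        )
        response.raise_for_status()

    @classmethod
    def _build_embeds(cls) -> list:
        # Stand-in for the removed _construct_recent_calculations_embeds, which
        # rendered the last 5 logged dates with their USD/EUR totals as embed fields.
        return [{"title": "📊 Recent Price History", "color": 5814783, "fields": []}]
```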
```diff
@@ -281,14 +114,14 @@
         :raises RetryError: If the retry limit is reached.
         """
         use_proxy = self.config.getboolean("App Settings", "use_proxy", fallback=False)
-        api_key = self.config.get("User Settings", "proxy_api_key", fallback=None)
-        api_key = None if api_key in ("None", "") else api_key
-        if use_proxy and api_key:
+        proxy_api_key = self.config.get("User Settings", "proxy_api_key", fallback=None)
+
+        if use_proxy and proxy_api_key:
             page = self.session.get(
                 url=url,
                 proxies={
-                    "http": HTTP_PROXY_URL.format(api_key),
-                    "https": HTTPS_PROXY_URL.format(api_key),
+                    "http": HTTP_PROXY_URL.format(proxy_api_key),
+                    "https": HTTPS_PROXY_URL.format(proxy_api_key),
                 },
                 verify=False,
             )
@@ -296,8 +129,7 @@
             page = self.session.get(url)
 
         if not page.ok or not page.content:
-            status = page.status_code
-            self.console.print(f"[bold red][!] Failed to load page ({status}). Retrying...\n")
+            console.print(f"[bold red][!] Failed to load page ({page.status_code}). Retrying...\n")
             raise RequestException(f"Failed to load page: {url}")
 
         return page
```
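`_get_page` keeps two paths: a plain `session.get`, or, when `use_proxy` is enabled and an API key is configured, a request routed through the Crawlbase smart proxy with the key filling the proxy-username slot. A standalone equivalent of the proxied branch (`fetch_via_proxy` is illustrative, not part of the package):

```python
import requests

# Same proxy URL templates as in scraper.py; the API key becomes the proxy username.
HTTP_PROXY_URL = "http://{}:@smartproxy.crawlbase.com:8012"
HTTPS_PROXY_URL = "http://{}:@smartproxy.crawlbase.com:8012"


def fetch_via_proxy(session: requests.Session, url: str, proxy_api_key: str) -> requests.Response:
    # verify=False mirrors the scraper's proxied branch, presumably because the
    # smart proxy intercepts TLS and origin certificate verification would fail.
    return session.get(
        url=url,
        proxies={
            "http": HTTP_PROXY_URL.format(proxy_api_key),
            "https": HTTPS_PROXY_URL.format(proxy_api_key),
        },
        verify=False,
    )
```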
```diff
@@ -340,26 +172,33 @@
         hrefs, and names.
         """
         capsule_title = capsule_section.center(MAX_LINE_LEN, SEPARATOR)
-        self.console.print(f"[bold magenta]{capsule_title}\n")
+        console.print(f"[bold magenta]{capsule_title}\n")
 
         capsule_usd_total = 0
-        capsule_page = self._get_page(capsule_info["page"])
-        for capsule_name, capsule_href in zip(capsule_info["names"], capsule_info["items"]):
-            config_capsule_name = capsule_name.replace(" ", "_")
-            owned = self.config.getint(capsule_section, config_capsule_name, fallback=0)
-            if owned == 0:
-                continue
-
-            price_usd = self._parse_item_price(capsule_page, capsule_href)
-            price_usd_owned = round(float(owned * price_usd), 2)
-
-            self.console.print(f"[bold deep_sky_blue4]{capsule_name}")
-            self.console.print(PRICE_INFO.format(owned, price_usd, price_usd_owned))
-            capsule_usd_total += price_usd_owned
+        try:
+            capsule_page = self._get_page(capsule_info["page"])
+            for capsule_name, capsule_href in zip(capsule_info["names"], capsule_info["items"]):
+                config_capsule_name = capsule_name.replace(" ", "_")
+                owned = self.config.getint(capsule_section, config_capsule_name, fallback=0)
+                if owned == 0:
+                    continue
+
+                price_usd = self._parse_item_price(capsule_page, capsule_href)
+                price_usd_owned = round(float(owned * price_usd), 2)
+
+                console.print(f"[bold deep_sky_blue4]{capsule_name}")
+                console.print(PRICE_INFO.format(owned, price_usd, price_usd_owned))
+                capsule_usd_total += price_usd_owned
+        except (RetryError, ValueError):
+            console.print(
+                "[bold red][!] Too many requests. (Consider using proxies to prevent rate limiting)\n"
+            )
+        except Exception as error:
+            console.print(f"[bold red][!] An unexpected error occurred: {error}\n")
 
         return capsule_usd_total
 
-    def _scrape_all_capsule_prices(self):
+    def _scrape_capsule_section_prices(self):
         """Scrape prices for all capsule sections defined in the configuration."""
         capsule_usd_total = 0
         for capsule_section, capsule_info in CAPSULE_INFO.items():
@@ -369,6 +208,19 @@
 
         return capsule_usd_total
 
+    def _market_page_from_href(self, item_href):
+        """
+        Convert an href of a Steam Community Market item to a market page URL.
+
+        :param item_href: The href of the item listing, typically ending with the item's
+            name.
+        :return: A URL string for the Steam Community Market page of the item.
+        """
+        url_encoded_name = item_href.split("/")[-1]
+        page_url = f"https://steamcommunity.com/market/search?q={url_encoded_name}"
+
+        return page_url
+
     def _scrape_case_prices(self):
         """
         Scrape prices for all cases defined in the configuration.
```
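The relocated `_market_page_from_href` also changes behavior: the old version (removed in the next hunk) decoded and lowercased the item name before building the search query, while the new one passes the URL-encoded tail segment through untouched. A quick comparison (the href is a made-up example):

```python
from urllib.parse import unquote

href = "https://steamcommunity.com/market/listings/730/Dreams%20%26%20Nightmares%20Case"

# New behavior: keep the trailing segment URL-encoded.
new_query = href.split("/")[-1]
print(f"https://steamcommunity.com/market/search?q={new_query}")
# -> https://steamcommunity.com/market/search?q=Dreams%20%26%20Nightmares%20Case

# Old behavior: decode, lowercase, and join words with '+'.
old_query = unquote(href.split("/")[-1]).lower().replace(" ", "+")
print(f"https://steamcommunity.com/market/search?q={old_query}")
# -> https://steamcommunity.com/market/search?q=dreams+&+nightmares+case
```

The old form left decoded characters such as `&` raw in the query string, which likely motivated keeping the encoded name.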
```diff
@@ -383,36 +235,28 @@
 
         case_name = config_case_name.replace("_", " ").title()
         case_title = case_name.center(MAX_LINE_LEN, SEPARATOR)
-        self.console.print(f"[bold magenta]{case_title}\n")
+        console.print(f"[bold magenta]{case_title}\n")
 
-        case_page = self._get_page(CASE_PAGES[case_index])
-        price_usd = self._parse_item_price(case_page, CASE_HREFS[case_index])
-        price_usd_owned = round(float(int(owned) * price_usd), 2)
-
-        self.console.print(PRICE_INFO.format(owned, price_usd, price_usd_owned))
-        case_usd_total += price_usd_owned
-
-        if not self.config.getboolean("App Settings", "use_proxy", fallback=False):
-            time.sleep(1)
+        try:
+            case_page_url = self._market_page_from_href(CASE_HREFS[case_index])
+            case_page = self._get_page(case_page_url)
+            price_usd = self._parse_item_price(case_page, CASE_HREFS[case_index])
+            price_usd_owned = round(float(int(owned) * price_usd), 2)
+
+            console.print(PRICE_INFO.format(owned, price_usd, price_usd_owned))
+            case_usd_total += price_usd_owned
+
+            if not self.config.getboolean("App Settings", "use_proxy", fallback=False):
+                time.sleep(1)
+        except (RetryError, ValueError):
+            console.print(
+                "[bold red][!] Too many requests. (Consider using proxies to prevent rate limiting)\n"
+            )
+        except Exception as error:
+            console.print(f"[bold red][!] An unexpected error occurred: {error}\n")
 
         return case_usd_total
 
-    def _market_page_from_href(self, item_href):
-        """
-        Convert an href of a Steam Community Market item to a market page URL. This is
-        done by decoding the URL-encoded item name and formatting it into a search URL.
-
-        :param item_href: The href of the item listing, typically ending with the item's
-            name.
-        :return: A URL string for the Steam Community Market page of the item.
-        """
-        url_encoded_name = item_href.split("/")[-1]
-        decoded_name = unquote(url_encoded_name)
-        decoded_name_query = decoded_name.lower().replace(" ", "+")
-        page_url = f"https://steamcommunity.com/market/search?q={decoded_name_query}"
-
-        return page_url
-
     def _scrape_custom_item_prices(self):
         """
         Scrape prices for custom items defined in the configuration.
@@ -421,129 +265,41 @@
         total price for owned items.
         """
         custom_item_usd_total = 0
-        for config_custom_item_name, owned_and_href in self.config.items("Custom Items"):
-            if " " not in owned_and_href:
-                self.console.print(
-                    "[bold red][!] Invalid custom item format (<item_name> = <owned_count> <item_url>)\n"
-                )
-                continue
-
-            owned, custom_item_href = owned_and_href.split(" ", 1)
+        for custom_item_href, owned in self.config.items("Custom Items"):
             if int(owned) == 0:
                 continue
 
-            custom_item_name = config_custom_item_name.replace("_", " ").title()
+            custom_item_name = unquote(custom_item_href.split("/")[-1])
             custom_item_title = custom_item_name.center(MAX_LINE_LEN, SEPARATOR)
-            self.console.print(f"[bold magenta]{custom_item_title}\n")
-
-            custom_item_page_url = self._market_page_from_href(custom_item_href)
-            custom_item_page = self._get_page(custom_item_page_url)
-            price_usd = self._parse_item_price(custom_item_page, custom_item_href)
-            price_usd_owned = round(float(int(owned) * price_usd), 2)
-
-            self.console.print(PRICE_INFO.format(owned, price_usd, price_usd_owned))
-            custom_item_usd_total += price_usd_owned
+            console.print(f"[bold magenta]{custom_item_title}\n")
 
-            if not self.config.getboolean("App Settings", "use_proxy", fallback=False):
-                time.sleep(1)
+            try:
+                custom_item_page_url = self._market_page_from_href(custom_item_href)
+                custom_item_page = self._get_page(custom_item_page_url)
+                price_usd = self._parse_item_price(custom_item_page, custom_item_href)
+                price_usd_owned = round(float(int(owned) * price_usd), 2)
+
+                console.print(PRICE_INFO.format(owned, price_usd, price_usd_owned))
+                custom_item_usd_total += price_usd_owned
+
+                if not self.config.getboolean("App Settings", "use_proxy", fallback=False):
+                    time.sleep(1)
+            except (RetryError, ValueError):
+                console.print(
+                    "[bold red][!] Too many requests. (Consider using proxies to prevent rate limiting)\n"
+                )
+            except Exception as error:
+                console.print(f"[bold red][!] An unexpected error occurred: {error}\n")
 
         return custom_item_usd_total
 
-    def identify_background_task(self):
-        """
-        Search the OS for a daily background task that runs the scraper.
-
-        :return: True if a background task is found, False otherwise.
-        """
-        if OS == OSType.WINDOWS:
-            cmd = ["schtasks", "/query", "/tn", WIN_BACKGROUND_TASK_NAME]
-            return_code = call(cmd, stdout=DEVNULL, stderr=DEVNULL)
-            found = return_code == 0
-            return found
-        else:
-            # TODO: implement finder for cron jobs
-            return False
-
-    def _toggle_task_batch_file(self, enabled: bool):
-        """
-        Create or delete a batch file that runs the scraper.
-
-        :param enabled: If True, the batch file will be created; if False, the batch
-            file will be deleted.
-        """
-        if enabled:
-            with open(BATCH_FILE, "w", encoding="utf-8") as batch_file:
-                if RUNNING_IN_EXE:
-                    # The python executable is set to the executable itself
-                    # for executables created with PyInstaller
-                    batch_file.write(f"{PYTHON_EXECUTABLE} --only-scrape\n")
-                else:
-                    batch_file.write(f"cd {PROJECT_DIR}\n")
-                    batch_file.write(f"{PYTHON_EXECUTABLE} -m cs2tracker --only-scrape\n")
-        else:
-            if os.path.exists(BATCH_FILE):
-                os.remove(BATCH_FILE)
-
-    def _toggle_background_task_windows(self, enabled: bool):
-        """
-        Create or delete a daily background task that runs the scraper on Windows.
-
-        :param enabled: If True, the task will be created; if False, the task will be
-            deleted.
-        """
-        self._toggle_task_batch_file(enabled)
-        if enabled:
-            cmd = [
-                "schtasks",
-                "/create",
-                "/tn",
-                WIN_BACKGROUND_TASK_NAME,
-                "/tr",
-                WIN_BACKGROUND_TASK_CMD,
-                "/sc",
-                WIN_BACKGROUND_TASK_SCHEDULE,
-                "/st",
-                WIN_BACKGROUND_TASK_TIME,
-            ]
-            return_code = call(cmd, stdout=DEVNULL, stderr=DEVNULL)
-            if return_code == 0:
-                self.console.print("[bold green][+] Background task enabled.")
-            else:
-                self.console.print("[bold red][!] Failed to enable background task.")
-        else:
-            cmd = ["schtasks", "/delete", "/tn", WIN_BACKGROUND_TASK_NAME, "/f"]
-            return_code = call(cmd, stdout=DEVNULL, stderr=DEVNULL)
-            if return_code == 0:
-                self.console.print("[bold green][-] Background task disabled.")
-            else:
-                self.console.print("[bold red][!] Failed to disable background task.")
-
-    def toggle_background_task(self, enabled: bool):
-        """
-        Create or delete a daily background task that runs the scraper.
-
-        :param enabled: If True, the task will be created; if False, the task will be
-            deleted.
-        """
-        if OS == OSType.WINDOWS:
-            self._toggle_background_task_windows(enabled)
-        else:
-            # TODO: implement toggle for cron jobs
-            pass
-
     def toggle_use_proxy(self, enabled: bool):
         """
         Toggle the use of proxies for requests. This will update the configuration file.
 
         :param enabled: If True, proxies will be used; if False, they will not be used.
         """
-        self.config.set("App Settings", "use_proxy", str(enabled))
-        with open(CONFIG_FILE, "w", encoding="utf-8") as config_file:
-            self.config.write(config_file)
-
-        self.console.print(
-            f"[bold green]{'[+] Enabled' if enabled else '[-] Disabled'} proxy usage for requests."
-        )
+        self.config.toggle_use_proxy(enabled)
 
     def toggle_discord_webhook(self, enabled: bool):
         """
```
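All of the Windows Task Scheduler plumbing removed above presumably lands in the new `cs2tracker/background_task.py` (+109 lines), which this diff does not show. The core probe is small; this sketch lifts it from the deleted `identify_background_task` (the standalone function name is illustrative):

```python
from subprocess import DEVNULL, call

WIN_BACKGROUND_TASK_NAME = "CS2Tracker Daily Calculation"


def background_task_exists() -> bool:
    # schtasks exits with code 0 when the named scheduled task is registered.
    cmd = ["schtasks", "/query", "/tn", WIN_BACKGROUND_TASK_NAME]
    return call(cmd, stdout=DEVNULL, stderr=DEVNULL) == 0
```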
```diff
@@ -552,16 +308,10 @@
         :param enabled: If True, the webhook will be used; if False, it will not be
             used.
         """
-        self.config.set("App Settings", "discord_notifications", str(enabled))
-        with open(CONFIG_FILE, "w", encoding="utf-8") as config_file:
-            self.config.write(config_file)
-
-        self.console.print(
-            f"[bold green]{'[+] Enabled' if enabled else '[-] Disabled'} Discord webhook notifications."
-        )
+        self.config.toggle_discord_webhook(enabled)
 
 
 if __name__ == "__main__":
     scraper = Scraper()
-    scraper.console.print(f"[bold yellow]{BANNER}\n")
+    console.print(f"[bold yellow]{BANNER}\n{AUTHOR_STRING}\n")
     scraper.scrape_prices()
```