cs2tracker 2.1.9__py3-none-any.whl → 2.1.11__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of cs2tracker might be problematic.
- cs2tracker/_version.py +2 -2
- cs2tracker/app/__init__.py +3 -0
- cs2tracker/app/application.py +255 -0
- cs2tracker/app/editor_frame.py +247 -0
- cs2tracker/app/scraper_frame.py +76 -0
- cs2tracker/constants.py +101 -115
- cs2tracker/data/config.ini +155 -156
- cs2tracker/main.py +2 -2
- cs2tracker/scraper/__init__.py +9 -0
- cs2tracker/scraper/background_task.py +109 -0
- cs2tracker/scraper/discord_notifier.py +86 -0
- cs2tracker/scraper/scraper.py +348 -0
- cs2tracker/util/__init__.py +9 -0
- cs2tracker/util/price_logs.py +100 -0
- cs2tracker/util/validated_config.py +117 -0
- {cs2tracker-2.1.9.dist-info → cs2tracker-2.1.11.dist-info}/METADATA +7 -4
- cs2tracker-2.1.11.dist-info/RECORD +25 -0
- cs2tracker/application.py +0 -270
- cs2tracker/scraper.py +0 -637
- cs2tracker-2.1.9.dist-info/RECORD +0 -16
- /cs2tracker/{padded_console.py → util/padded_console.py} +0 -0
- {cs2tracker-2.1.9.dist-info → cs2tracker-2.1.11.dist-info}/WHEEL +0 -0
- {cs2tracker-2.1.9.dist-info → cs2tracker-2.1.11.dist-info}/entry_points.txt +0 -0
- {cs2tracker-2.1.9.dist-info → cs2tracker-2.1.11.dist-info}/licenses/LICENSE.md +0 -0
- {cs2tracker-2.1.9.dist-info → cs2tracker-2.1.11.dist-info}/top_level.txt +0 -0
cs2tracker/scraper.py
DELETED
@@ -1,637 +0,0 @@
-import csv
-import os
-import time
-from configparser import ConfigParser
-from datetime import datetime
-from subprocess import DEVNULL, call
-
-from bs4 import BeautifulSoup
-from bs4.element import Tag
-from currency_converter import CurrencyConverter
-from requests import RequestException, Session
-from requests.adapters import HTTPAdapter, Retry
-from tenacity import RetryError, retry, stop_after_attempt
-
-from cs2tracker.constants import (
-    AUTHOR_STRING,
-    BANNER,
-    BATCH_FILE,
-    CAPSULE_INFO,
-    CASE_HREFS,
-    CONFIG_FILE,
-    OS,
-    OUTPUT_FILE,
-    PROJECT_DIR,
-    PYTHON_EXECUTABLE,
-    RUNNING_IN_EXE,
-    OSType,
-)
-from cs2tracker.padded_console import PaddedConsole
-
-MAX_LINE_LEN = 72
-SEPARATOR = "-"
-PRICE_INFO = "Owned: {:<10} Steam market price: ${:<10} Total: ${:<10}\n"
-
-HTTP_PROXY_URL = "http://{}:@smartproxy.crawlbase.com:8012"
-HTTPS_PROXY_URL = "http://{}:@smartproxy.crawlbase.com:8012"
-
-DC_WEBHOOK_USERNAME = "CS2Tracker"
-DC_WEBHOOK_AVATAR_URL = "https://img.icons8.com/?size=100&id=uWQJp2tLXUH6&format=png&color=000000"
-DC_RECENT_HISTORY_LIMIT = 5
-
-WIN_BACKGROUND_TASK_NAME = "CS2Tracker Daily Calculation"
-WIN_BACKGROUND_TASK_SCHEDULE = "DAILY"
-WIN_BACKGROUND_TASK_TIME = "12:00"
-WIN_BACKGROUND_TASK_CMD = (
-    f"powershell -WindowStyle Hidden -Command \"Start-Process '{BATCH_FILE}' -WindowStyle Hidden\""
-)
-
-
-class Scraper:
-    def __init__(self):
-        """Initialize the Scraper class."""
-        self.console = PaddedConsole()
-        self.parse_config()
-        self._start_session()
-
-        self.usd_total = 0
-        self.eur_total = 0
-
-    def _validate_config_sections(self):
-        """Validate that the configuration file has all required sections."""
-        if not self.config.has_section("User Settings"):
-            raise ValueError("Missing 'User Settings' section in the configuration file.")
-        if not self.config.has_section("App Settings"):
-            raise ValueError("Missing 'App Settings' section in the configuration file.")
-        if not self.config.has_section("Custom Items"):
-            raise ValueError("Missing 'Custom Items' section in the configuration file.")
-        if not self.config.has_section("Cases"):
-            raise ValueError("Missing 'Cases' section in the configuration file.")
-        for capsule_section in CAPSULE_INFO:
-            if not self.config.has_section(capsule_section):
-                raise ValueError(f"Missing '{capsule_section}' section in the configuration file.")
-
-    def _validate_config_values(self):
-        """Validate that the configuration file has valid values for all sections."""
-        try:
-            for custom_item_name, custom_item_owned in self.config.items("Custom Items"):
-                if " " not in custom_item_owned:
-                    raise ValueError(
-                        f"Invalid custom item format (<item_name> = <owned_count> <item_url>): {custom_item_name} = {custom_item_owned}"
-                    )
-                owned, _ = custom_item_owned.split(" ", 1)
-                if int(owned) < 0:
-                    raise ValueError(
-                        f"Invalid value in 'Custom Items' section: {custom_item_name} = {custom_item_owned}"
-                    )
-            for case_name, case_owned in self.config.items("Cases"):
-                if int(case_owned) < 0:
-                    raise ValueError(
-                        f"Invalid value in 'Cases' section: {case_name} = {case_owned}"
-                    )
-            for capsule_section in CAPSULE_INFO:
-                for capsule_name, capsule_owned in self.config.items(capsule_section):
-                    if int(capsule_owned) < 0:
-                        raise ValueError(
-                            f"Invalid value in '{capsule_section}' section: {capsule_name} = {capsule_owned}"
-                        )
-        except ValueError as error:
-            if "Invalid " in str(error):
-                raise
-            raise ValueError("Invalid value type. All values must be integers.") from error
-
-    def _validate_config(self):
-        """
-        Validate the configuration file to ensure all required sections exist with the
-        right values.
-
-        :raises ValueError: If any required section is missing or if any value is
-            invalid.
-        """
-        self._validate_config_sections()
-        self._validate_config_values()
-
-    def parse_config(self):
-        """
-        Parse the configuration file to read settings and user-owned items.
-
-        Sets self.valid_config to True if the configuration is valid, and False if it is
-        not.
-        """
-        self.config = ConfigParser(interpolation=None)
-        self.config.read(CONFIG_FILE)
-        try:
-            self._validate_config()
-            self.valid_config = True
-        except ValueError as error:
-            self.console.print(f"[bold red][!] Configuration error: {error}")
-            self.valid_config = False
-
-    def _start_session(self):
-        """Start a requests session with custom headers and retry logic."""
-        self.session = Session()
-        self.session.headers.update(
-            {
-                "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.36"
-            }
-        )
-        retries = Retry(total=5, backoff_factor=0.1, status_forcelist=[500, 502, 503, 504, 520])
-        self.session.mount("http://", HTTPAdapter(max_retries=retries))
-        self.session.mount("https://", HTTPAdapter(max_retries=retries))
-
-    def scrape_prices(self):
-        """Scrape prices for capsules and cases, calculate totals in USD and EUR, and
-        print/save the results.
-        """
-        if not self.valid_config:
-            self.console.print(
-                "[bold red][!] Invalid configuration. Please fix the config file before running."
-            )
-            return
-
-        capsule_usd_total = self._scrape_capsule_section_prices()
-        case_usd_total = self._scrape_case_prices()
-        custom_item_usd_total = self._scrape_custom_item_prices()
-
-        self.usd_total += capsule_usd_total
-        self.usd_total += case_usd_total
-        self.usd_total += custom_item_usd_total
-        self.eur_total = CurrencyConverter().convert(self.usd_total, "USD", "EUR")
-
-        self._print_total()
-        self._save_price_log()
-        self._send_discord_notification()
-
-        # Reset totals for next run
-        self.usd_total, self.eur_total = 0, 0
-
-    def _print_total(self):
-        """Print the total prices in USD and EUR, formatted with titles and
-        separators.
-        """
-        usd_title = "USD Total".center(MAX_LINE_LEN, SEPARATOR)
-        self.console.print(f"[bold green]{usd_title}")
-        self.console.print(f"${self.usd_total:.2f}")
-
-        eur_title = "EUR Total".center(MAX_LINE_LEN, SEPARATOR)
-        self.console.print(f"[bold green]{eur_title}")
-        self.console.print(f"€{self.eur_total:.2f}")
-
-        end_string = SEPARATOR * MAX_LINE_LEN
-        self.console.print(f"[bold green]{end_string}\n")
-
-    def _save_price_log(self):
-        """
-        Save the current date and total prices in USD and EUR to a CSV file.
-
-        This will append a new entry to the output file if no entry has been made for
-        today.
-
-        :raises FileNotFoundError: If the output file does not exist.
-        :raises IOError: If there is an error writing to the output file.
-        """
-        with open(OUTPUT_FILE, "r", encoding="utf-8") as price_logs:
-            price_logs_reader = csv.reader(price_logs)
-            rows = list(price_logs_reader)
-            last_log_date, _, _ = rows[-1] if rows else ("", "", "")
-
-        today = datetime.now().strftime("%Y-%m-%d")
-        if last_log_date != today:
-            # Append first price calculation of the day
-            with open(OUTPUT_FILE, "a", newline="", encoding="utf-8") as price_logs:
-                price_logs_writer = csv.writer(price_logs)
-                price_logs_writer.writerow(
-                    [today, f"{self.usd_total:.2f}$", f"{self.eur_total:.2f}€"]
-                )
-        else:
-            # Replace the last calculation of today with the most recent one of today
-            with open(OUTPUT_FILE, "r+", newline="", encoding="utf-8") as price_logs:
-                price_logs_reader = csv.reader(price_logs)
-                rows = list(price_logs_reader)
-                rows_without_today = rows[:-1]
-                price_logs.seek(0)
-                price_logs.truncate()
-
-                price_logs_writer = csv.writer(price_logs)
-                price_logs_writer.writerows(rows_without_today)
-                price_logs_writer.writerow(
-                    [today, f"{self.usd_total:.2f}$", f"{self.eur_total:.2f}€"]
-                )
-
-    def read_price_log(self):
-        """
-        Parse the output file to extract dates, dollar prices, and euro prices. This
-        data is used for drawing the plot of past prices.
-
-        :return: A tuple containing three lists: dates, dollar prices, and euro prices.
-        :raises FileNotFoundError: If the output file does not exist.
-        :raises IOError: If there is an error reading the output file.
-        """
-        dates, dollars, euros = [], [], []
-        with open(OUTPUT_FILE, "r", encoding="utf-8") as price_logs:
-            price_logs_reader = csv.reader(price_logs)
-            for row in price_logs_reader:
-                date, price_usd, price_eur = row
-                date = datetime.strptime(date, "%Y-%m-%d")
-                price_usd = float(price_usd.rstrip("$"))
-                price_eur = float(price_eur.rstrip("€"))
-
-                dates.append(date)
-                dollars.append(price_usd)
-                euros.append(price_eur)
-
-        return dates, dollars, euros
-
-    def _construct_recent_calculations_embeds(self):
-        """
-        Construct the embeds for the Discord message that will be sent after a price
-        calculation has been made.
-
-        :return: A list of embeds for the Discord message.
-        """
-        dates, usd_logs, eur_logs = self.read_price_log()
-        dates, usd_logs, eur_logs = reversed(dates), reversed(usd_logs), reversed(eur_logs)
-
-        date_history, usd_history, eur_history = [], [], []
-        for date, usd_log, eur_log in zip(dates, usd_logs, eur_logs):
-            if len(date_history) >= DC_RECENT_HISTORY_LIMIT:
-                break
-            date_history.append(date.strftime("%Y-%m-%d"))
-            usd_history.append(f"${usd_log:.2f}")
-            eur_history.append(f"€{eur_log:.2f}")
-
-        date_history = "\n".join(date_history)
-        usd_history = "\n".join(usd_history)
-        eur_history = "\n".join(eur_history)
-
-        embeds = [
-            {
-                "title": "📊 Recent Price History",
-                "color": 5814783,
-                "fields": [
-                    {
-                        "name": "Date",
-                        "value": date_history,
-                        "inline": True,
-                    },
-                    {
-                        "name": "USD Total",
-                        "value": usd_history,
-                        "inline": True,
-                    },
-                    {
-                        "name": "EUR Total",
-                        "value": eur_history,
-                        "inline": True,
-                    },
-                ],
-            }
-        ]
-
-        return embeds
-
-    def _send_discord_notification(self):
-        """Send a message to a Discord webhook if notifications are enabled in the
-        config file and a webhook URL is provided.
-        """
-        discord_notifications = self.config.getboolean(
-            "App Settings", "discord_notifications", fallback=False
-        )
-        webhook_url = self.config.get("User Settings", "discord_webhook_url", fallback=None)
-        webhook_url = None if webhook_url in ("None", "") else webhook_url
-
-        if discord_notifications and webhook_url:
-            embeds = self._construct_recent_calculations_embeds()
-            try:
-                response = self.session.post(
-                    url=webhook_url,
-                    json={
-                        "embeds": embeds,
-                        "username": DC_WEBHOOK_USERNAME,
-                        "avatar_url": DC_WEBHOOK_AVATAR_URL,
-                    },
-                )
-                response.raise_for_status()
-                self.console.print("[bold steel_blue3][+] Discord notification sent.\n")
-            except RequestException as error:
-                self.console.print(f"[bold red][!] Failed to send Discord notification: {error}\n")
-            except Exception as error:
-                self.console.print(f"[bold red][!] An unexpected error occurred: {error}\n")
-
-    @retry(stop=stop_after_attempt(10))
-    def _get_page(self, url):
-        """
-        Get the page content from the given URL, using a proxy if configured. If the
-        request fails, it will retry up to 10 times.
-
-        :param url: The URL to fetch the page from.
-        :return: The HTTP response object containing the page content.
-        :raises RequestException: If the request fails.
-        :raises RetryError: If the retry limit is reached.
-        """
-        use_proxy = self.config.getboolean("App Settings", "use_proxy", fallback=False)
-        api_key = self.config.get("User Settings", "api_key", fallback=None)
-        api_key = None if api_key in ("None", "") else api_key
-
-        if use_proxy and api_key:
-            page = self.session.get(
-                url=url,
-                proxies={
-                    "http": HTTP_PROXY_URL.format(api_key),
-                    "https": HTTPS_PROXY_URL.format(api_key),
-                },
-                verify=False,
-            )
-        else:
-            page = self.session.get(url)
-
-        if not page.ok or not page.content:
-            self.console.print(
-                f"[bold red][!] Failed to load page ({page.status_code}). Retrying...\n"
-            )
-            raise RequestException(f"Failed to load page: {url}")
-
-        return page
-
-    def _parse_item_price(self, item_page, item_href):
-        """
-        Parse the price of an item from the given steamcommunity market page and item
-        href.
-
-        :param item_page: The HTTP response object containing the item page content.
-        :param item_href: The href of the item listing to find the price for.
-        :return: The price of the item as a float.
-        :raises ValueError: If the item listing or price span cannot be found.
-        """
-        item_soup = BeautifulSoup(item_page.content, "html.parser")
-        item_listing = item_soup.find("a", attrs={"href": f"{item_href}"})
-        if not isinstance(item_listing, Tag):
-            raise ValueError(f"Failed to find item listing: {item_href}")
-
-        item_price_span = item_listing.find("span", attrs={"class": "normal_price"})
-        if not isinstance(item_price_span, Tag):
-            raise ValueError(f"Failed to find price span in item listing: {item_href}")
-
-        price_str = item_price_span.text.split()[2]
-        price = float(price_str.replace("$", ""))
-
-        return price
-
-    def _scrape_capsule_prices(
-        self,
-        capsule_section,
-        capsule_info,
-    ):
-        """
-        Scrape prices for a specific capsule section, printing the details to the
-        console.
-
-        :param capsule_section: The section name in the config for the capsule.
-        :param capsule_info: A dictionary containing information about the capsule page,
-            hrefs, and names.
-        """
-        capsule_title = capsule_section.center(MAX_LINE_LEN, SEPARATOR)
-        self.console.print(f"[bold magenta]{capsule_title}\n")
-
-        capsule_usd_total = 0
-        try:
-            capsule_page = self._get_page(capsule_info["page"])
-            for capsule_name, capsule_href in zip(capsule_info["names"], capsule_info["items"]):
-                config_capsule_name = capsule_name.replace(" ", "_")
-                owned = self.config.getint(capsule_section, config_capsule_name, fallback=0)
-                if owned == 0:
-                    continue
-
-                price_usd = self._parse_item_price(capsule_page, capsule_href)
-                price_usd_owned = round(float(owned * price_usd), 2)
-
-                self.console.print(f"[bold deep_sky_blue4]{capsule_name}")
-                self.console.print(PRICE_INFO.format(owned, price_usd, price_usd_owned))
-                capsule_usd_total += price_usd_owned
-        except (RetryError, ValueError):
-            self.console.print(
-                "[bold red][!] Failed to scrape capsule prices. (Consider using proxies to prevent rate limiting)\n"
-            )
-        except Exception as error:
-            self.console.print(
-                f"[bold red][!] An unexpected error occurred while scraping capsule prices: {error}\n"
-            )
-
-        return capsule_usd_total
-
-    def _scrape_capsule_section_prices(self):
-        """Scrape prices for all capsule sections defined in the configuration."""
-        capsule_usd_total = 0
-        for capsule_section, capsule_info in CAPSULE_INFO.items():
-            # Only scrape capsule sections where the user owns at least one item
-            if any(int(owned) > 0 for _, owned in self.config.items(capsule_section)):
-                capsule_usd_total += self._scrape_capsule_prices(capsule_section, capsule_info)
-
-        return capsule_usd_total
-
-    def _market_page_from_href(self, item_href):
-        """
-        Convert an href of a Steam Community Market item to a market page URL.
-
-        :param item_href: The href of the item listing, typically ending with the item's
-            name.
-        :return: A URL string for the Steam Community Market page of the item.
-        """
-        url_encoded_name = item_href.split("/")[-1]
-        page_url = f"https://steamcommunity.com/market/search?q={url_encoded_name}"
-
-        return page_url
-
-    def _scrape_case_prices(self):
-        """
-        Scrape prices for all cases defined in the configuration.
-
-        For each case, it prints the case name, owned count, price per item, and total
-        price for owned items.
-        """
-        case_usd_total = 0
-        for case_index, (config_case_name, owned) in enumerate(self.config.items("Cases")):
-            if int(owned) == 0:
-                continue
-
-            case_name = config_case_name.replace("_", " ").title()
-            case_title = case_name.center(MAX_LINE_LEN, SEPARATOR)
-            self.console.print(f"[bold magenta]{case_title}\n")
-
-            try:
-                case_page_url = self._market_page_from_href(CASE_HREFS[case_index])
-                case_page = self._get_page(case_page_url)
-                price_usd = self._parse_item_price(case_page, CASE_HREFS[case_index])
-                price_usd_owned = round(float(int(owned) * price_usd), 2)
-
-                self.console.print(PRICE_INFO.format(owned, price_usd, price_usd_owned))
-                case_usd_total += price_usd_owned
-
-                if not self.config.getboolean("App Settings", "use_proxy", fallback=False):
-                    time.sleep(1)
-            except (RetryError, ValueError):
-                self.console.print(
-                    "[bold red][!] Failed to scrape case prices. (Consider using proxies to prevent rate limiting)\n"
-                )
-            except Exception as error:
-                self.console.print(
-                    f"[bold red][!] An unexpected error occurred while scraping case prices: {error}\n"
-                )
-
-        return case_usd_total
-
-    def _scrape_custom_item_prices(self):
-        """
-        Scrape prices for custom items defined in the configuration.
-
-        For each custom item, it prints the item name, owned count, price per item, and
-        total price for owned items.
-        """
-        custom_item_usd_total = 0
-        for config_custom_item_name, owned_and_href in self.config.items("Custom Items"):
-            owned, custom_item_href = owned_and_href.split(" ", 1)
-            if int(owned) == 0:
-                continue
-
-            custom_item_name = config_custom_item_name.replace("_", " ").title()
-            custom_item_title = custom_item_name.center(MAX_LINE_LEN, SEPARATOR)
-            self.console.print(f"[bold magenta]{custom_item_title}\n")
-
-            try:
-                custom_item_page_url = self._market_page_from_href(custom_item_href)
-                custom_item_page = self._get_page(custom_item_page_url)
-                price_usd = self._parse_item_price(custom_item_page, custom_item_href)
-                price_usd_owned = round(float(int(owned) * price_usd), 2)
-
-                self.console.print(PRICE_INFO.format(owned, price_usd, price_usd_owned))
-                custom_item_usd_total += price_usd_owned
-
-                if not self.config.getboolean("App Settings", "use_proxy", fallback=False):
-                    time.sleep(1)
-            except (RetryError, ValueError):
-                self.console.print(
-                    "[bold red][!] Failed to scrape custom item prices. (Consider using proxies to prevent rate limiting)\n"
-                )
-            except Exception as error:
-                self.console.print(
-                    f"[bold red][!] An unexpected error occurred while scraping custom item prices: {error}\n"
-                )
-
-        return custom_item_usd_total
-
-    def identify_background_task(self):
-        """
-        Search the OS for a daily background task that runs the scraper.
-
-        :return: True if a background task is found, False otherwise.
-        """
-        if OS == OSType.WINDOWS:
-            cmd = ["schtasks", "/query", "/tn", WIN_BACKGROUND_TASK_NAME]
-            return_code = call(cmd, stdout=DEVNULL, stderr=DEVNULL)
-            found = return_code == 0
-            return found
-        else:
-            # TODO: implement finder for cron jobs
-            return False
-
-    def _toggle_task_batch_file(self, enabled: bool):
-        """
-        Create or delete a batch file that runs the scraper.
-
-        :param enabled: If True, the batch file will be created; if False, the batch
-            file will be deleted.
-        """
-        if enabled:
-            with open(BATCH_FILE, "w", encoding="utf-8") as batch_file:
-                if RUNNING_IN_EXE:
-                    # The python executable is set to the executable itself
-                    # for executables created with PyInstaller
-                    batch_file.write(f"{PYTHON_EXECUTABLE} --only-scrape\n")
-                else:
-                    batch_file.write(f"cd {PROJECT_DIR}\n")
-                    batch_file.write(f"{PYTHON_EXECUTABLE} -m cs2tracker --only-scrape\n")
-        else:
-            if os.path.exists(BATCH_FILE):
-                os.remove(BATCH_FILE)
-
-    def _toggle_background_task_windows(self, enabled: bool):
-        """
-        Create or delete a daily background task that runs the scraper on Windows.
-
-        :param enabled: If True, the task will be created; if False, the task will be
-            deleted.
-        """
-        self._toggle_task_batch_file(enabled)
-        if enabled:
-            cmd = [
-                "schtasks",
-                "/create",
-                "/tn",
-                WIN_BACKGROUND_TASK_NAME,
-                "/tr",
-                WIN_BACKGROUND_TASK_CMD,
-                "/sc",
-                WIN_BACKGROUND_TASK_SCHEDULE,
-                "/st",
-                WIN_BACKGROUND_TASK_TIME,
-            ]
-            return_code = call(cmd, stdout=DEVNULL, stderr=DEVNULL)
-            if return_code == 0:
-                self.console.print("[bold green][+] Background task enabled.")
-            else:
-                self.console.print("[bold red][!] Failed to enable background task.")
-        else:
-            cmd = ["schtasks", "/delete", "/tn", WIN_BACKGROUND_TASK_NAME, "/f"]
-            return_code = call(cmd, stdout=DEVNULL, stderr=DEVNULL)
-            if return_code == 0:
-                self.console.print("[bold green][-] Background task disabled.")
-            else:
-                self.console.print("[bold red][!] Failed to disable background task.")
-
-    def toggle_background_task(self, enabled: bool):
-        """
-        Create or delete a daily background task that runs the scraper.
-
-        :param enabled: If True, the task will be created; if False, the task will be
-            deleted.
-        """
-        if OS == OSType.WINDOWS:
-            self._toggle_background_task_windows(enabled)
-        else:
-            # TODO: implement toggle for cron jobs
-            pass
-
-    def toggle_use_proxy(self, enabled: bool):
-        """
-        Toggle the use of proxies for requests. This will update the configuration file.
-
-        :param enabled: If True, proxies will be used; if False, they will not be used.
-        """
-        self.config.set("App Settings", "use_proxy", str(enabled))
-        with open(CONFIG_FILE, "w", encoding="utf-8") as config_file:
-            self.config.write(config_file)
-
-        self.console.print(
-            f"[bold green]{'[+] Enabled' if enabled else '[-] Disabled'} proxy usage for requests."
-        )
-
-    def toggle_discord_webhook(self, enabled: bool):
-        """
-        Toggle the use of a Discord webhook to notify users of price calculations.
-
-        :param enabled: If True, the webhook will be used; if False, it will not be
-            used.
-        """
-        self.config.set("App Settings", "discord_notifications", str(enabled))
-        with open(CONFIG_FILE, "w", encoding="utf-8") as config_file:
-            self.config.write(config_file)
-
-        self.console.print(
-            f"[bold green]{'[+] Enabled' if enabled else '[-] Disabled'} Discord webhook notifications."
-        )
-
-
-if __name__ == "__main__":
-    scraper = Scraper()
-    scraper.console.print(f"[bold yellow]{BANNER}\n{AUTHOR_STRING}\n")
-    scraper.scrape_prices()
cs2tracker-2.1.9.dist-info/RECORD
DELETED
@@ -1,16 +0,0 @@
-cs2tracker/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-cs2tracker/__main__.py,sha256=Ub--oSMv48YzfWF1CZqYlkn1-HvZ7Bhxoc7urn1oY6o,249
-cs2tracker/_version.py,sha256=RhQLK1m9uLWnJF6WDfppvypZmuwbm4NwHzCGbpSNQe8,511
-cs2tracker/application.py,sha256=R_a8n0AgKLCHEG5O8K8_Q246Q_sopzRHvOQi4HtA0sE,9632
-cs2tracker/constants.py,sha256=2jbg5tkRe3dFb_h47NG_CWNr11to3d3ojrr2s3YjyMY,27471
-cs2tracker/main.py,sha256=jXEgZIpM_cDENXOaXCVTg2n50Xso7btI5FImg9BBeXQ,1041
-cs2tracker/padded_console.py,sha256=lPEa34p-8LTmTbpf-2S5uYPaA2UmsIOPq2_UoVhMRgU,674
-cs2tracker/scraper.py,sha256=G-7e2zwCoFt6ZCdo-PwLUtD7ZYd4Wdk2r0jgDoctN6A,26039
-cs2tracker/data/config.ini,sha256=960jvrTt6ZOwCrHTVC5Q4Uw9lVGNnVRY7-kG6-k_Mig,5197
-cs2tracker/data/output.csv,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-cs2tracker-2.1.9.dist-info/licenses/LICENSE.md,sha256=G5wqQ_8KGA808kVuF-Fpu_Yhteg8K_5ux9n2v8eQK7s,1069
-cs2tracker-2.1.9.dist-info/METADATA,sha256=TyaeU7b7hlEw32XtOQgp9U8xOf0bisfTYspqoGZkA7Y,3734
-cs2tracker-2.1.9.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-cs2tracker-2.1.9.dist-info/entry_points.txt,sha256=K8IwDIkg8QztSB9g9c89B9jR_2pG4QyJGrNs4z5RcZw,63
-cs2tracker-2.1.9.dist-info/top_level.txt,sha256=2HB4xDDOxaU5BDc_yvdi9UlYLgL768n8aR-hRhFM6VQ,11
-cs2tracker-2.1.9.dist-info/RECORD,,
/cs2tracker/{padded_console.py → util/padded_console.py}
File without changes

{cs2tracker-2.1.9.dist-info → cs2tracker-2.1.11.dist-info}/WHEEL
File without changes

{cs2tracker-2.1.9.dist-info → cs2tracker-2.1.11.dist-info}/entry_points.txt
File without changes

{cs2tracker-2.1.9.dist-info → cs2tracker-2.1.11.dist-info}/licenses/LICENSE.md
File without changes

{cs2tracker-2.1.9.dist-info → cs2tracker-2.1.11.dist-info}/top_level.txt
File without changes