cs2tracker 2.1.0-py3-none-any.whl → 2.1.2-py3-none-any.whl

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
cs2tracker/scraper.py CHANGED
@@ -1,348 +1,305 @@
1
- import configparser
2
1
  import csv
3
- import datetime
4
2
  import os
5
3
  import time
4
+ from configparser import ConfigParser
5
+ from datetime import datetime
6
6
 
7
- import requests
8
7
  from bs4 import BeautifulSoup
8
+ from bs4.element import Tag
9
9
  from currency_converter import CurrencyConverter
10
+ from requests import RequestException, Session
10
11
  from requests.adapters import HTTPAdapter, Retry
11
12
  from rich.console import Console
12
- from tenacity import retry, stop_after_attempt
13
+ from tenacity import RetryError, retry, stop_after_attempt
13
14
 
14
- from .constants import (
15
- CAPSULE_HREFS,
16
- CAPSULE_NAMES,
17
- CAPSULE_NAMES_GENERIC,
18
- CAPSULE_PAGES,
15
+ from cs2tracker.constants import (
16
+ CAPSULE_INFO,
19
17
  CASE_HREFS,
20
- CASE_NAMES,
21
18
  CASE_PAGES,
22
19
  CONFIG_FILE,
23
20
  OUTPUT_FILE,
24
21
  )
25
22
 
26
23
  MAX_LINE_LEN = 72
27
- PADDING_LEN = MAX_LINE_LEN // 2 - 1
28
- PADDING = "-" * PADDING_LEN
24
+ SEPARATOR = "-"
25
+ PRICE_INFO = "Owned: {} Steam market price: ${} Total: ${}\n"
29
26
 
30
27
 
31
28
  class Scraper:
32
29
  def __init__(self):
33
- self.api_key = None
34
- self.use_proxy = False
35
-
36
- self.case_quantities = []
37
- self.rmr_quantities = []
38
- self.stockholm_quantities = []
39
- self.antwerp_quantities = []
40
- self.rio_quantities = []
41
- self.paris_quantities = []
42
- self.copenhagen_quantities = []
43
- self.shanghai_quantities = []
44
- self.austin_quantities = []
45
-
46
- self.total_price = 0
47
- self.total_price_euro = 0
48
-
49
- self.session = requests.Session()
30
+ """Initialize the Scraper class."""
31
+ self.console = Console()
32
+ self.parse_config()
33
+ self._start_session()
34
+
35
+ self.usd_total = 0
36
+ self.eur_total = 0
37
+
38
+ def parse_config(self):
39
+ """Parse the configuration file to read settings and user-owned items."""
40
+ self.config = ConfigParser()
41
+ self.config.read(CONFIG_FILE)
42
+
43
+ def _start_session(self):
44
+ """Start a requests session with custom headers and retry logic."""
45
+ self.session = Session()
50
46
  self.session.headers.update(
51
47
  {
52
48
  "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.36"
53
49
  }
54
50
  )
55
- retries = Retry(
56
- total=5, backoff_factor=0.1, status_forcelist=[500, 502, 503, 504, 520]
57
- )
51
+ retries = Retry(total=5, backoff_factor=0.1, status_forcelist=[500, 502, 503, 504, 520])
58
52
  self.session.mount("http://", HTTPAdapter(max_retries=retries))
59
53
  self.session.mount("https://", HTTPAdapter(max_retries=retries))
60
54
 
61
- self.console = Console()
62
-
63
- config = self.parse_config()
64
- self.set_config(config)
65
-
66
55
  def scrape_prices(self):
67
- for capsule_page_url in CAPSULE_PAGES:
68
- capsule_hrefs = (
69
- capsule_name
70
- ) = capsule_names_generic = capsule_quantities = None
71
- if "rmr" in capsule_page_url:
72
- capsule_name = "2020 RMR"
73
- capsule_quantities = self.rmr_quantities
74
- capsule_hrefs = CAPSULE_HREFS[0:3]
75
- capsule_names_generic = CAPSULE_NAMES_GENERIC[0:3]
76
- elif "stockholm" in capsule_page_url:
77
- capsule_name = "Stockholm"
78
- capsule_quantities = self.stockholm_quantities
79
- capsule_hrefs = CAPSULE_HREFS[3:8]
80
- capsule_names_generic = CAPSULE_NAMES_GENERIC[0:4] + [
81
- CAPSULE_NAMES_GENERIC[-1]
82
- ]
83
- elif "antwerp" in capsule_page_url:
84
- capsule_name = "Antwerp"
85
- capsule_quantities = self.antwerp_quantities
86
- capsule_hrefs = CAPSULE_HREFS[8:15]
87
- capsule_names_generic = CAPSULE_NAMES_GENERIC[0:7]
88
- elif "rio" in capsule_page_url:
89
- capsule_name = "Rio"
90
- capsule_quantities = self.rio_quantities
91
- capsule_hrefs = CAPSULE_HREFS[15:22]
92
- capsule_names_generic = CAPSULE_NAMES_GENERIC[0:7]
93
- elif "paris" in capsule_page_url:
94
- capsule_name = "Paris"
95
- capsule_quantities = self.paris_quantities
96
- capsule_hrefs = CAPSULE_HREFS[22:29]
97
- capsule_names_generic = CAPSULE_NAMES_GENERIC[0:7]
98
- elif "copenhagen" in capsule_page_url:
99
- capsule_name = "Copenhagen"
100
- capsule_quantities = self.copenhagen_quantities
101
- capsule_hrefs = CAPSULE_HREFS[29:36]
102
- capsule_names_generic = CAPSULE_NAMES_GENERIC[0:7]
103
- elif "shanghai" in capsule_page_url:
104
- capsule_name = "Shanghai"
105
- capsule_quantities = self.shanghai_quantities
106
- capsule_hrefs = CAPSULE_HREFS[36:43]
107
- capsule_names_generic = CAPSULE_NAMES_GENERIC[0:7]
108
- elif "austin" in capsule_page_url:
109
- capsule_name = "Austin"
110
- capsule_quantities = self.austin_quantities
111
- capsule_hrefs = CAPSULE_HREFS[43:50]
112
- capsule_names_generic = CAPSULE_NAMES_GENERIC[0:7]
113
-
114
- self._scrape_prices_capsule(
115
- capsule_page_url,
116
- capsule_hrefs,
117
- capsule_name,
118
- capsule_names_generic,
119
- capsule_quantities,
56
+ """Scrape prices for capsules and cases, calculate totals in USD and EUR, and
57
+ print/save the results.
58
+ """
59
+ capsule_usd_total = 0
60
+ try:
61
+ capsule_usd_total = self.scrape_capsule_section_prices()
62
+ except (RequestException, AttributeError, RetryError, ValueError):
63
+ self.console.print(
64
+ "[bold red]Failed to scrape capsule prices. (Consider using proxies to prevent rate limiting)\n"
120
65
  )
121
66
 
122
- self._scrape_prices_case(
123
- self.case_quantities, CASE_PAGES, CASE_HREFS, CASE_NAMES
124
- )
67
+ case_usd_total = 0
68
+ try:
69
+ case_usd_total = self._scrape_case_prices()
70
+ except (RequestException, AttributeError, RetryError, ValueError):
71
+ self.console.print(
72
+ "[bold red]Failed to scrape case prices. (Consider using proxies to prevent rate limiting)\n"
73
+ )
125
74
 
126
- def print_total(self):
127
- usd_string = "USD Total".center(
128
- MAX_LINE_LEN, "-"
129
- ) # f"{PADDING}USD Total{PADDING}"[:MAX_LINE_LEN]
130
- self.console.print(f"[bold green]{usd_string}")
131
- self.console.print(f"${self.total_price:.2f}")
75
+ self.usd_total += capsule_usd_total
76
+ self.usd_total += case_usd_total
77
+ self.eur_total = CurrencyConverter().convert(self.usd_total, "USD", "EUR")
132
78
 
133
- self.total_price_euro = CurrencyConverter().convert(
134
- self.total_price, "USD", "EUR"
135
- )
136
- eur_string = "EUR Total".center(
137
- MAX_LINE_LEN, "-"
138
- ) # f"{PADDING}EUR Total{PADDING}"[:MAX_LINE_LEN]
139
- self.console.print(f"[bold green]{eur_string}")
140
- self.console.print(f"€{self.total_price_euro:.2f}")
141
- end_string = f"{PADDING}{PADDING}{PADDING}"[:MAX_LINE_LEN]
79
+ self._print_total()
80
+ self._save_price_log()
81
+
82
+ # reset totals for next run
83
+ self.usd_total, self.eur_total = 0, 0
84
+
85
+ def _print_total(self):
86
+ """Print the total prices in USD and EUR, formatted with titles and
87
+ separators.
88
+ """
89
+ usd_title = "USD Total".center(MAX_LINE_LEN, SEPARATOR)
90
+ self.console.print(f"[bold green]{usd_title}")
91
+ self.console.print(f"${self.usd_total:.2f}")
92
+
93
+ eur_title = "EUR Total".center(MAX_LINE_LEN, SEPARATOR)
94
+ self.console.print(f"[bold green]{eur_title}")
95
+ self.console.print(f"€{self.eur_total:.2f}")
96
+
97
+ end_string = SEPARATOR * MAX_LINE_LEN
142
98
  self.console.print(f"[bold green]{end_string}\n")
143
99
 
144
- def save_to_file(self):
145
- now = datetime.datetime.now()
146
- date = now.strftime("%Y-%m-%d")
100
+ def _save_price_log(self):
101
+ """
102
+ Save the current date and total prices in USD and EUR to a CSV file.
147
103
 
104
+ This will append a new entry to the output file if no entry has been made for
105
+ today.
106
+ """
148
107
  if not os.path.isfile(OUTPUT_FILE):
149
- open(OUTPUT_FILE, "w").close()
108
+ open(OUTPUT_FILE, "w", encoding="utf-8").close()
150
109
 
151
- with open(OUTPUT_FILE, "r", encoding="utf-8") as csvfile:
152
- reader = csv.reader(csvfile)
110
+ with open(OUTPUT_FILE, "r", encoding="utf-8") as price_logs:
111
+ price_logs_reader = csv.reader(price_logs)
153
112
  last_row = None
154
- for row in reader:
113
+ for row in price_logs_reader:
155
114
  last_row = row
156
115
  if last_row:
157
- last_date_str = last_row[0][:10]
116
+ last_log_date = last_row[0][:10]
158
117
  else:
159
- last_date_str = ""
160
-
161
- if date != last_date_str:
162
- today = now.strftime("%Y-%m-%d %H:%M:%S")
163
- total = f"{self.total_price:.2f}$"
164
- total_euro = f"{self.total_price_euro:.2f}€"
165
- with open(OUTPUT_FILE, "a", newline="", encoding="utf-8") as csvfile:
166
- writer = csv.writer(csvfile)
167
- writer.writerow([today, total])
168
- writer.writerow([today, total_euro])
169
-
170
- # reset total prices for next run
171
- self.total_price = 0
172
- self.total_price_euro = 0
173
-
174
- def parse_config(self):
175
- config = configparser.ConfigParser()
176
- config.read(CONFIG_FILE)
177
- return config
178
-
179
- def set_config(self, config):
180
- self.use_proxy = (
181
- False if config.get("Proxy API Key", "Use_Proxy") == "False" else True
182
- )
183
- self.api_key = config.get("Proxy API Key", "API_Key")
184
-
185
- # reset all quantities in case this is called at runtime (edit config)
186
- self.case_quantities = []
187
- self.rmr_quantities = []
188
- self.stockholm_quantities = []
189
- self.antwerp_quantities = []
190
- self.rio_quantities = []
191
- self.paris_quantities = []
192
- self.copenhagen_quantities = []
193
- self.shanghai_quantities = []
194
- self.austin_quantities = []
195
-
196
- for capsule_name in CAPSULE_NAMES:
197
- config_capsule_name = capsule_name.replace(" ", "_")
198
- if "RMR" in capsule_name:
199
- self.rmr_quantities.append(
200
- int(config.get("2020 RMR", config_capsule_name))
201
- )
202
- elif "Stockholm" in capsule_name:
203
- self.stockholm_quantities.append(
204
- int(config.get("Stockholm", config_capsule_name))
205
- )
206
- elif "Antwerp" in capsule_name:
207
- self.antwerp_quantities.append(
208
- int(config.get("Antwerp", config_capsule_name))
209
- )
210
- elif "Rio" in capsule_name:
211
- self.rio_quantities.append(int(config.get("Rio", config_capsule_name)))
212
- elif "Paris" in capsule_name:
213
- self.paris_quantities.append(
214
- int(config.get("Paris", config_capsule_name))
215
- )
216
- elif "Copenhagen" in capsule_name:
217
- self.copenhagen_quantities.append(
218
- int(config.get("Copenhagen", config_capsule_name))
219
- )
220
- elif "Shanghai" in capsule_name:
221
- self.shanghai_quantities.append(
222
- int(config.get("Shanghai", config_capsule_name))
223
- )
224
- elif "Austin" in capsule_name:
225
- self.austin_quantities.append(
226
- int(config.get("Austin", config_capsule_name))
227
- )
228
-
229
- for case_name in CASE_NAMES:
230
- config_case_name = case_name.replace(" ", "_")
231
- self.case_quantities.append(int(config.get("Cases", config_case_name)))
118
+ last_log_date = ""
119
+
120
+ today = datetime.now().strftime("%Y-%m-%d")
121
+ if last_log_date != today:
122
+ with open(OUTPUT_FILE, "a", newline="", encoding="utf-8") as price_logs:
123
+ price_logs_writer = csv.writer(price_logs)
124
+ price_logs_writer.writerow([today, f"{self.usd_total:.2f}$"])
125
+ price_logs_writer.writerow([today, f"{self.eur_total:.2f}€"])
126
+
127
+ def read_price_log(self):
128
+ """
129
+ Parse the output file to extract dates, dollar prices, and euro prices. This
130
+ data is used for drawing the plot of past prices.
131
+
132
+ :return: A tuple containing three lists: dates, dollar prices, and euro prices.
133
+ """
134
+ if not os.path.isfile(OUTPUT_FILE):
135
+ open(OUTPUT_FILE, "w", encoding="utf-8").close()
136
+
137
+ dates, dollars, euros = [], [], []
138
+ with open(OUTPUT_FILE, "r", newline="", encoding="utf-8") as price_logs:
139
+ price_logs_reader = csv.reader(price_logs)
140
+ for row in price_logs_reader:
141
+ date, price_with_currency = row
142
+ date = datetime.strptime(date, "%Y-%m-%d")
143
+ price = float(price_with_currency.rstrip("$€"))
144
+ if price_with_currency.endswith("€"):
145
+ euros.append(price)
146
+ else:
147
+ dollars.append(price)
148
+ # Only append every second date since the dates are the same for euros and dollars
149
+ # and we want the length of dates to match the lengths of dollars and euros
150
+ dates.append(date)
151
+
152
+ return dates, dollars, euros
232
153
 
233
154
  @retry(stop=stop_after_attempt(10))
234
155
  def _get_page(self, url):
235
- if self.use_proxy:
156
+ """
157
+ Get the page content from the given URL, using a proxy if configured. If the
158
+ request fails, it will retry up to 10 times.
159
+
160
+ :param url: The URL to fetch the page from.
161
+ :return: The HTTP response object containing the page content.
162
+ :raises RequestException: If the request fails.
163
+ :raises RetryError: If the retry limit is reached.
164
+ """
165
+ use_proxy = self.config.getboolean("Settings", "Use_Proxy", fallback=False)
166
+ api_key = self.config.get("Settings", "API_Key", fallback=None)
167
+ if use_proxy and api_key:
236
168
  page = self.session.get(
237
169
  url=url,
238
170
  proxies={
239
- "http": f"http://{self.api_key}:@smartproxy.crawlbase.com:8012",
240
- "https": f"http://{self.api_key}:@smartproxy.crawlbase.com:8012",
171
+ "http": f"http://{api_key}:@smartproxy.crawlbase.com:8012",
172
+ "https": f"http://{api_key}:@smartproxy.crawlbase.com:8012",
241
173
  },
242
174
  verify=False,
243
175
  )
244
176
  else:
245
177
  page = self.session.get(url)
246
178
 
179
+ if not page.ok or not page.content:
180
+ status = page.status_code
181
+ self.console.print(f"[bold red][!] Failed to load page ({status}). Retrying...\n")
182
+ raise RequestException(f"Failed to load page: {url}")
183
+
247
184
  return page
248
185
 
249
- def _scrape_prices_capsule(
186
+ def _parse_capsule_price(self, capsule_page, capsule_href):
187
+ """
188
+ Parse the price of a capsule from the given page and href.
189
+
190
+ :param capsule_page: The HTTP response object containing the capsule page
191
+ content.
192
+ :param capsule_href: The href of the capsule listing to find the price for.
193
+ :return: The price of the capsule as a float.
194
+ :raises ValueError: If the capsule listing or price span cannot be found.
195
+ """
196
+ capsule_soup = BeautifulSoup(capsule_page.content, "html.parser")
197
+ capsule_listing = capsule_soup.find("a", attrs={"href": f"{capsule_href}"})
198
+ if not isinstance(capsule_listing, Tag):
199
+ raise ValueError(f"Failed to find capsule listing: {capsule_href}")
200
+
201
+ price_span = capsule_listing.find("span", attrs={"class": "normal_price"})
202
+ if not isinstance(price_span, Tag):
203
+ raise ValueError(f"Failed to find price span in capsule listing: {capsule_href}")
204
+
205
+ price_str = price_span.text.split()[2]
206
+ price = float(price_str.replace("$", ""))
207
+
208
+ return price
209
+
210
+ def _scrape_capsule_prices(
250
211
  self,
251
- capsule_page_url,
252
- capsule_hrefs,
253
- capsule_name,
254
- capsule_names_generic,
255
- capsule_quantities,
212
+ capsule_section,
213
+ capsule_info,
256
214
  ):
257
- if any([quantity > 0 for quantity in capsule_quantities]):
258
- title_string = capsule_name.center(
259
- MAX_LINE_LEN, "-"
260
- ) # f"{PADDING}{capsule_name}{PADDING}"[:MAX_LINE_LEN]
261
- self.console.print(f"[bold magenta]{title_string}")
262
-
263
- page = self._get_page(capsule_page_url)
264
- soup = BeautifulSoup(page.content, "html.parser")
265
-
266
- for href_index, href in enumerate(capsule_hrefs):
267
- if capsule_quantities[href_index] > 0:
268
- try:
269
- listing = soup.find("a", attrs={"href": f"{href}"})
270
- retries = 0
271
- while not listing and retries < 5:
272
- self.console.print(
273
- f"[bold red][!] Failed to load page ({page.status_code}). Retrying...\n"
274
- )
275
- page = self._get_page(capsule_page_url)
276
- soup = BeautifulSoup(page.content, "html.parser")
277
- listing = soup.find("a", attrs={"href": f"{href}"})
278
- retries += 1
279
-
280
- price_span = listing.find(
281
- "span", attrs={"class": "normal_price"}
282
- )
283
- price_str = price_span.text.split()[2]
284
- price = float(price_str.replace("$", ""))
285
- price_total = round(
286
- float(capsule_quantities[href_index] * price), 2
287
- )
288
-
289
- self.console.print(
290
- f"[bold red]{capsule_names_generic[href_index]}"
291
- )
292
- self.console.print(
293
- f"Owned: {capsule_quantities[href_index]} Steam market price: ${price} Total: ${price_total}"
294
- )
295
-
296
- self.total_price += price_total
297
-
298
- except Exception:
299
- self.console.print("[bold red][!] Failed to find price listing")
300
- break
301
-
302
- self.console.print("\n")
303
-
304
- def _scrape_prices_case(
305
- self, case_quantities, case_page_urls, case_hrefs, case_names
306
- ):
307
- for index, case_quantity in enumerate(case_quantities):
308
- if case_quantity > 0:
309
- title_string = case_names[index].center(
310
- MAX_LINE_LEN, "-"
311
- ) # f"{PADDING}{case_names[index]}{PADDING}"[:MAX_LINE_LEN]
312
- self.console.print(f"[bold magenta]{title_string}")
313
-
314
- page = self._get_page(case_page_urls[index])
315
- soup = BeautifulSoup(page.content, "html.parser")
316
- listing = soup.find("a", attrs={"href": case_hrefs[index]})
317
- retries = 0
318
- while retries < 5:
319
- if not listing:
320
- self.console.print(
321
- f"[bold red][!] Failed to load page ({page.status_code}). Retrying...\n"
322
- )
323
- page = self._get_page(case_page_urls[index])
324
- soup = BeautifulSoup(page.content, "html.parser")
325
- listing = soup.find("a", attrs={"href": case_hrefs[index]})
326
- retries += 1
327
- else:
328
- break
329
-
330
- try:
331
- price_class = listing.find("span", attrs={"class": "normal_price"})
332
- price_str = price_class.text.split()[2]
333
- price = float(price_str.replace("$", ""))
334
- price_total = round(float(case_quantity * price), 2)
335
-
336
- self.console.print(
337
- f"Owned: {case_quantity} Steam market price: ${price} Total: ${price_total}"
338
- )
339
-
340
- self.total_price += price_total
341
-
342
- except Exception:
343
- self.console.print("[bold red][!] Failed to find price listing")
344
-
345
- self.console.print("\n")
346
-
347
- if not self.use_proxy:
348
- time.sleep(1)
215
+ """
216
+ Scrape prices for a specific capsule section, printing the details to the
217
+ console.
218
+
219
+ :param capsule_section: The section name in the config for the capsule.
220
+ :param capsule_info: A dictionary containing information about the capsule
221
+ section,
222
+ """
223
+ capsule_title = capsule_section.center(MAX_LINE_LEN, SEPARATOR)
224
+ self.console.print(f"[bold magenta]{capsule_title}")
225
+
226
+ capsule_price_total = 0
227
+ capsule_page = capsule_info["page"]
228
+ capsule_names = capsule_info["names"]
229
+ capsule_hrefs = capsule_info["items"]
230
+ capsule_page = self._get_page(capsule_page)
231
+ for capsule_name, capsule_href in zip(capsule_names, capsule_hrefs):
232
+ config_capsule_name = capsule_name.replace(" ", "_")
233
+ owned = self.config.getint(capsule_section, config_capsule_name, fallback=0)
234
+ if owned == 0:
235
+ continue
236
+
237
+ price_usd = self._parse_capsule_price(capsule_page, capsule_href)
238
+ price_usd_owned = round(float(owned * price_usd), 2)
239
+
240
+ self.console.print(f"[bold deep_sky_blue4]{capsule_name}")
241
+ self.console.print(PRICE_INFO.format(owned, price_usd, price_usd_owned))
242
+ capsule_price_total += price_usd_owned
243
+
244
+ return capsule_price_total
245
+
246
+ def scrape_capsule_section_prices(self):
247
+ """Scrape prices for all capsule sections defined in the configuration."""
248
+ capsule_usd_total = 0
249
+ for capsule_section, capsule_info in CAPSULE_INFO.items():
250
+ # only scrape capsule sections where the user owns at least one item
251
+ if any(int(owned) > 0 for _, owned in self.config.items(capsule_section)):
252
+ capsule_usd_total += self._scrape_capsule_prices(capsule_section, capsule_info)
253
+
254
+ return capsule_usd_total
255
+
256
+ def _parse_case_price(self, case_page, case_href):
257
+ """
258
+ Parse the price of a case from the given page and href.
259
+
260
+ :param case_page: The HTTP response object containing the case page content.
261
+ :param case_href: The href of the case listing to find the price for.
262
+ :return: The price of the case as a float.
263
+ :raises ValueError: If the case listing or price span cannot be found.
264
+ """
265
+ case_soup = BeautifulSoup(case_page.content, "html.parser")
266
+ case_listing = case_soup.find("a", attrs={"href": case_href})
267
+ if not isinstance(case_listing, Tag):
268
+ raise ValueError(f"Failed to find case listing: {case_href}")
269
+
270
+ price_class = case_listing.find("span", attrs={"class": "normal_price"})
271
+ if not isinstance(price_class, Tag):
272
+ raise ValueError(f"Failed to find price class in case listing: {case_href}")
273
+
274
+ price_str = price_class.text.split()[2]
275
+ price = float(price_str.replace("$", ""))
276
+
277
+ return price
278
+
279
+ def _scrape_case_prices(self):
280
+ """
281
+ Scrape prices for all cases defined in the configuration.
282
+
283
+ For each case, it prints the case name, owned count, price per item, and total
284
+ price for owned items.
285
+ """
286
+ case_price_total = 0
287
+ for case_index, (config_case_name, owned) in enumerate(self.config.items("Cases")):
288
+ if int(owned) == 0:
289
+ continue
290
+
291
+ case_name = config_case_name.replace("_", " ").title()
292
+ case_title = case_name.center(MAX_LINE_LEN, SEPARATOR)
293
+ self.console.print(f"[bold magenta]{case_title}")
294
+
295
+ case_page = self._get_page(CASE_PAGES[case_index])
296
+ price_usd = self._parse_case_price(case_page, CASE_HREFS[case_index])
297
+ price_usd_owned = round(float(int(owned) * price_usd), 2)
298
+
299
+ self.console.print(PRICE_INFO.format(owned, price_usd, price_usd_owned))
300
+ case_price_total += price_usd_owned
301
+
302
+ if not self.config.getboolean("Settings", "Use_Proxy", fallback=False):
303
+ time.sleep(1)
304
+
305
+ return case_price_total
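The rewritten scraper drops the per-event quantity lists of 2.1.0 and instead reads owned counts and proxy settings from the config file on demand (`getboolean`/`getint`/`items` with fallbacks). The sketch below is not part of the package; it only illustrates the configuration shape those lookups imply. The `Settings`, `Cases`, `Use_Proxy`, and `API_Key` names appear in the diff above, while the concrete case and capsule entries are hypothetical placeholders.

```python
# Minimal sketch (not from the package) of the config shape the refactored
# Scraper reads: a Settings section for proxy options, a Cases section
# mapping case names to owned counts, and one section per capsule group.
from configparser import ConfigParser

config = ConfigParser()
config["Settings"] = {"Use_Proxy": "False", "API_Key": ""}
config["Cases"] = {"Dreams_And_Nightmares_Case": "3"}     # hypothetical entry
config["Stockholm"] = {"Stockholm_Legends_Capsule": "2"}  # hypothetical entry

# The same lookups the new code performs:
use_proxy = config.getboolean("Settings", "Use_Proxy", fallback=False)
api_key = config.get("Settings", "API_Key", fallback=None)
owned_cases = {name: int(count) for name, count in config.items("Cases")}
print(use_proxy, api_key, owned_cases)
```

With a config like this, `scrape_capsule_section_prices` only visits capsule sections whose owned counts are greater than zero, and `_scrape_case_prices` sleeps one second between case requests unless `Use_Proxy` is enabled.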
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: cs2tracker
3
- Version: 2.1.0
3
+ Version: 2.1.2
4
4
  Summary: Tracking the steam market prices of CS2 items
5
5
  Home-page: https://github.com/ashiven/cs2tracker
6
6
  Author: Jannik Novak
@@ -11,6 +11,7 @@ Classifier: Programming Language :: Python :: 3.11
11
11
  Requires-Python: >=3.11
12
12
  Description-Content-Type: text/markdown
13
13
  License-File: LICENSE.md
14
+ Requires-Dist: numpy==1.26.4
14
15
  Requires-Dist: beautifulsoup4==4.11.1
15
16
  Requires-Dist: CurrencyConverter==0.17.9
16
17
  Requires-Dist: matplotlib==3.7.0
@@ -20,8 +21,16 @@ Requires-Dist: tenacity==8.2.2
20
21
  Requires-Dist: urllib3==2.1.0
21
22
  Dynamic: license-file
22
23
 
24
+ <div align="center">
25
+
23
26
  [![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT)
27
+ [![GitHub Release](https://img.shields.io/github/v/release/ashiven/cs2tracker)](https://github.com/ashiven/cs2tracker/releases)
24
28
  [![PyPI version](https://badge.fury.io/py/cs2tracker.svg)](https://badge.fury.io/py/cs2tracker)
29
+ [![GitHub Issues or Pull Requests](https://img.shields.io/github/issues/ashiven/cs2tracker)](https://github.com/ashiven/cs2tracker/issues)
30
+ [![GitHub Issues or Pull Requests](https://img.shields.io/github/issues-pr/ashiven/cs2tracker)](https://github.com/ashiven/cs2tracker/pulls)
31
+ ![GitHub Repo stars](https://img.shields.io/github/stars/ashiven/cs2tracker)
32
+
33
+ </div>
25
34
 
26
35
  ## About
27
36
 
@@ -49,10 +58,10 @@ Dynamic: license-file
49
58
 
50
59
  ### Options
51
60
 
52
- - `Edit Config` to change the specific numbers of each item you own and then save the config file.
53
61
  - `Run!` to gather the current market prices of your items and calculate the total amount in USD and EUR.
62
+ - `Edit Config` to change the specific numbers of each item you own and then save the config file.
54
63
  - `Show History` to see a price chart consisting of past calculations. A new data point for this chart is generated once a day upon running the program.
55
- - If you want to avoid temporary IP blocks, register for an API Key on [Crawlbase](crawlbase.com) and enter it into the `API_Key` field at the end of the config file. This will route every request through a different proxy server.
64
+ - If you want to prevent your requests from being rate limited by the steamcommunity server, register for an API key on [Crawlbase](crawlbase.com) and enter it into the `API_Key` field at the end of the config file. This will route every request through a different proxy server.
56
65
 
57
66
  ---
58
67
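The `Show History` option described in the README draws on the price log that `read_price_log` returns as parallel lists of dates, USD totals, and EUR totals. Below is a minimal plotting sketch, assuming matplotlib (already pinned in `Requires-Dist`) and example data in that shape; it is not the package's own chart code.

```python
# Hypothetical sketch, not the package's plotting code: chart parallel lists
# shaped like the return value of Scraper.read_price_log().
from datetime import datetime

import matplotlib.pyplot as plt

# Example data; real values come from the OUTPUT_FILE price log.
dates = [datetime(2024, 1, 1), datetime(2024, 1, 2)]
dollars = [101.50, 103.25]
euros = [93.10, 94.80]

fig, ax = plt.subplots()
ax.plot(dates, dollars, label="USD total")
ax.plot(dates, euros, label="EUR total")
ax.set_xlabel("Date")
ax.set_ylabel("Portfolio value")
ax.legend()
fig.autofmt_xdate()  # slant the date labels so they stay readable
plt.show()
```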