cs2tracker 2.1.13-py3-none-any.whl → 2.1.14-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of cs2tracker might be problematic.
- cs2tracker/_version.py +2 -2
- cs2tracker/app/application.py +72 -76
- cs2tracker/app/editor_frame.py +208 -127
- cs2tracker/app/scraper_frame.py +27 -10
- cs2tracker/constants.py +58 -321
- cs2tracker/data/config.ini +153 -153
- cs2tracker/data/get_inventory.js +64 -21
- cs2tracker/main.py +1 -16
- cs2tracker/scraper/parsers.py +192 -0
- cs2tracker/scraper/scraper.py +102 -212
- cs2tracker/util/padded_console.py +19 -0
- cs2tracker/util/validated_config.py +72 -18
- {cs2tracker-2.1.13.dist-info → cs2tracker-2.1.14.dist-info}/METADATA +3 -2
- cs2tracker-2.1.14.dist-info/RECORD +28 -0
- cs2tracker-2.1.13.dist-info/RECORD +0 -27
- {cs2tracker-2.1.13.dist-info → cs2tracker-2.1.14.dist-info}/WHEEL +0 -0
- {cs2tracker-2.1.13.dist-info → cs2tracker-2.1.14.dist-info}/entry_points.txt +0 -0
- {cs2tracker-2.1.13.dist-info → cs2tracker-2.1.14.dist-info}/licenses/LICENSE +0 -0
- {cs2tracker-2.1.13.dist-info → cs2tracker-2.1.14.dist-info}/top_level.txt +0 -0
cs2tracker/scraper/parsers.py
ADDED
```diff
@@ -0,0 +1,192 @@
+from abc import ABC, abstractmethod
+from enum import Enum
+from urllib.parse import unquote
+
+from bs4 import BeautifulSoup
+from bs4.element import Tag
+
+from cs2tracker.constants import CAPSULE_PAGES
+from cs2tracker.util import get_console
+from cs2tracker.util.validated_config import get_config
+
+config = get_config()
+console = get_console()
+
+
+class PriceSource(Enum):
+    STEAM = "steam"
+    BUFF163 = "buff163"
+    SKINPORT = "skinport"
+
+
+class Parser(ABC):
+    @classmethod
+    @abstractmethod
+    def get_item_page_url(cls, item_href, source=PriceSource.STEAM) -> str:
+        """
+        Convert an href of a Steam Community Market item to a Parser-specific market
+        page URL.
+
+        :param item_href: The href of the item listing, typically ending with the item's
+            name.
+        :return: A URL string for the Parser market page of the item.
+        """
+
+    @classmethod
+    @abstractmethod
+    def parse_item_price(cls, item_page, item_href, source=PriceSource.STEAM) -> float:
+        """
+        Parse the price of an item from the given Parser market page and steamcommunity
+        item href.
+
+        :param item_page: The HTTP response object containing the item page content.
+        :param item_href: The href of the item listing to find the price for.
+        :return: The price of the item as a float.
+        :raises ValueError: If the item listing or price span cannot be found.
+        """
+
+
+class SteamParser(Parser):
+    STEAM_MARKET_SEARCH_PAGE_BASE_URL = "https://steamcommunity.com/market/search?q={}"
+    PRICE_INFO = "Owned: {:<10} {} price: ${:<10} Total: ${:<10}"
+    NEEDS_TIMEOUT = True
+    SOURCES = [PriceSource.STEAM]
+
+    @classmethod
+    def get_item_page_url(cls, item_href, source=PriceSource.STEAM):
+        _ = source
+
+        # For higher efficiency we want to reuse the same page for sticker capsules (scraper uses caching)
+        # Therefore, if the provided item is a sticker capsule we return a search page defined in CAPSULE_PAGES
+        # where all of the sticker capsules of one section are listed
+        for section in config.sections():
+            if section in ("Custom Items", "Cases", "User Settings", "App Settings"):
+                continue
+            if any(item_href == option for option in config.options(section)):
+                return CAPSULE_PAGES[section]
+
+        url_encoded_name = item_href.split("/")[-1]
+        page_url = cls.STEAM_MARKET_SEARCH_PAGE_BASE_URL.format(url_encoded_name)
+
+        return page_url
+
+    @classmethod
+    def parse_item_price(cls, item_page, item_href, source=PriceSource.STEAM):
+        _ = source
+
+        item_soup = BeautifulSoup(item_page.content, "html.parser")
+        item_listing = item_soup.find("a", attrs={"href": f"{item_href}"})
+        if not isinstance(item_listing, Tag):
+            raise ValueError(f"Steam: Failed to find item listing for: {item_href}")
+
+        item_price_span = item_listing.find("span", attrs={"class": "normal_price"})
+        if not isinstance(item_price_span, Tag):
+            raise ValueError(f"Steam: Failed to find price span in item listing for: {item_href}")
+
+        price_str = item_price_span.text.split()[2]
+        price = float(price_str.replace("$", ""))
+
+        return price
+
+
+class SkinLedgerParser(Parser):
+    SKINLEDGER_PRICE_LIST = ""
+    PRICE_INFO = "Owned: {:<10} {} price: ${:<10} Total: ${:<10}"
+    NEEDS_TIMEOUT = False
+    SOURCES = [PriceSource.STEAM, PriceSource.BUFF163, PriceSource.SKINPORT]
+
+    @classmethod
+    def get_item_page_url(cls, item_href, source=PriceSource.STEAM) -> str:
+        _ = source
+        return super().get_item_page_url(item_href)
+
+    @classmethod
+    def parse_item_price(cls, item_page, item_href, source=PriceSource.STEAM) -> float:
+        _, _ = item_href, source
+        return super().parse_item_price(item_page, item_href)
+
+
+class ClashParser(Parser):
+    CLASH_ITEM_API_BASE_URL = "https://inventory.clash.gg/api/GetItemPrice?id={}"
+    PRICE_INFO = "Owned: {:<10} {} price: ${:<10} Total: ${:<10}"
+    NEEDS_TIMEOUT = True
+    SOURCES = [PriceSource.STEAM]
+
+    @classmethod
+    def get_item_page_url(cls, item_href, source=PriceSource.STEAM):
+        _ = source
+
+        url_encoded_name = item_href.split("/")[-1]
+        page_url = cls.CLASH_ITEM_API_BASE_URL.format(url_encoded_name)
+
+        return page_url
+
+    @classmethod
+    def parse_item_price(cls, item_page, item_href, source=PriceSource.STEAM):
+        _, _ = item_href, source
+
+        data = item_page.json()
+        if data.get("success", "false") == "false":
+            raise ValueError(f"Clash: Response failed for: {item_href}")
+
+        price = data.get("average_price", None)
+        if not price:
+            raise ValueError(f"Clash: Failed to find item price for: {item_href}")
+
+        price = float(price)
+
+        return price
+
+
+class CSGOTrader(Parser):
+    CSGOTRADER_PRICE_LIST = "https://prices.csgotrader.app/latest/{}.json"
+    PRICE_INFO = "Owned: {:<10} {:<10}: ${:<10} Total: ${:<10}"
+    NEEDS_TIMEOUT = False
+    SOURCES = [PriceSource.STEAM, PriceSource.BUFF163, PriceSource.SKINPORT]
+
+    @classmethod
+    def get_item_page_url(cls, item_href, source=PriceSource.STEAM):
+        _ = item_href
+
+        page_url = cls.CSGOTRADER_PRICE_LIST.format(source.value)
+
+        return page_url
+
+    @classmethod
+    def parse_item_price(cls, item_page, item_href, source=PriceSource.STEAM):
+        _ = source
+
+        price_list = item_page.json()
+
+        url_decoded_name = unquote(item_href.split("/")[-1])
+        if source in (PriceSource.BUFF163, PriceSource.SKINPORT):
+            url_decoded_name = url_decoded_name.replace("Holo-Foil", "Holo/Foil")
+
+        price_info = price_list.get(url_decoded_name, None)
+        if not price_info:
+            raise ValueError(f"CSGOTrader: Could not find item price info: {url_decoded_name}")
+
+        if source == PriceSource.STEAM:
+            price = price_info.get("last_24h")
+            if not price:
+                price = price_info.get("last_7d")
+                if not price:
+                    raise ValueError(
+                        f"CSGOTrader: Could not find steam price of the past 7 days: {url_decoded_name}"
+                    )
+        elif source == PriceSource.BUFF163:
+            price = price_info.get("starting_at")
+            if not price:
+                raise ValueError(f"CSGOTrader: Could not find buff163 listing: {url_decoded_name}")
+            price = price.get("price")
+            if not price:
+                raise ValueError(
+                    f"CSGOTrader: Could not find recent buff163 price: {url_decoded_name}"
+                )
+        else:
+            price = price_info.get("starting_at")
+            if not price:
+                raise ValueError(f"CSGOTrader: Could not find skinport listing: {url_decoded_name}")
+
+        price = float(price)
+        return price
```
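The new `CSGOTrader` parser fetches a whole per-source price list instead of scraping individual Steam market pages. The standalone sketch below is not package code; it only mirrors that lookup flow for the steam source using `requests`, and the item href is a made-up example.

```python
# Standalone sketch of the lookup flow CSGOTrader implements above: fetch the
# per-source price list and read one price by market-hash name.
# The href below is only an example; `requests` is assumed to be installed.
from urllib.parse import unquote

import requests

PRICE_LIST_URL = "https://prices.csgotrader.app/latest/{}.json"  # CSGOTRADER_PRICE_LIST


def lookup_steam_price(item_href: str) -> float:
    """Return the most recent steam price for a steamcommunity market href."""
    name = unquote(item_href.split("/")[-1])
    price_list = requests.get(PRICE_LIST_URL.format("steam"), timeout=30).json()
    price_info = price_list.get(name)
    if not price_info:
        raise ValueError(f"no price info for {name}")
    # Same fallback order as CSGOTrader.parse_item_price: last_24h, then last_7d.
    price = price_info.get("last_24h") or price_info.get("last_7d")
    if not price:
        raise ValueError(f"no recent steam price for {name}")
    return float(price)


if __name__ == "__main__":
    print(lookup_steam_price("https://steamcommunity.com/market/listings/730/Fracture%20Case"))
```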
cs2tracker/scraper/scraper.py
CHANGED
```diff
@@ -1,22 +1,17 @@
 import time
 from datetime import datetime
-from urllib.parse import unquote
 
-from bs4 import BeautifulSoup
-from bs4.element import Tag
 from currency_converter import CurrencyConverter
-from requests import RequestException
+from requests import RequestException
 from requests.adapters import HTTPAdapter, Retry
+from requests_cache import CachedSession
 from tenacity import RetryError, retry, stop_after_attempt
 
-from cs2tracker.constants import AUTHOR_STRING, BANNER
+from cs2tracker.constants import AUTHOR_STRING, BANNER
 from cs2tracker.scraper.discord_notifier import DiscordNotifier
+from cs2tracker.scraper.parsers import CSGOTrader, PriceSource
 from cs2tracker.util import PriceLogs, get_config, get_console
 
-MAX_LINE_LEN = 72
-SEPARATOR = "-"
-PRICE_INFO = "Owned: {:<10} Steam market price: ${:<10} Total: ${:<10}\n"
-
 HTTP_PROXY_URL = "http://{}:@smartproxy.crawlbase.com:8012"
 HTTPS_PROXY_URL = "http://{}:@smartproxy.crawlbase.com:8012"
 
@@ -29,6 +24,11 @@ class ConfigError:
         self.message = "Invalid configuration. Please fix the config file before running."
 
 
+class ParsingError:
+    def __init__(self, message):
+        self.message = message
+
+
 class RequestLimitExceededError:
     def __init__(self):
         self.message = "Too many requests. Consider using proxies to prevent rate limiting."
@@ -48,13 +48,16 @@ class Scraper:
     def __init__(self):
         """Initialize the Scraper class."""
         self._start_session()
+        self._add_parser(CSGOTrader)
+
         self.error_stack = []
-        self.
-
+        self.totals = {
+            price_source: {"usd": 0.0, "eur": 0.0} for price_source in self.parser.SOURCES
+        }
 
     def _start_session(self):
         """Start a requests session with custom headers and retry logic."""
-        self.session =
+        self.session = CachedSession("scraper_cache", backend="memory")
         self.session.headers.update(
             {
                 "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.36"
@@ -64,11 +67,15 @@ class Scraper:
         self.session.mount("http://", HTTPAdapter(max_retries=retries))
         self.session.mount("https://", HTTPAdapter(max_retries=retries))
 
+    def _add_parser(self, parser):
+        """Add a parser for a specific page where item prices should be scraped."""
+        self.parser = parser
+
     def _print_error(self):
         """Print the last error message from the error stack, if any."""
         last_error = self.error_stack[-1] if self.error_stack else None
         if last_error:
-            console.error(f"{last_error.message}
+            console.error(f"{last_error.message}")
 
     def scrape_prices(self, update_sheet_callback=None):
         """
@@ -84,47 +91,58 @@ class Scraper:
             return
 
         # Reset totals from the previous run and clear the error stack
-        self.usd_total, self.eur_total = 0, 0
         self.error_stack.clear()
+        self.totals = {
+            price_source: {"usd": 0.0, "eur": 0.0} for price_source in self.parser.SOURCES
+        }
 
-
-
-
+        for section in config.sections():
+            if section in ("User Settings", "App Settings"):
+                continue
+            self._scrape_item_prices(section, update_sheet_callback)
 
-        self.
-
-
-
+        for price_source, totals in self.totals.items():
+            usd_total = totals["usd"]
+            eur_total = CurrencyConverter().convert(usd_total, "USD", "EUR")
+            self.totals.update({price_source: {"usd": usd_total, "eur": eur_total}})  # type: ignore
 
         if update_sheet_callback:
-            update_sheet_callback(["", ""
-
-            [
-
-
-
-
-
-
+            update_sheet_callback(["", ""] + ["", ""] * len(self.parser.SOURCES))
+            for price_source, totals in self.totals.items():
+                usd_total = totals["usd"]
+                eur_total = totals["eur"]
+                update_sheet_callback(
+                    [
+                        f"[{datetime.now().strftime('%Y-%m-%d')}] {price_source.value.title()} Total:",
+                        f"${usd_total:.2f}",
+                        f"€{eur_total:.2f}",
+                        "",
+                    ]
+                )
 
         self._print_total()
-        PriceLogs.save(self.usd_total, self.eur_total)
         self._send_discord_notification()
 
+        # TODO: modify price logs, charts etc for multiple sources (only use steam as source for now)
+        steam_usd_total = self.totals[PriceSource.STEAM]["usd"]
+        steam_eur_total = self.totals[PriceSource.STEAM]["eur"]
+        PriceLogs.save(steam_usd_total, steam_eur_total)
+
     def _print_total(self):
         """Print the total prices in USD and EUR, formatted with titles and
         separators.
         """
-
-
-
+        console.title("USD Total", "green")
+        for price_source, totals in self.totals.items():
+            usd_total = totals.get("usd")
+            console.print(f"{price_source.value.title():<10}: ${usd_total:.2f}")
 
-
-
-
+        console.title("EUR Total", "green")
+        for price_source, totals in self.totals.items():
+            eur_total = totals.get("eur")
+            console.print(f"{price_source.value.title():<10}: €{eur_total:.2f}")
 
-
-        console.print(f"[bold green]{end_string}\n")
+        console.separator("green")
 
     def _send_discord_notification(self):
         """Send a message to a Discord webhook if notifications are enabled in the
@@ -171,208 +189,80 @@ class Scraper:
 
         return page
 
-    def
-        """
-        Print the title for a case, capsule, or custom item.
-
-        :param raw_item_str: The raw string to convert into an item name and title.
-        :param from_config: Whether the raw item string is from the config file.
-        :param from_href: Whether the raw item string is an href.
-        :return: The formatted item name.
-        """
-        if from_config:
-            item_name = raw_item_str.replace("_", " ").title()
-        elif from_href:
-            item_name = unquote(raw_item_str.split("/")[-1])
-        else:
-            item_name = raw_item_str
-
-        item_title = item_name.center(MAX_LINE_LEN, SEPARATOR)
-        console.print(f"[bold magenta]{item_title}\n")
-        return item_name
-
-    def _parse_item_price(self, item_page, item_href):
-        """
-        Parse the price of an item from the given steamcommunity market page and item
-        href.
-
-        :param item_page: The HTTP response object containing the item page content.
-        :param item_href: The href of the item listing to find the price for.
-        :return: The price of the item as a float.
-        :raises ValueError: If the item listing or price span cannot be found.
-        """
-        item_soup = BeautifulSoup(item_page.content, "html.parser")
-        item_listing = item_soup.find("a", attrs={"href": f"{item_href}"})
-        if not isinstance(item_listing, Tag):
-            raise ValueError(f"Failed to find item listing: {item_href}")
-
-        item_price_span = item_listing.find("span", attrs={"class": "normal_price"})
-        if not isinstance(item_price_span, Tag):
-            raise ValueError(f"Failed to find price span in item listing: {item_href}")
-
-        price_str = item_price_span.text.split()[2]
-        price = float(price_str.replace("$", ""))
-
-        return price
-
-    def _scrape_capsule_prices(self, capsule_section, capsule_info, update_sheet_callback=None):
-        """
-        Scrape prices for a specific capsule section, printing the details to the
-        console.
-
-        :param capsule_section: The section name in the config for the capsule.
-        :param capsule_info: A dictionary containing information about the capsule page,
-            hrefs, and names.
-        :param update_sheet_callback: Optional callback function to update a tksheet
-            that is displayed in the GUI with the latest scraper price calculation.
+    def _scrape_prices_from_all_sources(self, item_href, owned):
         """
-
-
-        try:
-            capsule_page = self._get_page(capsule_info["page"])
-            for capsule_href in capsule_info["items"]:
-                capsule_name = unquote(capsule_href.split("/")[-1])
-                config_capsule_name = capsule_name.replace(" ", "_").lower()
-                owned = config.getint(capsule_section, config_capsule_name, fallback=0)
-                if owned == 0:
-                    continue
-
-                price_usd = self._parse_item_price(capsule_page, capsule_href)
-                price_usd_owned = round(float(owned * price_usd), 2)
-
-                console.print(f"[bold deep_sky_blue4]{capsule_name}")
-                console.print(PRICE_INFO.format(owned, price_usd, price_usd_owned))
-                capsule_usd_total += price_usd_owned
-
-                if update_sheet_callback:
-                    update_sheet_callback([capsule_name, owned, price_usd, price_usd_owned])
-        except (RetryError, ValueError):
-            self.error_stack.append(RequestLimitExceededError())
-            self._print_error()
-        except Exception as error:
-            self.error_stack.append(UnexpectedError(error))
-            self._print_error()
-
-        return capsule_usd_total
-
-    def _scrape_capsule_section_prices(self, update_sheet_callback=None):
-        """
-        Scrape prices for all capsule sections defined in the configuration.
-
-        :param update_sheet_callback: Optional callback function to update a tksheet
-            that is displayed in the GUI with the latest scraper price calculation.
-        """
-        capsule_usd_total = 0
-        for capsule_section, capsule_info in CAPSULE_INFO.items():
-            if self.error_stack:
-                break
+        For a given item href and owned count, scrape the item's price from all sources
+        available to the currently registered parser.
 
-
-
-
-
-
-
-            if not config.getboolean("App Settings", "use_proxy", fallback=False):
-                time.sleep(1)
-
-        return capsule_usd_total
-
-    def _market_page_from_href(self, item_href):
-        """
-        Convert an href of a Steam Community Market item to a market page URL.
-
-        :param item_href: The href of the item listing, typically ending with the item's
-            name.
-        :return: A URL string for the Steam Community Market page of the item.
-        """
-        url_encoded_name = item_href.split("/")[-1]
-        page_url = f"https://steamcommunity.com/market/search?q={url_encoded_name}"
-
-        return page_url
-
-    def _scrape_case_prices(self, update_sheet_callback=None):
-        """
-        Scrape prices for all cases defined in the configuration.
-
-        For each case, it prints the case name, owned count, price per item, and total
-        price for owned items.
-
-        :param update_sheet_callback: Optional callback function to update a tksheet
-            that is displayed in the GUI with the latest scraper price calculation.
+        :param item_href: The url of the steamcommunity market listing of the item
+        :param owned: How many of this item the user owns
+        :return: A list of item prices for the different sources
+        :raises RequestException: If the request fails.
+        :raises RetryError: If the retry limit is reached.
+        :raises ValueError: If the parser could not find the item
         """
-
-        for
-            if self.error_stack:
-                break
-            if int(owned) == 0:
-                continue
-
-            case_name = self._print_item_title(config_case_name, from_config=True)
+        prices = []
+        for price_source in self.parser.SOURCES:
             try:
-
-
-                price_usd = self.
-                price_usd_owned = round(float(int(owned) * price_usd), 2)
-
-                console.print(PRICE_INFO.format(owned, price_usd, price_usd_owned))
-                case_usd_total += price_usd_owned
+                item_page_url = self.parser.get_item_page_url(item_href, price_source)
+                item_page = self._get_page(item_page_url)
+                price_usd = self.parser.parse_item_price(item_page, item_href, price_source)
 
-
-
-
-
-
-
-
-
-
-
+                price_usd_owned = round(float(int(owned) * price_usd), 2)
+                self.totals[price_source]["usd"] += price_usd_owned
+
+                prices += [price_usd, price_usd_owned]
+                console.price(
+                    self.parser.PRICE_INFO,
+                    owned,
+                    price_source.value.title(),
+                    price_usd,
+                    price_usd_owned,
+                )
+            except ValueError as error:
+                prices += [0.0, 0.0]
+                self.error_stack.append(ParsingError(error))
                 self._print_error()
 
-        return
+        return prices
 
-    def
+    def _scrape_item_prices(self, section, update_sheet_callback=None):
         """
-        Scrape prices for
+        Scrape prices for all items defined in a configuration section that uses hrefs
+        as option keys.
 
-        For each
-
+        For each item, it prints the item name, owned count, price per item, and total
+        price for owned items.
 
         :param update_sheet_callback: Optional callback function to update a tksheet
             that is displayed in the GUI with the latest scraper price calculation.
         """
-
-
-            if self.error_stack:
+        for item_href, owned in config.items(section):
+            if self.error_stack and isinstance(self.error_stack[-1], RequestLimitExceededError):
                 break
             if int(owned) == 0:
                 continue
 
-
+            item_name = config.option_to_name(item_href, href=True)
+            console.title(item_name, "magenta")
             try:
-
-                custom_item_page = self._get_page(custom_item_page_url)
-                price_usd = self._parse_item_price(custom_item_page, custom_item_href)
-                price_usd_owned = round(float(int(owned) * price_usd), 2)
-
-                console.print(PRICE_INFO.format(owned, price_usd, price_usd_owned))
-                custom_item_usd_total += price_usd_owned
+                prices = self._scrape_prices_from_all_sources(item_href, owned)
 
                 if update_sheet_callback:
-                    update_sheet_callback([
+                    update_sheet_callback([item_name, owned] + prices)
 
-                if
+                if (
+                    not config.getboolean("App Settings", "use_proxy", fallback=False)
+                    and self.parser.NEEDS_TIMEOUT
+                ):
                     time.sleep(1)
-            except
+            except RetryError:
                 self.error_stack.append(RequestLimitExceededError())
                 self._print_error()
             except Exception as error:
                 self.error_stack.append(UnexpectedError(error))
                 self._print_error()
 
-        return custom_item_usd_total
-
 
 if __name__ == "__main__":
     scraper = Scraper()
```
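The scraper now keeps one running total per `PriceSource` instead of the single `usd_total`/`eur_total` pair. Below is a minimal standalone sketch of that data shape and the EUR conversion step; it uses a stand-in enum rather than the package's imports, assumes `currency_converter` is installed, and the accumulated amounts are made up.

```python
# Minimal sketch (not package code) of the per-source totals introduced above.
from enum import Enum

from currency_converter import CurrencyConverter


class PriceSource(Enum):  # stand-in for cs2tracker.scraper.parsers.PriceSource
    STEAM = "steam"
    BUFF163 = "buff163"
    SKINPORT = "skinport"


SOURCES = [PriceSource.STEAM, PriceSource.BUFF163, PriceSource.SKINPORT]

# One running USD/EUR total per price source, as in Scraper.__init__/scrape_prices.
totals = {source: {"usd": 0.0, "eur": 0.0} for source in SOURCES}
totals[PriceSource.STEAM]["usd"] += 12.34   # example accumulation
totals[PriceSource.BUFF163]["usd"] += 9.87  # example accumulation

converter = CurrencyConverter()
for source, amounts in totals.items():
    amounts["eur"] = converter.convert(amounts["usd"], "USD", "EUR")
    print(f"{source.value.title():<10}: ${amounts['usd']:.2f} / €{amounts['eur']:.2f}")
```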
cs2tracker/util/padded_console.py
CHANGED

```diff
@@ -7,6 +7,10 @@ PADDING_LEFT = 4
 PADDING_RIGHT = 0
 
 
+MAX_LINE_LEN = 72
+SEPARATOR = "-"
+
+
 class PaddedConsole:
     def __init__(self, padding=(PADDING_TOP, PADDING_RIGHT, PADDING_BOTTOM, PADDING_LEFT)):
         """Initialize a PaddedConsole with specified padding."""
@@ -22,6 +26,21 @@ class PaddedConsole:
             text = "[bold red][!] " + text
         self.print(text)
 
+    def title(self, text, color):
+        """Print the given text as a title."""
+        title = text.center(MAX_LINE_LEN, SEPARATOR)
+        console.print(f"\n[bold {color}]{title}\n")
+
+    def separator(self, color):
+        """Print a separator line."""
+        separator = SEPARATOR * MAX_LINE_LEN
+        console.print(f"[bold {color}]{separator}")
+
+    def price(self, price_str, price_source, owned, steam_market_price, total_owned):
+        # pylint: disable=too-many-arguments,too-many-positional-arguments
+        """Print price information."""
+        console.print(price_str.format(price_source, owned, steam_market_price, total_owned))
+
     def __getattr__(self, attr):
         """Ensure console methods can be called directly on PaddedConsole."""
         return getattr(self.console, attr)
```