yad2-scraper 0.2.0__py3-none-any.whl → 0.3.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
yad2_scraper/constants.py CHANGED
@@ -18,9 +18,10 @@ DEFAULT_REQUEST_HEADERS = {
 ALLOW_REQUEST_REDIRECTS = True
 VERIFY_REQUEST_SSL = True
 
-ANTIBOT_CONTENT = b"Are you for real"  # robot-captcha
+ANTIBOT_CONTENT_IDENTIFIER = b"Are you for real"  # robot-captcha
+YAD2_CONTENT_IDENTIFIER = b"https://www.yad2.co.il/"
 
-FIRST_PAGE = 1
+FIRST_PAGE_NUMBER = 1
 
 NOT_MENTIONED_PRICE_RANGE = 0, 0
 
 NEXT_DATA_SCRIPT_ID = "__NEXT_DATA__"
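
The two identifiers are byte strings matched against raw response bodies (see _validate_response in scraper.py below). A minimal sketch of that check, using a hypothetical classify_content helper:

    from yad2_scraper.constants import ANTIBOT_CONTENT_IDENTIFIER, YAD2_CONTENT_IDENTIFIER

    def classify_content(content: bytes) -> str:
        # anti-bot interstitial pages contain the captcha marker
        if ANTIBOT_CONTENT_IDENTIFIER in content:
            return "anti-bot"
        # genuine pages reference the Yad2 base URL
        if YAD2_CONTENT_IDENTIFIER not in content:
            return "unexpected"
        return "yad2"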
yad2_scraper/exceptions.py CHANGED
@@ -1,2 +1,31 @@
-class AntiBotDetectedError(Exception):
+import httpx
+from typing import List
+
+
+class ResponseError(httpx.HTTPStatusError):
+    # attaches the request/response objects to the error
     pass
+
+
+class AntiBotDetectedError(ResponseError):
+    pass
+
+
+class UnexpectedContentError(ResponseError):
+    pass
+
+
+class MaxRetriesExceededError(Exception):
+    def __init__(self, msg: str, errors: List[Exception] = None):
+        super().__init__(msg)
+        self.errors = errors
+
+
+class MaxRequestRetriesExceededError(MaxRetriesExceededError):
+    def __init__(self, method: str, url: str, max_retries: int, errors: List[Exception] = None):
+        self.method = method
+        self.url = url
+        self.max_retries = max_retries
+
+        msg = f"All {self.max_retries} retry attempts for {self.method} request to '{self.url}' have failed"
+        super().__init__(msg, errors)
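
Calling code can catch the new hierarchy as follows; a minimal sketch assuming the module paths shown in this release (the URL is illustrative):

    from yad2_scraper.scraper import Yad2Scraper
    from yad2_scraper.exceptions import AntiBotDetectedError, MaxRequestRetriesExceededError

    scraper = Yad2Scraper(max_retries=3)
    try:
        response = scraper.get("https://www.yad2.co.il/vehicles/cars")  # illustrative URL
    except AntiBotDetectedError as error:
        # ResponseError subclasses httpx.HTTPStatusError, so the failing
        # request and response ride along on the exception
        print(error.response.status_code)
    except MaxRequestRetriesExceededError as error:
        # the per-attempt exceptions are collected on .errors
        for attempt_error in error.errors:
            print(type(attempt_error).__name__, attempt_error)
    finally:
        scraper.close()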
yad2_scraper/scraper.py CHANGED
@@ -6,16 +6,19 @@ from typing import Optional, Dict, Any, Tuple, Union, Type, TypeVar
 
 from yad2_scraper.category import Yad2Category
 from yad2_scraper.query import QueryFilters
-from yad2_scraper.utils import get_random_user_agent, validate_http_response
+from yad2_scraper.utils import get_random_user_agent
+from yad2_scraper.exceptions import AntiBotDetectedError, UnexpectedContentError, MaxRequestRetriesExceededError
 from yad2_scraper.constants import (
     DEFAULT_REQUEST_HEADERS,
     ALLOW_REQUEST_REDIRECTS,
-    VERIFY_REQUEST_SSL
+    VERIFY_REQUEST_SSL,
+    ANTIBOT_CONTENT_IDENTIFIER,
+    YAD2_CONTENT_IDENTIFIER
 )
 
 Category = TypeVar("Category", bound=Yad2Category)
 DelayRange = Tuple[float, float]
-QueryParams = Union[QueryFilters, Dict[str, Any]]
+QueryParamTypes = Union[QueryFilters, Dict[str, Any]]
 
 logger = logging.getLogger(__name__)
 
@@ -23,85 +26,149 @@ logger = logging.getLogger(__name__)
 class Yad2Scraper:
     def __init__(
         self,
-        session: Optional[httpx.Client] = None,
-        request_kwargs: Dict[str, Any] = None,
+        client: Optional[httpx.Client] = None,
+        request_defaults: Optional[Dict[str, Any]] = None,
         randomize_user_agent: bool = False,
-        requests_delay_range: Optional[DelayRange] = None,
+        random_delay_range: Optional[DelayRange] = None,
+        max_retries: int = 0
     ):
-        self.session = session or httpx.Client(
+        self.client = client or httpx.Client(
             headers=DEFAULT_REQUEST_HEADERS,
             follow_redirects=ALLOW_REQUEST_REDIRECTS,
             verify=VERIFY_REQUEST_SSL
         )
-        self.request_kwargs = request_kwargs or {}
+        self.request_defaults = request_defaults or {}
         self.randomize_user_agent = randomize_user_agent
-        self.requests_delay_range = requests_delay_range
+        self.random_delay_range = random_delay_range
+        self.max_retries = max_retries
 
-        logger.debug(f"Initialized with session {self.session} and request kwargs: {self.request_kwargs}")
-
-    def set_user_agent(self, user_agent: str):
-        self.session.headers["User-Agent"] = user_agent
-        logger.debug(f"User-Agent session header set to: '{user_agent}'")
-
-    def set_no_script(self, no_script: bool):
-        value = "1" if no_script else "0"  # str(int(no_script))
-        self.session.cookies.set("noscript", value)
-        logger.debug(f"NoScript session cookie set to: '{value}'")
+        logger.debug(f"Scraper initialized with client: {self.client}")
 
     def fetch_category(
         self,
         url: str,
-        query_params: Optional[QueryParams] = None,
+        params: Optional[QueryParamTypes] = None,
         category_type: Type[Category] = Yad2Category
     ) -> Category:
         logger.debug(f"Fetching category from URL: '{url}'")
-        response = self.get(url, query_params)
+        response = self.get(url, params)
         logger.debug(f"Category fetched successfully from URL: '{url}'")
         return category_type.from_html_io(response)
 
-    def get(self, url: str, query_params: Optional[QueryParams] = None) -> httpx.Response:
-        return self.request("GET", url, query_params=query_params)
+    def get(self, url: str, params: Optional[QueryParamTypes] = None) -> httpx.Response:
+        return self.request("GET", url, params=params)
+
+    def request(self, method: str, url: str, params: Optional[QueryParamTypes] = None) -> httpx.Response:
+        request_options = self._prepare_request_options(params=params)
+
+        try:
+            return self._send_request(method, url, request_options)
+        except Exception as error:
+            return self._handle_request_error(method, url, request_options, error)
 
-    def request(self, method: str, url: str, query_params: Optional[QueryParams] = None) -> httpx.Response:
-        request_kwargs = self._prepare_request_kwargs(query_params=query_params)
+    def set_user_agent(self, user_agent: str) -> None:
+        self.client.headers["User-Agent"] = user_agent
+        logger.debug(f"User-Agent client header set to: '{user_agent}'")
 
-        if self.requests_delay_range:
+    def set_no_script(self, no_script: bool) -> None:
+        value = "1" if no_script else "0"
+        self.client.cookies.set("noscript", value)
+        logger.debug(f"noscript client cookie set to: '{value}'")
+
+    def close(self) -> None:
+        logger.debug("Closing scraper client")
+        self.client.close()
+        logger.info("Scraper client closed")
+
+    def _send_request(self, method: str, url: str, request_options: Dict[str, Any]) -> httpx.Response:
+        if self.randomize_user_agent:
+            self._set_random_user_agent(request_options)
+
+        if self.random_delay_range:
             self._apply_request_delay()
 
-        try:
-            logger.info(f"Making {method} request to URL: '{url}'")  # request kwargs not logged - may be sensitive
-            response = self.session.request(method, url, **request_kwargs)
-            logger.debug(f"Received response with status code: {response.status_code}")
+        logger.info(f"Making {method} request to URL: '{url}'")
+        response = self.client.request(method, url, **request_options)
+        logger.debug(f"Received response with status code: {response.status_code}")
+        self._validate_response(response)
 
-            validate_http_response(response)
-            logger.debug("Response validation succeeded")
-        except Exception as error:
-            logger.error(f"Request to '{url}' failed: {error}")
+        return response
+
+    def _handle_request_error(
+        self,
+        method: str,
+        url: str,
+        request_options: Dict[str, Any],
+        error: Exception
+    ) -> httpx.Response:
+        logger.error(f"{method} request to '{url}' failed: {error}")
+
+        if self.max_retries == 0:
             raise error
 
-        return response
+        return self._retry_request(method, url, request_options)
 
-    def _prepare_request_kwargs(self, query_params: Optional[QueryParams] = None) -> Dict[str, Any]:
-        logger.debug("Preparing request kwargs from defaults")
-        request_kwargs = self.request_kwargs.copy()
+    def _retry_request(self, method: str, url: str, request_options: Dict[str, Any]) -> httpx.Response:
+        logger.info(f"Retrying {method} request to '{url}' (max retries: {self.max_retries})")
 
-        if query_params:
-            request_kwargs.setdefault("params", {}).update(query_params)
-            logger.debug(f"Updated request kwargs with query params: {query_params}")
+        errors = []
 
-        if self.randomize_user_agent:
-            random_user_agent = get_random_user_agent()
-            request_kwargs.setdefault("headers", {})["User-Agent"] = random_user_agent
-            logger.debug(f"Updated request kwargs with random 'User-Agent' header: '{random_user_agent}'")
+        for retry_attempt in range(1, self.max_retries + 1):
+            try:
+                logger.debug(f"Retry attempt {retry_attempt}/{self.max_retries}")
+                return self._send_request(method, url, request_options)
+            except Exception as error:
+                logger.warning(f"Retry attempt {retry_attempt} failed: {error}")
+                errors.append(error)
+
+        error_to_raise = MaxRequestRetriesExceededError(method, url, self.max_retries, errors)
+        logger.error(str(error_to_raise))
+        raise error_to_raise from errors[-1]
+
+    def _prepare_request_options(self, params: Optional[QueryParamTypes] = None) -> Dict[str, Any]:
+        logger.debug("Preparing request options from defaults")
+        request_options = self.request_defaults.copy()
+
+        if params:
+            request_options.setdefault("params", {}).update(params)
+            logger.debug(f"Updated request options with query params: {params}")
 
-        return request_kwargs
+        return request_options
 
     def _apply_request_delay(self):
-        delay = random.uniform(*self.requests_delay_range)
+        delay = random.uniform(*self.random_delay_range)
         logger.debug(f"Applying request delay of {delay:.2f} seconds")
         time.sleep(delay)
 
-    def close(self):
-        logger.debug("Closing scraper session")
-        self.session.close()
-        logger.info("Scraper session closed")
+    @staticmethod
+    def _set_random_user_agent(request_options: Dict[str, str]):
+        user_agent = get_random_user_agent()
+        request_options.setdefault("headers", {})["User-Agent"] = user_agent
+        logger.debug(f"Updated request options with random User-Agent header: '{user_agent}'")
+
+    @staticmethod
+    def _validate_response(response: httpx.Response):
+        response.raise_for_status()
+
+        if ANTIBOT_CONTENT_IDENTIFIER in response.content:
+            raise AntiBotDetectedError(
+                "The response contains Anti-Bot content",
+                request=response.request,
+                response=response
+            )
+        if YAD2_CONTENT_IDENTIFIER not in response.content:
+            raise UnexpectedContentError(
+                "The response does not contain yad2 content",
+                request=response.request,
+                response=response
+            )
+
+        logger.debug("Response validation succeeded")
+
+    def __enter__(self):
+        logger.debug("Entering scraper context")
+        return self
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        logger.debug("Exiting scraper context")
+        self.close()
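
Putting the new surface together, a minimal usage sketch (the settings and URL are illustrative, not defaults from the package):

    from yad2_scraper.scraper import Yad2Scraper

    # random User-Agent per request, a 1-3 second delay before each
    # request, and up to 2 retries after a failed attempt
    with Yad2Scraper(randomize_user_agent=True, random_delay_range=(1.0, 3.0), max_retries=2) as scraper:
        response = scraper.get("https://www.yad2.co.il/vehicles/cars")
        print(response.status_code)
    # __exit__ closes the underlying httpx.Client via close()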
yad2_scraper/utils.py CHANGED
@@ -1,11 +1,7 @@
-import httpx
 from fake_useragent import FakeUserAgent
 from bs4 import BeautifulSoup, Tag
 from typing import Union, List
 
-from yad2_scraper.exceptions import AntiBotDetectedError
-from yad2_scraper.constants import ANTIBOT_CONTENT
-
 fua = FakeUserAgent()
 
 
@@ -24,13 +20,6 @@ def get_parent_url(url: str) -> str:
     return url.rstrip("/").rsplit("/", 1)[0]
 
 
-def validate_http_response(response: httpx.Response):
-    response.raise_for_status()
-
-    if ANTIBOT_CONTENT in response.content:
-        raise AntiBotDetectedError(f"The response contains Anti-Bot content")
-
-
 def find_html_tag_by_class_substring(e: Union[BeautifulSoup, Tag], tag_name: str, substring: str) -> Tag:
     return e.find(tag_name, class_=lambda class_name: class_name and substring in class_name)
 
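
For the helper that remains, a small sketch (the markup is made up for illustration):

    from bs4 import BeautifulSoup
    from yad2_scraper.utils import find_html_tag_by_class_substring

    # substring matching is handy when class names carry generated suffixes
    soup = BeautifulSoup('<div class="feed-item_abc123">listing</div>', "html.parser")
    tag = find_html_tag_by_class_substring(soup, "div", "feed-item")
    print(tag.text)  # listing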
{yad2_scraper-0.2.0.dist-info → yad2_scraper-0.3.0.dist-info}/METADATA RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: yad2-scraper
-Version: 0.2.0
+Version: 0.3.0
 Summary: Scrape Yad2 in Python.
 License: LICENSE
 Author: dav ost
yad2_scraper-0.3.0.dist-info/RECORD ADDED
@@ -0,0 +1,12 @@
+yad2_scraper/__init__.py,sha256=UUiIk6TAHTAP4IY86bIR4TcY3VVMTCyEF0Sq1MSneMM,141
+yad2_scraper/category.py,sha256=KXLyjMOlPzu3xj08-uRmffAMD83DbqFVm-y1-T83Djw,910
+yad2_scraper/constants.py,sha256=RAikaxRILQyiNeZG-_MPAwPi83abK5sscHdzDOrFge8,910
+yad2_scraper/exceptions.py,sha256=Vewa3CmVEdH6Wok3YP2686RoIrA7myKnjDQTNEZAn7w,830
+yad2_scraper/next_data.py,sha256=-vqvXJqugk-895_kOnwb7J8kUjugg28Aqrh4Z_ct11M,512
+yad2_scraper/query.py,sha256=WaOWUlyNye9MNXv3hkiUaBFDeV9lbkvHiaDHWYKzgtY,1194
+yad2_scraper/scraper.py,sha256=QeLNFxwTQSN9Dq3zotBFnnTU5XHQrnEoWOJD3qfj2w8,6564
+yad2_scraper/utils.py,sha256=48flvJPUje3nDHL3F_C3pPw3pf3ycke0f1WoXq2cSeE,837
+yad2_scraper-0.3.0.dist-info/LICENSE,sha256=JCpnDxMx2kE40e0UQ1svSmifrLWg2Gni5VTkJR68thY,1065
+yad2_scraper-0.3.0.dist-info/METADATA,sha256=YOj8J10dvwS7fYdC6nEzu0Ea-D-CpndhzN9o5LujvNk,925
+yad2_scraper-0.3.0.dist-info/WHEEL,sha256=IYZQI976HJqqOpQU6PHkJ8fb3tMNBFjg-Cn-pwAbaFM,88
+yad2_scraper-0.3.0.dist-info/RECORD,,
yad2_scraper-0.2.0.dist-info/RECORD DELETED
@@ -1,12 +0,0 @@
-yad2_scraper/__init__.py,sha256=UUiIk6TAHTAP4IY86bIR4TcY3VVMTCyEF0Sq1MSneMM,141
-yad2_scraper/category.py,sha256=KXLyjMOlPzu3xj08-uRmffAMD83DbqFVm-y1-T83Djw,910
-yad2_scraper/constants.py,sha256=HtZh0okD7FhbeAw3VladHsXIBlbxvZDoXaWm8aaVYvk,839
-yad2_scraper/exceptions.py,sha256=Ym3k3aQHe4-SQtfDch5DdnNN3RK6dbXLCZej1chSEGQ,48
-yad2_scraper/next_data.py,sha256=-vqvXJqugk-895_kOnwb7J8kUjugg28Aqrh4Z_ct11M,512
-yad2_scraper/query.py,sha256=WaOWUlyNye9MNXv3hkiUaBFDeV9lbkvHiaDHWYKzgtY,1194
-yad2_scraper/scraper.py,sha256=bGAjuu-ZhNYo4NBmrZU096NC2uYHvnxusAFZs1mjnRU,4202
-yad2_scraper/utils.py,sha256=9R-XJgWcmt_HTR_JQbFIFHcCg-II4ClFqlLQQZOU74g,1170
-yad2_scraper-0.2.0.dist-info/LICENSE,sha256=JCpnDxMx2kE40e0UQ1svSmifrLWg2Gni5VTkJR68thY,1065
-yad2_scraper-0.2.0.dist-info/METADATA,sha256=fznOi8yGp07DU7AMDSAg3LKuYJ5Gu6X6G8qS-SFfJt0,925
-yad2_scraper-0.2.0.dist-info/WHEEL,sha256=IYZQI976HJqqOpQU6PHkJ8fb3tMNBFjg-Cn-pwAbaFM,88
-yad2_scraper-0.2.0.dist-info/RECORD,,