datamarket 0.7.101__tar.gz → 0.7.102__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of datamarket might be problematic. Click here for more details.

Files changed (35) hide show
  1. {datamarket-0.7.101 → datamarket-0.7.102}/PKG-INFO +1 -1
  2. {datamarket-0.7.101 → datamarket-0.7.102}/pyproject.toml +1 -1
  3. {datamarket-0.7.101 → datamarket-0.7.102}/src/datamarket/exceptions/main.py +13 -0
  4. {datamarket-0.7.101 → datamarket-0.7.102}/src/datamarket/utils/main.py +39 -1
  5. {datamarket-0.7.101 → datamarket-0.7.102}/LICENSE +0 -0
  6. {datamarket-0.7.101 → datamarket-0.7.102}/README.md +0 -0
  7. {datamarket-0.7.101 → datamarket-0.7.102}/src/datamarket/__init__.py +0 -0
  8. {datamarket-0.7.101 → datamarket-0.7.102}/src/datamarket/exceptions/__init__.py +0 -0
  9. {datamarket-0.7.101 → datamarket-0.7.102}/src/datamarket/interfaces/__init__.py +0 -0
  10. {datamarket-0.7.101 → datamarket-0.7.102}/src/datamarket/interfaces/alchemy.py +0 -0
  11. {datamarket-0.7.101 → datamarket-0.7.102}/src/datamarket/interfaces/aws.py +0 -0
  12. {datamarket-0.7.101 → datamarket-0.7.102}/src/datamarket/interfaces/azure.py +0 -0
  13. {datamarket-0.7.101 → datamarket-0.7.102}/src/datamarket/interfaces/drive.py +0 -0
  14. {datamarket-0.7.101 → datamarket-0.7.102}/src/datamarket/interfaces/ftp.py +0 -0
  15. {datamarket-0.7.101 → datamarket-0.7.102}/src/datamarket/interfaces/nominatim.py +0 -0
  16. {datamarket-0.7.101 → datamarket-0.7.102}/src/datamarket/interfaces/peerdb.py +0 -0
  17. {datamarket-0.7.101 → datamarket-0.7.102}/src/datamarket/interfaces/proxy.py +0 -0
  18. {datamarket-0.7.101 → datamarket-0.7.102}/src/datamarket/interfaces/tinybird.py +0 -0
  19. {datamarket-0.7.101 → datamarket-0.7.102}/src/datamarket/params/__init__.py +0 -0
  20. {datamarket-0.7.101 → datamarket-0.7.102}/src/datamarket/params/nominatim.py +0 -0
  21. {datamarket-0.7.101 → datamarket-0.7.102}/src/datamarket/utils/__init__.py +0 -0
  22. {datamarket-0.7.101 → datamarket-0.7.102}/src/datamarket/utils/airflow.py +0 -0
  23. {datamarket-0.7.101 → datamarket-0.7.102}/src/datamarket/utils/alchemy.py +0 -0
  24. {datamarket-0.7.101 → datamarket-0.7.102}/src/datamarket/utils/nominatim.py +0 -0
  25. {datamarket-0.7.101 → datamarket-0.7.102}/src/datamarket/utils/playwright/__init__.py +0 -0
  26. {datamarket-0.7.101 → datamarket-0.7.102}/src/datamarket/utils/playwright/async_api.py +0 -0
  27. {datamarket-0.7.101 → datamarket-0.7.102}/src/datamarket/utils/playwright/sync_api.py +0 -0
  28. {datamarket-0.7.101 → datamarket-0.7.102}/src/datamarket/utils/selenium.py +0 -0
  29. {datamarket-0.7.101 → datamarket-0.7.102}/src/datamarket/utils/soda.py +0 -0
  30. {datamarket-0.7.101 → datamarket-0.7.102}/src/datamarket/utils/strings/__init__.py +0 -0
  31. {datamarket-0.7.101 → datamarket-0.7.102}/src/datamarket/utils/strings/normalization.py +0 -0
  32. {datamarket-0.7.101 → datamarket-0.7.102}/src/datamarket/utils/strings/obfuscation.py +0 -0
  33. {datamarket-0.7.101 → datamarket-0.7.102}/src/datamarket/utils/strings/standardization.py +0 -0
  34. {datamarket-0.7.101 → datamarket-0.7.102}/src/datamarket/utils/typer.py +0 -0
  35. {datamarket-0.7.101 → datamarket-0.7.102}/src/datamarket/utils/types.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.3
2
2
  Name: datamarket
3
- Version: 0.7.101
3
+ Version: 0.7.102
4
4
  Summary: Utilities that integrate advanced scraping knowledge into just one library.
5
5
  License: GPL-3.0-or-later
6
6
  Author: DataMarket
@@ -1,6 +1,6 @@
1
1
  [tool.poetry]
2
2
  name = "datamarket"
3
- version = "0.7.101"
3
+ version = "0.7.102"
4
4
  description = "Utilities that integrate advanced scraping knowledge into just one library."
5
5
  authors = ["DataMarket <techsupport@datamarket.es>"]
6
6
  license = "GPL-3.0-or-later"
@@ -2,6 +2,9 @@
2
2
  # CLASSES
3
3
 
4
4
 
5
+ import requests
6
+
7
+
5
8
  class RedirectionDetectedError(Exception):
6
9
  def __init__(self, message="Redirection detected!"):
7
10
  self.message = message
@@ -18,3 +21,13 @@ class BadRequestError(Exception):
18
21
  def __init__(self, message="Bad request!"):
19
22
  self.message = message
20
23
  super().__init__(self.message)
24
+
25
+ class ManagedHTTPError(Exception):
26
+ """Signal that this HTTP status was handled and should not be retried."""
27
+ def __init__(self, response: requests.Response, *, url: str | None = None, message: str | None = None):
28
+ self.response = response
29
+ self.request = getattr(response, "request", None)
30
+ self.status_code = getattr(response, "status_code", None)
31
+ self.url = url or (self.request.url if self.request is not None else None)
32
+ self.message = message
33
+ super().__init__(message or f"HTTP {self.status_code} for {self.url}")
@@ -10,6 +10,7 @@ import re
10
10
  import shlex
11
11
  import subprocess
12
12
  import time
13
+ from typing import Sequence
13
14
  from babel.numbers import parse_decimal
14
15
 
15
16
  from bs4 import BeautifulSoup
@@ -26,6 +27,8 @@ from tenacity import (
26
27
  wait_exponential,
27
28
  )
28
29
 
30
+ from datamarket.exceptions.main import ManagedHTTPError
31
+
29
32
  from ..exceptions import RedirectionDetectedError, NotFoundError, BadRequestError
30
33
  from ..interfaces.proxy import ProxyInterface
31
34
 
@@ -132,7 +135,9 @@ def parse_field(dict_struct, field_path, format_method=None):
132
135
 
133
136
 
134
137
  @retry(
135
- retry=retry_if_not_exception_type((NotFoundError, BadRequestError, RedirectionDetectedError, ProxyError)),
138
+ retry=retry_if_not_exception_type(
139
+ (NotFoundError, BadRequestError, RedirectionDetectedError, ProxyError, ManagedHTTPError)
140
+ ),
136
141
  wait=wait_exponential(exp_base=3, multiplier=3, max=60),
137
142
  stop=stop_after_attempt(5),
138
143
  before_sleep=before_sleep_log(logger, logging.WARNING),
@@ -146,8 +151,37 @@ def get_data(
146
151
  proxy_interface: ProxyInterface = None,
147
152
  use_auth_proxies: bool = False,
148
153
  max_proxy_delay: timedelta = timedelta(minutes=10),
154
+ ignored_status_codes: Sequence[int] = (),
149
155
  **kwargs,
150
156
  ):
157
+ """
158
+ Fetches data from a given URL using HTTP requests, with support for proxy configuration, retries, and flexible output formats.
159
+
160
+ Args:
161
+ url (str): The target URL to fetch data from.
162
+ method (str, optional): HTTP method to use (e.g., 'GET', 'POST'). Defaults to 'GET'.
163
+ output (str, optional): Output format ('json', 'text', 'soup', 'response'). Defaults to 'json'.
164
+ sleep (tuple, optional): Tuple specifying max and min sleep times (seconds) after request. Defaults to (6, 3).
165
+ proxy_interface (ProxyInterface, optional): Proxy provider. If None, no proxy is used. Defaults to None.
166
+ use_auth_proxies (bool, optional): Whether to use authenticated proxies. Defaults to False.
167
+ max_proxy_delay (timedelta, optional): Maximum delay for proxy retry logic. Defaults to 10 minutes.
168
+ ignored_status_codes (Sequence[int], optional): Status codes to ignore and return response for. Defaults to ().
169
+ **kwargs: Additional arguments passed to the requests method.
170
+
171
+ Returns:
172
+ Depends on the 'output' argument:
173
+ - 'json': Parsed JSON response.
174
+ - 'text': Response text.
175
+ - 'soup': BeautifulSoup-parsed HTML.
176
+ - 'response': Raw requests.Response object.
177
+
178
+ Raises:
179
+ ManagedHTTPError: If a response status code is in `ignored_status_codes`.
180
+ NotFoundError: If a 404 status code is returned.
181
+ BadRequestError: If a 400 status code is returned.
182
+ RedirectionDetectedError, ProxyError: On specific error conditions.
183
+ requests.HTTPError: For other HTTP errors if not ignored.
184
+ """
151
185
  retry_type = retry_if_exception_type(ProxyError)
152
186
  wait = wait_exponential(exp_base=3, multiplier=3, max=60)
153
187
  stop = stop_after_delay(max_proxy_delay)
@@ -174,11 +208,15 @@ def get_data(
174
208
 
175
209
  ban_sleep(*sleep)
176
210
 
211
+ if r.status_code in ignored_status_codes:
212
+ raise ManagedHTTPError(r, url=url, message=f"Status {r.status_code} in ignored_status_codes for URL {url}")
177
213
  if r.status_code == 404:
178
214
  raise NotFoundError(f"404 Not Found error for {url}")
179
215
  if r.status_code == 400:
180
216
  raise BadRequestError(f"400 Bad Request error for {url}")
217
+
181
218
  r.raise_for_status()
219
+
182
220
  r.encoding = "utf-8"
183
221
 
184
222
  if output == "json":
File without changes
File without changes