apify 2.1.0b2.tar.gz → 2.1.0b4.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of apify might be problematic.

Files changed (38)
  1. {apify-2.1.0b2 → apify-2.1.0b4}/PKG-INFO +2 -2
  2. {apify-2.1.0b2 → apify-2.1.0b4}/pyproject.toml +4 -4
  3. {apify-2.1.0b2 → apify-2.1.0b4}/src/apify/scrapy/requests.py +7 -5
  4. {apify-2.1.0b2 → apify-2.1.0b4}/LICENSE +0 -0
  5. {apify-2.1.0b2 → apify-2.1.0b4}/README.md +0 -0
  6. {apify-2.1.0b2 → apify-2.1.0b4}/src/apify/__init__.py +0 -0
  7. {apify-2.1.0b2 → apify-2.1.0b4}/src/apify/_actor.py +0 -0
  8. {apify-2.1.0b2 → apify-2.1.0b4}/src/apify/_configuration.py +0 -0
  9. {apify-2.1.0b2 → apify-2.1.0b4}/src/apify/_consts.py +0 -0
  10. {apify-2.1.0b2 → apify-2.1.0b4}/src/apify/_crypto.py +0 -0
  11. {apify-2.1.0b2 → apify-2.1.0b4}/src/apify/_models.py +0 -0
  12. {apify-2.1.0b2 → apify-2.1.0b4}/src/apify/_platform_event_manager.py +0 -0
  13. {apify-2.1.0b2 → apify-2.1.0b4}/src/apify/_proxy_configuration.py +0 -0
  14. {apify-2.1.0b2 → apify-2.1.0b4}/src/apify/_utils.py +0 -0
  15. {apify-2.1.0b2 → apify-2.1.0b4}/src/apify/apify_storage_client/__init__.py +0 -0
  16. {apify-2.1.0b2 → apify-2.1.0b4}/src/apify/apify_storage_client/_apify_storage_client.py +0 -0
  17. {apify-2.1.0b2 → apify-2.1.0b4}/src/apify/apify_storage_client/_dataset_client.py +0 -0
  18. {apify-2.1.0b2 → apify-2.1.0b4}/src/apify/apify_storage_client/_dataset_collection_client.py +0 -0
  19. {apify-2.1.0b2 → apify-2.1.0b4}/src/apify/apify_storage_client/_key_value_store_client.py +0 -0
  20. {apify-2.1.0b2 → apify-2.1.0b4}/src/apify/apify_storage_client/_key_value_store_collection_client.py +0 -0
  21. {apify-2.1.0b2 → apify-2.1.0b4}/src/apify/apify_storage_client/_request_queue_client.py +0 -0
  22. {apify-2.1.0b2 → apify-2.1.0b4}/src/apify/apify_storage_client/_request_queue_collection_client.py +0 -0
  23. {apify-2.1.0b2 → apify-2.1.0b4}/src/apify/apify_storage_client/py.typed +0 -0
  24. {apify-2.1.0b2 → apify-2.1.0b4}/src/apify/log.py +0 -0
  25. {apify-2.1.0b2 → apify-2.1.0b4}/src/apify/py.typed +0 -0
  26. {apify-2.1.0b2 → apify-2.1.0b4}/src/apify/scrapy/__init__.py +0 -0
  27. {apify-2.1.0b2 → apify-2.1.0b4}/src/apify/scrapy/middlewares/__init__.py +0 -0
  28. {apify-2.1.0b2 → apify-2.1.0b4}/src/apify/scrapy/middlewares/apify_proxy.py +0 -0
  29. {apify-2.1.0b2 → apify-2.1.0b4}/src/apify/scrapy/middlewares/py.typed +0 -0
  30. {apify-2.1.0b2 → apify-2.1.0b4}/src/apify/scrapy/pipelines/__init__.py +0 -0
  31. {apify-2.1.0b2 → apify-2.1.0b4}/src/apify/scrapy/pipelines/actor_dataset_push.py +0 -0
  32. {apify-2.1.0b2 → apify-2.1.0b4}/src/apify/scrapy/pipelines/py.typed +0 -0
  33. {apify-2.1.0b2 → apify-2.1.0b4}/src/apify/scrapy/py.typed +0 -0
  34. {apify-2.1.0b2 → apify-2.1.0b4}/src/apify/scrapy/scheduler.py +0 -0
  35. {apify-2.1.0b2 → apify-2.1.0b4}/src/apify/scrapy/utils.py +0 -0
  36. {apify-2.1.0b2 → apify-2.1.0b4}/src/apify/storages/__init__.py +0 -0
  37. {apify-2.1.0b2 → apify-2.1.0b4}/src/apify/storages/_request_list.py +0 -0
  38. {apify-2.1.0b2 → apify-2.1.0b4}/src/apify/storages/py.typed +0 -0
{apify-2.1.0b2 → apify-2.1.0b4}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: apify
-Version: 2.1.0b2
+Version: 2.1.0b4
 Summary: Apify SDK for Python
 License: Apache-2.0
 Keywords: apify,sdk,automation,chrome,crawlee,crawler,headless,scraper,scraping
@@ -21,7 +21,7 @@ Classifier: Topic :: Software Development :: Libraries
 Provides-Extra: scrapy
 Requires-Dist: apify-client (>=1.8.1)
 Requires-Dist: apify-shared (>=1.1.2)
-Requires-Dist: crawlee (>=0.3.9)
+Requires-Dist: crawlee (>=0.4.0,<0.5.0)
 Requires-Dist: cryptography (>=42.0.0)
 Requires-Dist: httpx (>=0.27.0)
 Requires-Dist: lazy-object-proxy (>=1.10.0)
{apify-2.1.0b2 → apify-2.1.0b4}/pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api"
 
 [tool.poetry]
 name = "apify"
-version = "2.1.0b2"
+version = "2.1.0b4"
 description = "Apify SDK for Python"
 authors = ["Apify Technologies s.r.o. <support@apify.com>"]
 license = "Apache-2.0"
@@ -48,7 +48,7 @@ keywords = [
 python = "^3.9"
 apify-client = ">=1.8.1"
 apify-shared = ">=1.1.2"
-crawlee = ">=0.3.9"
+crawlee = "~0.4.0"
 cryptography = ">=42.0.0"
 httpx = ">=0.27.0"
 lazy-object-proxy = ">=1.10.0"
@@ -65,13 +65,13 @@ pre-commit = "~4.0.0"
 pydoc-markdown = "~4.8.0"
 pytest = "~8.3.0"
 pytest-asyncio = "~0.24.0"
-pytest-cov = "~5.0.0"
+pytest-cov = "~6.0.0"
 pytest-only = "~2.1.0"
 pytest-timeout = "~2.3.0"
 pytest-xdist = "~3.6.0"
 respx = "~0.21.0"
 ruff = "~0.7.0"
-setuptools = "~75.0.0" # setuptools are used by pytest but not explicitly required
+setuptools = "~75.6.0" # setuptools are used by pytest but not explicitly required
 
 [tool.poetry.extras]
 scrapy = ["scrapy"]
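
For reference, Poetry's tilde constraint ~0.4.0 is equivalent to >=0.4.0,<0.5.0, which is why the regenerated PKG-INFO metadata above shows the crawlee dependency as that bounded range instead of the previous open-ended >=0.3.9.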
{apify-2.1.0b2 → apify-2.1.0b4}/src/apify/scrapy/requests.py
@@ -42,8 +42,10 @@ def to_apify_request(scrapy_request: Request, spider: Spider) -> CrawleeRequest
     Returns:
         The converted Apify request if the conversion was successful, otherwise None.
     """
-    if not isinstance(cast(Any, scrapy_request), Request):
-        Actor.log.warning('Failed to convert to Apify request: Scrapy request must be a Request instance.')
+    if not isinstance(scrapy_request, Request):
+        Actor.log.warning(  # type: ignore[unreachable]
+            'Failed to convert to Apify request: Scrapy request must be a Request instance.'
+        )
         return None
 
     call_id = crypto_random_object_id(8)
@@ -53,7 +55,7 @@ def to_apify_request(scrapy_request: Request, spider: Spider) -> CrawleeRequest
     if _is_request_produced_by_middleware(scrapy_request):
         unique_key = compute_unique_key(
             url=scrapy_request.url,
-            method=scrapy_request.method,
+            method=scrapy_request.method,  # type: ignore[arg-type] # str vs literal
             payload=scrapy_request.body,
             use_extended_unique_key=True,
         )
@@ -80,9 +82,9 @@ def to_apify_request(scrapy_request: Request, spider: Spider) -> CrawleeRequest
 
     # Convert Scrapy's headers to a HttpHeaders and store them in the apify_request
    if isinstance(scrapy_request.headers, Headers):
-        apify_request.headers = HttpHeaders(scrapy_request.headers.to_unicode_dict())
+        apify_request.headers = HttpHeaders(dict(scrapy_request.headers.to_unicode_dict()))
     else:
-        Actor.log.warning(
+        Actor.log.warning(  # type: ignore[unreachable]
             f'Invalid scrapy_request.headers type, not scrapy.http.headers.Headers: {scrapy_request.headers}'
         )
 
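
For context, a minimal standalone sketch (not part of the diff) of calling the to_apify_request helper whose warning and header-handling branches changed above; the ExampleSpider class and the example URL are illustrative assumptions, not taken from the package.

from scrapy import Request, Spider

from apify.scrapy.requests import to_apify_request


class ExampleSpider(Spider):
    # Hypothetical spider used only for this illustration.
    name = 'example'


spider = ExampleSpider()
scrapy_request = Request(url='https://example.com', method='GET')

# Returns the converted Apify/Crawlee request, or None when the argument is not
# a scrapy Request instance (the warning branch touched by this release).
apify_request = to_apify_request(scrapy_request, spider=spider)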