apify 2.2.1b3.tar.gz → 2.2.1b5.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of apify might be problematic.

Files changed (38)
  1. {apify-2.2.1b3 → apify-2.2.1b5}/PKG-INFO +6 -4
  2. {apify-2.2.1b3 → apify-2.2.1b5}/README.md +5 -3
  3. {apify-2.2.1b3 → apify-2.2.1b5}/pyproject.toml +19 -9
  4. {apify-2.2.1b3 → apify-2.2.1b5}/LICENSE +0 -0
  5. {apify-2.2.1b3 → apify-2.2.1b5}/src/apify/__init__.py +0 -0
  6. {apify-2.2.1b3 → apify-2.2.1b5}/src/apify/_actor.py +0 -0
  7. {apify-2.2.1b3 → apify-2.2.1b5}/src/apify/_configuration.py +0 -0
  8. {apify-2.2.1b3 → apify-2.2.1b5}/src/apify/_consts.py +0 -0
  9. {apify-2.2.1b3 → apify-2.2.1b5}/src/apify/_crypto.py +0 -0
  10. {apify-2.2.1b3 → apify-2.2.1b5}/src/apify/_models.py +0 -0
  11. {apify-2.2.1b3 → apify-2.2.1b5}/src/apify/_platform_event_manager.py +0 -0
  12. {apify-2.2.1b3 → apify-2.2.1b5}/src/apify/_proxy_configuration.py +0 -0
  13. {apify-2.2.1b3 → apify-2.2.1b5}/src/apify/_utils.py +0 -0
  14. {apify-2.2.1b3 → apify-2.2.1b5}/src/apify/apify_storage_client/__init__.py +0 -0
  15. {apify-2.2.1b3 → apify-2.2.1b5}/src/apify/apify_storage_client/_apify_storage_client.py +0 -0
  16. {apify-2.2.1b3 → apify-2.2.1b5}/src/apify/apify_storage_client/_dataset_client.py +0 -0
  17. {apify-2.2.1b3 → apify-2.2.1b5}/src/apify/apify_storage_client/_dataset_collection_client.py +0 -0
  18. {apify-2.2.1b3 → apify-2.2.1b5}/src/apify/apify_storage_client/_key_value_store_client.py +0 -0
  19. {apify-2.2.1b3 → apify-2.2.1b5}/src/apify/apify_storage_client/_key_value_store_collection_client.py +0 -0
  20. {apify-2.2.1b3 → apify-2.2.1b5}/src/apify/apify_storage_client/_request_queue_client.py +0 -0
  21. {apify-2.2.1b3 → apify-2.2.1b5}/src/apify/apify_storage_client/_request_queue_collection_client.py +0 -0
  22. {apify-2.2.1b3 → apify-2.2.1b5}/src/apify/apify_storage_client/py.typed +0 -0
  23. {apify-2.2.1b3 → apify-2.2.1b5}/src/apify/log.py +0 -0
  24. {apify-2.2.1b3 → apify-2.2.1b5}/src/apify/py.typed +0 -0
  25. {apify-2.2.1b3 → apify-2.2.1b5}/src/apify/scrapy/__init__.py +0 -0
  26. {apify-2.2.1b3 → apify-2.2.1b5}/src/apify/scrapy/middlewares/__init__.py +0 -0
  27. {apify-2.2.1b3 → apify-2.2.1b5}/src/apify/scrapy/middlewares/apify_proxy.py +0 -0
  28. {apify-2.2.1b3 → apify-2.2.1b5}/src/apify/scrapy/middlewares/py.typed +0 -0
  29. {apify-2.2.1b3 → apify-2.2.1b5}/src/apify/scrapy/pipelines/__init__.py +0 -0
  30. {apify-2.2.1b3 → apify-2.2.1b5}/src/apify/scrapy/pipelines/actor_dataset_push.py +0 -0
  31. {apify-2.2.1b3 → apify-2.2.1b5}/src/apify/scrapy/pipelines/py.typed +0 -0
  32. {apify-2.2.1b3 → apify-2.2.1b5}/src/apify/scrapy/py.typed +0 -0
  33. {apify-2.2.1b3 → apify-2.2.1b5}/src/apify/scrapy/requests.py +0 -0
  34. {apify-2.2.1b3 → apify-2.2.1b5}/src/apify/scrapy/scheduler.py +0 -0
  35. {apify-2.2.1b3 → apify-2.2.1b5}/src/apify/scrapy/utils.py +0 -0
  36. {apify-2.2.1b3 → apify-2.2.1b5}/src/apify/storages/__init__.py +0 -0
  37. {apify-2.2.1b3 → apify-2.2.1b5}/src/apify/storages/_request_list.py +0 -0
  38. {apify-2.2.1b3 → apify-2.2.1b5}/src/apify/storages/py.typed +0 -0
{apify-2.2.1b3 → apify-2.2.1b5}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: apify
-Version: 2.2.1b3
+Version: 2.2.1b5
 Summary: Apify SDK for Python
 License: Apache-2.0
 Keywords: apify,sdk,automation,chrome,crawlee,crawler,headless,scraper,scraping
@@ -75,10 +75,11 @@ Below are few examples demonstrating how to use the Apify SDK with some web scra
 This example illustrates how to integrate the Apify SDK with [HTTPX](https://www.python-httpx.org/) and [BeautifulSoup](https://pypi.org/project/beautifulsoup4/) to scrape data from web pages.
 
 ```python
-from apify import Actor
 from bs4 import BeautifulSoup
 from httpx import AsyncClient
 
+from apify import Actor
+
 
 async def main() -> None:
     async with Actor:
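
For orientation, here is a minimal sketch of how the reordered imports read in a complete Actor. Only the import block and the `async def main()` / `async with Actor:` lines come from this diff; the input handling, target URL, and pushed fields are illustrative assumptions.

```python
from bs4 import BeautifulSoup
from httpx import AsyncClient

from apify import Actor


async def main() -> None:
    async with Actor:
        # Hypothetical input handling; not part of the diffed README snippet.
        actor_input = await Actor.get_input() or {}
        url = actor_input.get('url', 'https://apify.com')

        # Fetch the page with HTTPX and parse it with BeautifulSoup.
        async with AsyncClient() as client:
            response = await client.get(url)
        soup = BeautifulSoup(response.text, 'html.parser')

        # Store the page title in the default dataset.
        title = soup.title.string if soup.title else None
        await Actor.push_data({'url': url, 'title': title})
```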
@@ -123,8 +124,9 @@ async def main() -> None:
 This example demonstrates how to use the Apify SDK alongside `PlaywrightCrawler` from [Crawlee](https://crawlee.dev/python) to perform web scraping.
 
 ```python
-from apify import Actor, Request
-from crawlee.playwright_crawler import PlaywrightCrawler, PlaywrightCrawlingContext
+from crawlee.crawlers import PlaywrightCrawler, PlaywrightCrawlingContext
+
+from apify import Actor
 
 
 async def main() -> None:
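
Similarly, a minimal sketch of the Crawlee example with the new `crawlee.crawlers` import path. Only the imports are taken from the diff; the request handler, pushed fields, and start URL are illustrative assumptions.

```python
from crawlee.crawlers import PlaywrightCrawler, PlaywrightCrawlingContext

from apify import Actor


async def main() -> None:
    async with Actor:
        crawler = PlaywrightCrawler()

        # Hypothetical handler; the diffed README only shows the imports.
        @crawler.router.default_handler
        async def default_handler(context: PlaywrightCrawlingContext) -> None:
            # Extract the page title via Playwright and push it to the dataset.
            title = await context.page.title()
            await context.push_data({'url': context.request.url, 'title': title})

        await crawler.run(['https://apify.com'])
```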
{apify-2.2.1b3 → apify-2.2.1b5}/README.md

@@ -36,10 +36,11 @@ Below are few examples demonstrating how to use the Apify SDK with some web scra
 This example illustrates how to integrate the Apify SDK with [HTTPX](https://www.python-httpx.org/) and [BeautifulSoup](https://pypi.org/project/beautifulsoup4/) to scrape data from web pages.
 
 ```python
-from apify import Actor
 from bs4 import BeautifulSoup
 from httpx import AsyncClient
 
+from apify import Actor
+
 
 async def main() -> None:
     async with Actor:
@@ -84,8 +85,9 @@ async def main() -> None:
 This example demonstrates how to use the Apify SDK alongside `PlaywrightCrawler` from [Crawlee](https://crawlee.dev/python) to perform web scraping.
 
 ```python
-from apify import Actor, Request
-from crawlee.playwright_crawler import PlaywrightCrawler, PlaywrightCrawlingContext
+from crawlee.crawlers import PlaywrightCrawler, PlaywrightCrawlingContext
+
+from apify import Actor
 
 
 async def main() -> None:
{apify-2.2.1b3 → apify-2.2.1b5}/pyproject.toml

@@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api"
 
 [tool.poetry]
 name = "apify"
-version = "2.2.1b3"
+version = "2.2.1b5"
 description = "Apify SDK for Python"
 authors = ["Apify Technologies s.r.o. <support@apify.com>"]
 license = "Apache-2.0"
@@ -58,7 +58,7 @@ websockets = ">=10.0 <14.0.0"
 
 [tool.poetry.group.dev.dependencies]
 build = "~1.2.0"
-filelock = "~3.16.0"
+filelock = "~3.17.0"
 griffe = "~1.5.0"
 mypy = "~1.14.0"
 pre-commit = "~4.1.0"
@@ -78,6 +78,7 @@ scrapy = ["scrapy"]
 
 [tool.ruff]
 line-length = 120
+include = ["src/**/*.py", "tests/**/*.py", "docs/**/*.py", "website/**/*.py"]
 
 [tool.ruff.lint]
 select = ["ALL"]
@@ -128,10 +129,12 @@ indent-style = "space"
     "TRY301",  # Abstract `raise` to an inner function
     "TID252",  # Prefer absolute imports over relative imports from parent modules
 ]
-"**/{docs}/**" = [
-    "D",       # Everything from the pydocstyle
-    "INP001",  # File {filename} is part of an implicit namespace package, add an __init__.py
-    "F841",    # Local variable {variable} is assigned to but never used
+"**/{docs,website}/**" = [
+    "D",        # Everything from the pydocstyle
+    "INP001",   # File {filename} is part of an implicit namespace package, add an __init__.py
+    "F841",     # Local variable {variable} is assigned to but never used
+    "TRY301",   # Abstract `raise` to an inner function
+    "PLW0603",  # Using the global statement to update `{name}` is discouraged
 ]
 
 [tool.ruff.lint.flake8-quotes]
@@ -166,7 +169,7 @@ timeout = 1200
 [tool.mypy]
 python_version = "3.9"
 plugins = ["pydantic.mypy"]
-files = ["src", "tests"]
+files = ["src", "tests", "docs", "website"]
 check_untyped_defs = true
 disallow_incomplete_defs = true
 disallow_untyped_calls = true
@@ -180,13 +183,20 @@ warn_unused_ignores = true
 exclude = []
 
 [[tool.mypy.overrides]]
-module = ['scrapy', 'scrapy.*', 'lazy_object_proxy']
+module = [
+    'bs4',
+    'lazy_object_proxy',
+    'nest_asyncio',
+    'playwright.*',
+    'scrapy.*',
+    'selenium.*',
+]
 ignore_missing_imports = true
 
 [tool.basedpyright]
 pythonVersion = "3.9"
 typeCheckingMode = "standard"
-include = ["src", "tests"]
+include = ["src", "tests", "docs", "website"]
 
 [tool.coverage.report]
 exclude_lines = [