apify: 2.0.0b14-py3-none-any.whl → 2.0.1-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of apify might be problematic.
- apify/_actor.py +22 -6
- apify/_configuration.py +18 -4
- apify/_crypto.py +2 -0
- apify/_platform_event_manager.py +0 -2
- apify/_proxy_configuration.py +1 -0
- apify/_utils.py +1 -0
- apify/apify_storage_client/_request_queue_client.py +3 -15
- apify/log.py +31 -1
- apify/scrapy/requests.py +8 -9
- apify/scrapy/utils.py +4 -0
- {apify-2.0.0b14.dist-info → apify-2.0.1.dist-info}/METADATA +98 -16
- {apify-2.0.0b14.dist-info → apify-2.0.1.dist-info}/RECORD +14 -14
- {apify-2.0.0b14.dist-info → apify-2.0.1.dist-info}/WHEEL +1 -1
- {apify-2.0.0b14.dist-info → apify-2.0.1.dist-info}/LICENSE +0 -0
apify/_actor.py
CHANGED
@@ -24,7 +24,7 @@ from apify._platform_event_manager import EventManager, LocalEventManager, Platf
 from apify._proxy_configuration import ProxyConfiguration
 from apify._utils import get_system_info, is_running_in_ipython
 from apify.apify_storage_client import ApifyStorageClient
-from apify.log import logger
+from apify.log import _configure_logging, logger
 from apify.storages import Dataset, KeyValueStore, RequestQueue

 if TYPE_CHECKING:
@@ -46,16 +46,24 @@ class _ActorType:
     _configuration: Configuration
     _is_exiting = False

-    def __init__(
+    def __init__(
+        self,
+        configuration: Configuration | None = None,
+        *,
+        configure_logging: bool = True,
+    ) -> None:
         """Create an Actor instance.

         Note that you don't have to do this, all the functionality is accessible using the default instance
         (e.g. `Actor.open_dataset()`).

         Args:
-
+            configuration: The Actor configuration to be used. If not passed, a new Configuration instance will
+                be created.
+            configure_logging: Should the default logging configuration be configured?
         """
-        self._configuration =
+        self._configuration = configuration or Configuration.get_global_configuration()
+        self._configure_logging = configure_logging
         self._apify_client = self.new_client()

         self._event_manager: EventManager
@@ -81,6 +89,9 @@ class _ActorType:
         When you exit the `async with` block, the `Actor.exit()` method is called, and if any exception happens while
         executing the block code, the `Actor.fail` method is called.
         """
+        if self._configure_logging:
+            _configure_logging(self._configuration)
+
         await self.init()
         return self

@@ -111,15 +122,20 @@ class _ActorType:

         return super().__repr__()

-    def __call__(self,
+    def __call__(self, configuration: Configuration | None = None, *, configure_logging: bool = True) -> Self:
         """Make a new Actor instance with a non-default configuration."""
-        return self.__class__(
+        return self.__class__(configuration=configuration, configure_logging=configure_logging)

     @property
     def apify_client(self) -> ApifyClientAsync:
         """The ApifyClientAsync instance the Actor instance uses."""
         return self._apify_client

+    @property
+    def configuration(self) -> Configuration:
+        """The Configuration instance the Actor instance uses."""
+        return self._configuration
+
     @property
     def config(self) -> Configuration:
         """The Configuration instance the Actor instance uses."""

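Taken together, these changes let callers pass their own `Configuration`, opt out of the SDK's automatic logging setup via the new keyword-only `configure_logging` flag, and read the active configuration through the new `configuration` property. A minimal usage sketch based only on the signatures visible in this diff, not on the published documentation:

```python
import logging

from apify import Actor, Configuration


async def main() -> None:
    # Configure logging manually instead of letting the SDK do it in `__aenter__`.
    logging.basicConfig(level=logging.INFO)

    config = Configuration.get_global_configuration()
    async with Actor(config, configure_logging=False) as actor:
        # `configuration` is the read-only property added in this release.
        Actor.log.info(f'Web server URL: {actor.configuration.web_server_url}')
```
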
apify/_configuration.py
CHANGED
@@ -5,8 +5,10 @@ from datetime import datetime, timedelta
 from typing import Annotated

 from pydantic import AliasChoices, BeforeValidator, Field
+from typing_extensions import deprecated

 from crawlee._utils.models import timedelta_ms
+from crawlee._utils.urls import validate_http_url
 from crawlee.configuration import Configuration as CrawleeConfiguration


@@ -160,15 +162,16 @@ class Configuration(CrawleeConfiguration):
         str | None,
         Field(
             alias='apify_sdk_latest_version',
-            deprecated=True,
             description='Specifies the most recent release version of the Apify SDK for Javascript. Used for '
             'checking for updates.',
         ),
+        deprecated('SDK version checking is not supported for the Python SDK'),
     ] = None

     log_format: Annotated[
         str | None,
-        Field(alias='apify_log_format'
+        Field(alias='apify_log_format'),
+        deprecated('Adjust the log format in code instead'),
     ] = None

     max_paid_dataset_items: Annotated[
@@ -248,6 +251,7 @@ class Configuration(CrawleeConfiguration):
             ),
             description='Date when the Actor will time out',
         ),
+        BeforeValidator(lambda val: val if val != '' else None),  # We should accept empty environment variables as well
     ] = None

     standby_port: Annotated[
@@ -256,7 +260,17 @@ class Configuration(CrawleeConfiguration):
             alias='actor_standby_port',
             description='TCP port for the Actor to start an HTTP server to receive messages in the Actor Standby mode',
         ),
-
+        deprecated('Use `web_server_port` instead'),
+    ] = 4321
+
+    standby_url: Annotated[
+        str,
+        BeforeValidator(validate_http_url),
+        Field(
+            alias='actor_standby_url',
+            description='URL for accessing web servers of Actor runs in Standby mode',
+        ),
+    ] = 'http://localhost'

     token: Annotated[
         str | None,
@@ -293,7 +307,7 @@ class Configuration(CrawleeConfiguration):
                 'actor_web_server_url',
                 'apify_container_url',
             ),
-            description='Unique public URL for accessing
+            description='Unique public URL for accessing a specific Actor run web server from the outside world',
         ),
     ] = 'http://localhost:4321'

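The configuration gains a `standby_url` field validated by `validate_http_url` and read from the `actor_standby_url` alias, while `standby_port` remains but is marked deprecated in favour of `web_server_port`. A short sketch of reading these fields, assuming the usual pydantic-settings behaviour of the parent Crawlee `Configuration`; the upper-cased environment variable name is an assumption derived from the alias:

```python
import os

from apify import Configuration

# Assumed environment variable name, derived from the `actor_standby_url` alias above.
os.environ['ACTOR_STANDBY_URL'] = 'https://example.apify.actor'

config = Configuration()
print(config.standby_url)   # the URL provided via the environment, validated as an HTTP URL
print(config.standby_port)  # 4321 by default; deprecated in favour of `web_server_port`
```
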
apify/_crypto.py
CHANGED
@@ -114,6 +114,7 @@ def private_decrypt(
     return decipher_bytes.decode('utf-8')


+@ignore_docs
 def load_private_key(private_key_file_base64: str, private_key_password: str) -> rsa.RSAPrivateKey:
     private_key = serialization.load_pem_private_key(
         base64.b64decode(private_key_file_base64.encode('utf-8')),
@@ -133,6 +134,7 @@ def _load_public_key(public_key_file_base64: str) -> rsa.RSAPublicKey:
     return public_key


+@ignore_docs
 def decrypt_input_secrets(private_key: rsa.RSAPrivateKey, input_data: Any) -> Any:
     """Decrypt input secrets."""
     if not isinstance(input_data, dict):

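The only change here is that two helpers are now decorated with `@ignore_docs` from `apify_shared.utils`, which keeps them out of the generated API reference; the same pattern appears in the Scrapy helpers below. A tiny sketch with a made-up helper name for illustration:

```python
from apify_shared.utils import ignore_docs


@ignore_docs
def _strip_value(value: str) -> str:
    """Helper that will not appear in the rendered API docs."""
    return value.strip()
```
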
apify/_platform_event_manager.py
CHANGED
@@ -8,7 +8,6 @@ import websockets.client
 from pydantic import BaseModel, Discriminator, Field, TypeAdapter
 from typing_extensions import Self, Unpack, override

-from apify_shared.utils import ignore_docs
 from crawlee.events._event_manager import EventManager, EventManagerOptions
 from crawlee.events._local_event_manager import LocalEventManager
 from crawlee.events._types import (
@@ -126,7 +125,6 @@ event_data_adapter: TypeAdapter[EventMessage | DeprecatedEvent | UnknownEvent] =
 )


-@ignore_docs
 class PlatformEventManager(EventManager):
     """A class for managing Actor events.

apify/_proxy_configuration.py
CHANGED
apify/_utils.py
CHANGED
@@ -11,6 +11,7 @@ def get_system_info() -> dict:
     system_info: dict[str, str | bool] = {
         'apify_sdk_version': metadata.version('apify'),
         'apify_client_version': metadata.version('apify-client'),
+        'crawlee_version': metadata.version('crawlee'),
         'python_version': python_version,
         'os': sys.platform,
     }

apify/apify_storage_client/_request_queue_client.py
CHANGED

@@ -2,7 +2,6 @@ from __future__ import annotations

 from typing import TYPE_CHECKING

-from more_itertools import chunked
 from typing_extensions import override

 from crawlee import Request
@@ -158,11 +157,8 @@ class RequestQueueClient(BaseRequestQueueClient):
         *,
         forefront: bool = False,
     ) -> BatchRequestsOperationResponse:
-
-
-
-        for chunk in chunked(requests, 25):  # The API endpoint won't accept more than 25 requests at once
-            response = await self._client.batch_add_requests(
+        return BatchRequestsOperationResponse.model_validate(
+            await self._client.batch_add_requests(
                 requests=[
                     r.model_dump(
                         by_alias=True,
@@ -174,18 +170,10 @@ class RequestQueueClient(BaseRequestQueueClient):
                             'data',
                         },
                     )
-                    for r in
+                    for r in requests
                 ],
                 forefront=forefront,
             )
-            processed.extend(response['processedRequests'])
-            unprocessed.extend(response['unprocessedRequests'])
-
-        return BatchRequestsOperationResponse.model_validate(
-            {
-                'processedRequests': processed,
-                'unprocessedRequests': unprocessed,
-            }
         )

     @override

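The previous client-side chunking into batches of 25 (via `more_itertools.chunked`) is gone: the whole request list is now forwarded to the client's `batch_add_requests` in a single call and the raw response is validated directly into `BatchRequestsOperationResponse`. A rough sketch of the new control flow, with the client object typed loosely as an assumption and the field exclusions omitted:

```python
from typing import Any

from crawlee import Request


async def add_requests_in_one_call(client: Any, requests: list[Request], *, forefront: bool = False) -> Any:
    """Sketch: forward the whole list at once instead of 25-request chunks."""
    payload = [r.model_dump(by_alias=True) for r in requests]  # field exclusions omitted here
    return await client.batch_add_requests(requests=payload, forefront=forefront)
```
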
apify/log.py
CHANGED
@@ -1,8 +1,13 @@
 from __future__ import annotations

 import logging
+from typing import TYPE_CHECKING

-from
+from apify_shared.utils import ignore_docs
+from crawlee._log_config import CrawleeLogFormatter, configure_logger, get_configured_log_level
+
+if TYPE_CHECKING:
+    from apify import Configuration

 # Name of the logger used throughout the library (resolves to 'apify')
 logger_name = __name__.split('.')[0]
@@ -11,5 +16,30 @@ logger_name = __name__.split('.')[0]
 logger = logging.getLogger(logger_name)


+@ignore_docs
 class ActorLogFormatter(CrawleeLogFormatter):  # noqa: D101 Inherited from parent class
     pass
+
+
+def _configure_logging(configuration: Configuration) -> None:
+    apify_client_logger = logging.getLogger('apify_client')
+    configure_logger(apify_client_logger, configuration, remove_old_handlers=True)
+
+    level = get_configured_log_level(configuration)
+
+    # Keep apify_client logger quiet unless debug logging is requested
+    if level > logging.DEBUG:
+        apify_client_logger.setLevel(logging.INFO)
+    else:
+        apify_client_logger.setLevel(level)
+
+    # Silence HTTPX logger unless debug logging is requested
+    httpx_logger = logging.getLogger('httpx')
+    if level > logging.DEBUG:
+        httpx_logger.setLevel(logging.WARNING)
+    else:
+        httpx_logger.setLevel(level)
+
+    # Use configured log level for apify logger
+    apify_logger = logging.getLogger('apify')
+    configure_logger(apify_logger, configuration, remove_old_handlers=True)

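`_configure_logging` attaches Crawlee's logging configuration to the `apify` and `apify_client` loggers and quiets `httpx` unless debug logging is requested. For Actors created with `configure_logging=False`, a rough manual equivalent might look like the following; the formatter string is an assumption, not the SDK's exact format:

```python
import logging

handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter('[%(name)s] %(levelname)s %(message)s'))

for name in ('apify', 'apify_client'):
    log = logging.getLogger(name)
    log.addHandler(handler)
    log.setLevel(logging.INFO)

# Keep HTTPX quiet unless you are debugging HTTP traffic, mirroring the logic above.
logging.getLogger('httpx').setLevel(logging.WARNING)
```
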
apify/scrapy/requests.py
CHANGED
@@ -4,6 +4,8 @@ import codecs
 import pickle
 from typing import Any, cast

+from apify_shared.utils import ignore_docs
+
 try:
     from scrapy import Request, Spider
     from scrapy.http.headers import Headers
@@ -14,6 +16,7 @@ except ImportError as exc:
     ) from exc

 from crawlee import Request as CrawleeRequest
+from crawlee._types import HttpHeaders
 from crawlee._utils.crypto import crypto_random_object_id
 from crawlee._utils.requests import compute_unique_key, unique_key_to_request_id

@@ -28,6 +31,7 @@ def _is_request_produced_by_middleware(scrapy_request: Request) -> bool:
     return bool(scrapy_request.meta.get('redirect_times')) or bool(scrapy_request.meta.get('retry_times'))


+@ignore_docs
 def to_apify_request(scrapy_request: Request, spider: Spider) -> CrawleeRequest | None:
     """Convert a Scrapy request to an Apify request.

@@ -74,9 +78,9 @@ def to_apify_request(scrapy_request: Request, spider: Spider) -> CrawleeRequest
             id=request_id,
         )

-        # Convert Scrapy's headers to a
+        # Convert Scrapy's headers to a HttpHeaders and store them in the apify_request
         if isinstance(scrapy_request.headers, Headers):
-            apify_request.headers =
+            apify_request.headers = HttpHeaders(scrapy_request.headers.to_unicode_dict())
         else:
             Actor.log.warning(
                 f'Invalid scrapy_request.headers type, not scrapy.http.headers.Headers: {scrapy_request.headers}'
@@ -98,6 +102,7 @@ def to_apify_request(scrapy_request: Request, spider: Spider) -> CrawleeRequest
     return apify_request


+@ignore_docs
 def to_scrapy_request(apify_request: CrawleeRequest, spider: Spider) -> Request:
     """Convert an Apify request to a Scrapy request.

@@ -160,13 +165,7 @@ def to_scrapy_request(apify_request: CrawleeRequest, spider: Spider) -> Request:

     # Add optional 'headers' field
     if apify_request.headers:
-
-        scrapy_request.headers = Headers(apify_request.headers)
-    else:
-        Actor.log.warning(
-            'apify_request[headers] is not an instance of the dict class, '
-            f'apify_request[headers] = {apify_request.headers}',
-        )
+        scrapy_request.headers |= Headers(apify_request.headers)

     # Add optional 'userData' field
     if apify_request.user_data:

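Header handling now converts Scrapy headers into Crawlee's `HttpHeaders` in one direction and merges with `|=` in the other. A small round-trip sketch, assuming Scrapy and Crawlee are installed; `crawlee._types` is a private module and is mirrored here only because the diff imports it:

```python
from crawlee._types import HttpHeaders
from scrapy.http.headers import Headers

scrapy_headers = Headers({'User-Agent': 'my-spider', 'Accept': 'text/html'})

# Scrapy -> Apify, the same conversion `to_apify_request` now performs.
apify_headers = HttpHeaders(scrapy_headers.to_unicode_dict())

# Apify -> Scrapy, merging into existing headers as `to_scrapy_request` now does with `|=`.
existing = Headers({'Accept-Language': 'en'})
existing |= Headers(apify_headers)
```
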
apify/scrapy/utils.py
CHANGED
@@ -4,6 +4,8 @@ import asyncio
 from base64 import b64encode
 from urllib.parse import unquote

+from apify_shared.utils import ignore_docs
+
 try:
     from scrapy.settings import Settings  # noqa: TCH002
     from scrapy.utils.project import get_project_settings
@@ -18,6 +20,7 @@ except ImportError as exc:
 nested_event_loop: asyncio.AbstractEventLoop = asyncio.new_event_loop()


+@ignore_docs
 def get_basic_auth_header(username: str, password: str, auth_encoding: str = 'latin-1') -> bytes:
     """Generate a basic authentication header for the given username and password."""
     string = f'{unquote(username)}:{unquote(password)}'
@@ -25,6 +28,7 @@ def get_basic_auth_header(username: str, password: str, auth_encoding: str = 'la
     return b'Basic ' + b64encode(user_pass)


+@ignore_docs
 def get_running_event_loop_id() -> int:
     """Get the ID of the currently running event loop.

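`get_basic_auth_header` is a plain HTTP Basic auth helper; a quick usage example (importing it requires Scrapy to be installed, since the module imports Scrapy at load time):

```python
from apify.scrapy.utils import get_basic_auth_header

header_value = get_basic_auth_header('my-user', 'my-password')
print(header_value)  # b'Basic bXktdXNlcjpteS1wYXNzd29yZA=='
```
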
{apify-2.0.0b14.dist-info → apify-2.0.1.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: apify
-Version: 2.0.
+Version: 2.0.1
 Summary: Apify SDK for Python
 License: Apache-2.0
 Keywords: apify,sdk,automation,chrome,crawlee,crawler,headless,scraper,scraping
@@ -16,11 +16,12 @@ Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
 Classifier: Topic :: Software Development :: Libraries
 Provides-Extra: scrapy
-Requires-Dist: apify-client (>=1.
+Requires-Dist: apify-client (>=1.8.1)
 Requires-Dist: apify-shared (>=1.1.2)
-Requires-Dist: crawlee (>=0.3.
+Requires-Dist: crawlee (>=0.3.8)
 Requires-Dist: cryptography (>=42.0.0)
 Requires-Dist: httpx (>=0.27.0)
 Requires-Dist: lazy-object-proxy (>=1.10.0)
@@ -64,27 +65,108 @@ pip install apify[scrapy]

 For usage instructions, check the documentation on [Apify Docs](https://docs.apify.com/sdk/python/).

-##
+## Examples
+
+Below are few examples demonstrating how to use the Apify SDK with some web scraping-related libraries.
+
+### Apify SDK with HTTPX and BeautifulSoup
+
+This example illustrates how to integrate the Apify SDK with [HTTPX](https://www.python-httpx.org/) and [BeautifulSoup](https://pypi.org/project/beautifulsoup4/) to scrape data from web pages.

 ```python
 from apify import Actor
 from bs4 import BeautifulSoup
 from httpx import AsyncClient

+
+async def main() -> None:
+    async with Actor:
+        # Retrieve the Actor input, and use default values if not provided.
+        actor_input = await Actor.get_input() or {}
+        start_urls = actor_input.get('start_urls', [{'url': 'https://apify.com'}])
+
+        # Open the default request queue for handling URLs to be processed.
+        request_queue = await Actor.open_request_queue()
+
+        # Enqueue the start URLs.
+        for start_url in start_urls:
+            url = start_url.get('url')
+            await request_queue.add_request(url)
+
+        # Process the URLs from the request queue.
+        while request := await request_queue.fetch_next_request():
+            Actor.log.info(f'Scraping {request.url} ...')
+
+            # Fetch the HTTP response from the specified URL using HTTPX.
+            async with AsyncClient() as client:
+                response = await client.get(request.url)
+
+            # Parse the HTML content using Beautiful Soup.
+            soup = BeautifulSoup(response.content, 'html.parser')
+
+            # Extract the desired data.
+            data = {
+                'url': actor_input['url'],
+                'title': soup.title.string,
+                'h1s': [h1.text for h1 in soup.find_all('h1')],
+                'h2s': [h2.text for h2 in soup.find_all('h2')],
+                'h3s': [h3.text for h3 in soup.find_all('h3')],
+            }
+
+            # Store the extracted data to the default dataset.
+            await Actor.push_data(data)
+```
+
+### Apify SDK with PlaywrightCrawler from Crawlee
+
+This example demonstrates how to use the Apify SDK alongside `PlaywrightCrawler` from [Crawlee](https://crawlee.dev/python) to perform web scraping.
+
+```python
+from apify import Actor, Request
+from crawlee.playwright_crawler import PlaywrightCrawler, PlaywrightCrawlingContext
+
+
 async def main() -> None:
     async with Actor:
-        #
-        actor_input = await Actor.get_input()
-
-
-
-
-
-
-
-
-
-
+        # Retrieve the Actor input, and use default values if not provided.
+        actor_input = await Actor.get_input() or {}
+        start_urls = [url.get('url') for url in actor_input.get('start_urls', [{'url': 'https://apify.com'}])]
+
+        # Exit if no start URLs are provided.
+        if not start_urls:
+            Actor.log.info('No start URLs specified in Actor input, exiting...')
+            await Actor.exit()
+
+        # Create a crawler.
+        crawler = PlaywrightCrawler(
+            # Limit the crawl to max requests. Remove or increase it for crawling all links.
+            max_requests_per_crawl=50,
+            headless=True,
+        )
+
+        # Define a request handler, which will be called for every request.
+        @crawler.router.default_handler
+        async def request_handler(context: PlaywrightCrawlingContext) -> None:
+            url = context.request.url
+            Actor.log.info(f'Scraping {url}...')
+
+            # Extract the desired data.
+            data = {
+                'url': context.request.url,
+                'title': await context.page.title(),
+                'h1s': [await h1.text_content() for h1 in await context.page.locator('h1').all()],
+                'h2s': [await h2.text_content() for h2 in await context.page.locator('h2').all()],
+                'h3s': [await h3.text_content() for h3 in await context.page.locator('h3').all()],
+            }
+
+            # Store the extracted data to the default dataset.
+            await context.push_data(data)
+
+            # Enqueue additional links found on the current page.
+            await context.enqueue_links()
+
+        # Run the crawler with the starting URLs.
+        await crawler.run(start_urls)
 ```

 ## What are Actors?

{apify-2.0.0b14.dist-info → apify-2.0.1.dist-info}/RECORD
CHANGED

@@ -1,22 +1,22 @@
 apify/__init__.py,sha256=ikoi2EpDYl6y-XSVtlU8UsdQdMEyOiIJCRRAaZFDOP8,550
-apify/_actor.py,sha256=
-apify/_configuration.py,sha256=
+apify/_actor.py,sha256=oPgQ3rxxIEzVcZ9XtI3lf1a_6gwIMgxihNuYGjJpGww,41816
+apify/_configuration.py,sha256=n67sK9g8llsG8ZqIWhi1BEHliplGs5PTB6Ig7_0PaMU,9629
 apify/_consts.py,sha256=_Xq4hOfOA1iZ3n1P967YWdyncKivpbX6RTlp_qanUoE,330
-apify/_crypto.py,sha256=
+apify/_crypto.py,sha256=e0_aM3l9_5Osk-jszYOOjrAKK60OggSHbiw5c30QnsU,5638
 apify/_models.py,sha256=oYlTEr-DyQAE-V2rrYD5PhUxTXVPdAig7QV-u6CJw3E,5571
-apify/_platform_event_manager.py,sha256=
-apify/_proxy_configuration.py,sha256=
-apify/_utils.py,sha256=
+apify/_platform_event_manager.py,sha256=K4cHabbQ7_ex7vkX-c-VhAOp8Efw3HDn5Wp4lfA-qAU,7571
+apify/_proxy_configuration.py,sha256=2z4VV_NrnIp6pDpgQKlKpcHM2pPyXiOpFedpPWje48A,13087
+apify/_utils.py,sha256=Ghho3Gf11zYN8qhjhAkZRvQ--A9Js36GHB0YSDGUK58,694
 apify/apify_storage_client/__init__.py,sha256=-UbR68bFsDR6ln8OFs4t50eqcnY36hujO-SeOt-KmcA,114
 apify/apify_storage_client/_apify_storage_client.py,sha256=xi4OFchxhe-1-sykanH6Zcya4OcBhn2uf7OQ1pV4Ins,2338
 apify/apify_storage_client/_dataset_client.py,sha256=j9seF2OKvbSMD9R9XF9fpa1vtr_1w4JcRV--WCmvU4E,5501
 apify/apify_storage_client/_dataset_collection_client.py,sha256=fkYvYGQCigHD2CDzpWk0swNAkfvAinAhMGpYqllle3E,1445
 apify/apify_storage_client/_key_value_store_client.py,sha256=uyeQgb75sGFsqIS4sq4hEZ3QP81COLfS3tmTqHc0tso,3340
 apify/apify_storage_client/_key_value_store_collection_client.py,sha256=vCtMTI-jx89Qp5WHILDNkCthwLuv0MAwm1J_5E4aypU,1519
-apify/apify_storage_client/_request_queue_client.py,sha256=
+apify/apify_storage_client/_request_queue_client.py,sha256=jAiFkaJ38_myHFGTw-Rk21wmpbN0UCR2w2SFoimFGFc,5826
 apify/apify_storage_client/_request_queue_collection_client.py,sha256=NnO73UJ9ZrjV8xoudo30wfaM-SojRkG0guhxDyB-K1g,1527
 apify/apify_storage_client/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-apify/log.py,sha256=
+apify/log.py,sha256=zIVjrqQ1DNWNQQOAmdmR9oAbf4nJH7CSMB6u4OOUf6I,1448
 apify/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 apify/scrapy/__init__.py,sha256=qDPV_zTRFaUqoFOyS5g4uBfz-UCkmWYJ82VXQ_3Cw6k,348
 apify/scrapy/middlewares/__init__.py,sha256=tfW-d3WFWLeNEjL8fTmon6NwgD-OXx1Bw2fBdU-wPy4,114
@@ -26,12 +26,12 @@ apify/scrapy/pipelines/__init__.py,sha256=GWPeLN_Zwj8vRBWtXW6DaxdB7mvyQ7Jw5Tz1cc
 apify/scrapy/pipelines/actor_dataset_push.py,sha256=QERmmExQOGIKQ70-p-lCj5qyE-c-fnYplEqd4mgaB1Q,953
 apify/scrapy/pipelines/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 apify/scrapy/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-apify/scrapy/requests.py,sha256=
+apify/scrapy/requests.py,sha256=F4VNaX2fGqybJKbhcRcz0_m6dXse5LzKll4gtMuTRko,7480
 apify/scrapy/scheduler.py,sha256=AAIKY5i1QxkC1mtmix6n3M2eQaOw-d1T56Noue9xToc,6013
-apify/scrapy/utils.py,sha256=
+apify/scrapy/utils.py,sha256=0XdFxi1qlUa6gHXG96e1FU9gW0N5Rsu0sVZklFYfC2U,2884
 apify/storages/__init__.py,sha256=-9tEYJVabVs_eRVhUehxN58GH0UG8OfuGjGwuDieP2M,122
 apify/storages/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-apify-2.0.
-apify-2.0.
-apify-2.0.
-apify-2.0.
+apify-2.0.1.dist-info/LICENSE,sha256=AsFjHssKjj4LGd2ZCqXn6FBzMqcWdjQre1byPPSypVw,11355
+apify-2.0.1.dist-info/METADATA,sha256=yD0LEyfMGH3_ePU2-BCBtCJMwFwTOvUVOP730cXjrE0,8655
+apify-2.0.1.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
+apify-2.0.1.dist-info/RECORD,,

{apify-2.0.0b14.dist-info → apify-2.0.1.dist-info}/LICENSE
File without changes