apify 1.7.1b1__py3-none-any.whl → 2.2.0b14__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of apify has been flagged as potentially problematic. See the release's registry page for details.
- apify/__init__.py +19 -4
- apify/_actor.py +1030 -0
- apify/_configuration.py +370 -0
- apify/_consts.py +10 -0
- apify/_crypto.py +31 -27
- apify/_models.py +117 -0
- apify/_platform_event_manager.py +231 -0
- apify/_proxy_configuration.py +320 -0
- apify/_utils.py +18 -484
- apify/apify_storage_client/__init__.py +3 -0
- apify/apify_storage_client/_apify_storage_client.py +68 -0
- apify/apify_storage_client/_dataset_client.py +190 -0
- apify/apify_storage_client/_dataset_collection_client.py +51 -0
- apify/apify_storage_client/_key_value_store_client.py +94 -0
- apify/apify_storage_client/_key_value_store_collection_client.py +51 -0
- apify/apify_storage_client/_request_queue_client.py +176 -0
- apify/apify_storage_client/_request_queue_collection_client.py +51 -0
- apify/apify_storage_client/py.typed +0 -0
- apify/log.py +22 -105
- apify/scrapy/__init__.py +11 -3
- apify/scrapy/middlewares/__init__.py +3 -1
- apify/scrapy/middlewares/apify_proxy.py +29 -27
- apify/scrapy/middlewares/py.typed +0 -0
- apify/scrapy/pipelines/__init__.py +3 -1
- apify/scrapy/pipelines/actor_dataset_push.py +6 -3
- apify/scrapy/pipelines/py.typed +0 -0
- apify/scrapy/py.typed +0 -0
- apify/scrapy/requests.py +60 -58
- apify/scrapy/scheduler.py +28 -19
- apify/scrapy/utils.py +10 -32
- apify/storages/__init__.py +4 -10
- apify/storages/_request_list.py +150 -0
- apify/storages/py.typed +0 -0
- apify-2.2.0b14.dist-info/METADATA +211 -0
- apify-2.2.0b14.dist-info/RECORD +38 -0
- {apify-1.7.1b1.dist-info → apify-2.2.0b14.dist-info}/WHEEL +1 -2
- apify/_memory_storage/__init__.py +0 -3
- apify/_memory_storage/file_storage_utils.py +0 -71
- apify/_memory_storage/memory_storage_client.py +0 -219
- apify/_memory_storage/resource_clients/__init__.py +0 -19
- apify/_memory_storage/resource_clients/base_resource_client.py +0 -141
- apify/_memory_storage/resource_clients/base_resource_collection_client.py +0 -114
- apify/_memory_storage/resource_clients/dataset.py +0 -452
- apify/_memory_storage/resource_clients/dataset_collection.py +0 -48
- apify/_memory_storage/resource_clients/key_value_store.py +0 -533
- apify/_memory_storage/resource_clients/key_value_store_collection.py +0 -48
- apify/_memory_storage/resource_clients/request_queue.py +0 -466
- apify/_memory_storage/resource_clients/request_queue_collection.py +0 -48
- apify/actor.py +0 -1351
- apify/config.py +0 -127
- apify/consts.py +0 -67
- apify/event_manager.py +0 -236
- apify/proxy_configuration.py +0 -365
- apify/storages/base_storage.py +0 -181
- apify/storages/dataset.py +0 -494
- apify/storages/key_value_store.py +0 -257
- apify/storages/request_queue.py +0 -602
- apify/storages/storage_client_manager.py +0 -72
- apify-1.7.1b1.dist-info/METADATA +0 -149
- apify-1.7.1b1.dist-info/RECORD +0 -41
- apify-1.7.1b1.dist-info/top_level.txt +0 -1
- {apify-1.7.1b1.dist-info → apify-2.2.0b14.dist-info}/LICENSE +0 -0
|
@@ -0,0 +1,231 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import asyncio
|
|
4
|
+
from datetime import datetime
|
|
5
|
+
from typing import TYPE_CHECKING, Annotated, Any, Literal, Union
|
|
6
|
+
|
|
7
|
+
import websockets.client
|
|
8
|
+
from pydantic import BaseModel, Discriminator, Field, TypeAdapter
|
|
9
|
+
from typing_extensions import Self, Unpack, override
|
|
10
|
+
|
|
11
|
+
from crawlee.events._event_manager import EventManager, EventManagerOptions
|
|
12
|
+
from crawlee.events._local_event_manager import LocalEventManager
|
|
13
|
+
from crawlee.events._types import (
|
|
14
|
+
Event,
|
|
15
|
+
EventAbortingData,
|
|
16
|
+
EventExitData,
|
|
17
|
+
EventMigratingData,
|
|
18
|
+
EventPersistStateData,
|
|
19
|
+
EventSystemInfoData,
|
|
20
|
+
)
|
|
21
|
+
|
|
22
|
+
from apify._utils import docs_group
|
|
23
|
+
from apify.log import logger
|
|
24
|
+
|
|
25
|
+
if TYPE_CHECKING:
|
|
26
|
+
from types import TracebackType
|
|
27
|
+
|
|
28
|
+
from apify._configuration import Configuration
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
__all__ = ['EventManager', 'LocalEventManager', 'PlatformEventManager']
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
@docs_group('Data structures')
class PersistStateEvent(BaseModel):
    """Platform message instructing the Actor to persist its state."""

    # Discriminator field; always Event.PERSIST_STATE for this model.
    name: Literal[Event.PERSIST_STATE]
    # Defaults to a non-migrating persist request when the platform sends no payload.
    data: Annotated[EventPersistStateData, Field(default_factory=lambda: EventPersistStateData(is_migrating=False))]
|
|
38
|
+
|
|
39
|
+
|
|
40
|
+
@docs_group('Data structures')
class SystemInfoEventData(BaseModel):
    """Payload of the platform 'systemInfo' event.

    Field aliases match the camelCase keys of the platform JSON payload.
    """

    mem_avg_bytes: Annotated[float, Field(alias='memAvgBytes')]
    mem_current_bytes: Annotated[float, Field(alias='memCurrentBytes')]
    mem_max_bytes: Annotated[float, Field(alias='memMaxBytes')]
    cpu_avg_usage: Annotated[float, Field(alias='cpuAvgUsage')]
    cpu_max_usage: Annotated[float, Field(alias='cpuMaxUsage')]
    cpu_current_usage: Annotated[float, Field(alias='cpuCurrentUsage')]
    is_cpu_overloaded: Annotated[bool, Field(alias='isCpuOverloaded')]
    created_at: Annotated[datetime, Field(alias='createdAt')]

    def to_crawlee_format(self) -> EventSystemInfoData:
        """Convert this platform payload into crawlee's `EventSystemInfoData` shape.

        Returns:
            An `EventSystemInfoData` instance built from the CPU and memory fields.
        """
        return EventSystemInfoData.model_validate(
            {
                'cpu_info': {
                    # Divides by 100: converts a percentage-style usage value into a 0-1 ratio.
                    'used_ratio': self.cpu_current_usage / 100,
                    'created_at': self.created_at,
                },
                'memory_info': {
                    # mem_max_bytes is mapped to total_size; the average fields are not forwarded.
                    'total_size': self.mem_max_bytes,
                    'current_size': self.mem_current_bytes,
                    'created_at': self.created_at,
                },
            }
        )
|
|
65
|
+
|
|
66
|
+
|
|
67
|
+
@docs_group('Data structures')
class SystemInfoEvent(BaseModel):
    """Platform event carrying system resource usage information."""

    # Discriminator field; always Event.SYSTEM_INFO for this model.
    name: Literal[Event.SYSTEM_INFO]
    data: SystemInfoEventData
|
|
71
|
+
|
|
72
|
+
|
|
73
|
+
@docs_group('Data structures')
class MigratingEvent(BaseModel):
    """Platform event signalling that the Actor is about to be migrated to another server."""

    # Discriminator field; always Event.MIGRATING for this model.
    name: Literal[Event.MIGRATING]
    data: Annotated[EventMigratingData, Field(default_factory=EventMigratingData)]
|
|
77
|
+
|
|
78
|
+
|
|
79
|
+
@docs_group('Data structures')
class AbortingEvent(BaseModel):
    """Platform event signalling that the Actor run is being aborted."""

    # Discriminator field; always Event.ABORTING for this model.
    name: Literal[Event.ABORTING]
    data: Annotated[EventAbortingData, Field(default_factory=EventAbortingData)]
|
|
83
|
+
|
|
84
|
+
|
|
85
|
+
@docs_group('Data structures')
class ExitEvent(BaseModel):
    """Platform event signalling that the Actor is exiting."""

    # Discriminator field; always Event.EXIT for this model.
    name: Literal[Event.EXIT]
    data: Annotated[EventExitData, Field(default_factory=EventExitData)]
|
|
89
|
+
|
|
90
|
+
|
|
91
|
+
@docs_group('Data structures')
class EventWithoutData(BaseModel):
    """Platform events (session/browser/page lifecycle) that carry no meaningful payload."""

    name: Literal[
        Event.SESSION_RETIRED,
        Event.BROWSER_LAUNCHED,
        Event.BROWSER_RETIRED,
        Event.BROWSER_CLOSED,
        Event.PAGE_CREATED,
        Event.PAGE_CLOSED,
    ]
    # Payload is ignored for these events; kept permissive so parsing never fails on extras.
    data: Any = None
|
|
102
|
+
|
|
103
|
+
|
|
104
|
+
@docs_group('Data structures')
class DeprecatedEvent(BaseModel):
    """The legacy 'cpuInfo' platform event; recognized only so it can be skipped during processing."""

    name: Literal['cpuInfo']
    data: Annotated[dict[str, Any], Field(default_factory=dict)]
|
|
108
|
+
|
|
109
|
+
|
|
110
|
+
@docs_group('Data structures')
class UnknownEvent(BaseModel):
    """Catch-all model for platform events with a name not covered by the typed event models."""

    name: str
    data: Annotated[dict[str, Any], Field(default_factory=dict)]
|
|
114
|
+
|
|
115
|
+
|
|
116
|
+
# Union of all platform event messages with a known, well-typed payload.
EventMessage = Union[
    PersistStateEvent,
    SystemInfoEvent,
    MigratingEvent,
    AbortingEvent,
    ExitEvent,
    EventWithoutData,
]


# Parses a raw websocket payload into the matching event model. Typed events are
# selected by the 'name' discriminator; the deprecated 'cpuInfo' event and any
# unrecognized event fall through to DeprecatedEvent / UnknownEvent respectively.
event_data_adapter: TypeAdapter[EventMessage | DeprecatedEvent | UnknownEvent] = TypeAdapter(
    Union[
        Annotated[
            EventMessage,
            Discriminator('name'),
        ],
        DeprecatedEvent,
        UnknownEvent,
    ]
)
|
|
136
|
+
|
|
137
|
+
|
|
138
|
+
@docs_group('Classes')
class PlatformEventManager(EventManager):
    """A class for managing Actor events.

    You shouldn't use this class directly,
    but instead use it via the `Actor.on()` and `Actor.off()` methods.
    """

    _platform_events_websocket: websockets.client.WebSocketClientProtocol | None = None
    _process_platform_messages_task: asyncio.Task | None = None
    _send_system_info_interval_task: asyncio.Task | None = None
    # Annotation only - creating an asyncio.Future at class-definition time requires an
    # event loop at import. The attribute is always assigned in __init__ / __aenter__.
    _connected_to_platform_websocket: asyncio.Future

    def __init__(self, config: Configuration, **kwargs: Unpack[EventManagerOptions]) -> None:
        """Create an instance of the EventManager.

        Args:
            config: The Actor configuration to be used in this event manager.
            kwargs: Event manager options - forwarded to the base class
        """
        super().__init__(**kwargs)

        self._config = config
        self._listener_tasks = set()
        self._connected_to_platform_websocket = asyncio.Future[bool]()

    @override
    async def __aenter__(self) -> Self:
        await super().__aenter__()
        # Fresh future per context entry so a re-entered manager does not see a stale result.
        self._connected_to_platform_websocket = asyncio.Future()

        # Run tasks but don't await them
        if self._config.actor_events_ws_url:
            self._process_platform_messages_task = asyncio.create_task(
                self._process_platform_messages(self._config.actor_events_ws_url)
            )
            is_connected = await self._connected_to_platform_websocket
            if not is_connected:
                raise RuntimeError('Error connecting to platform events websocket!')
        else:
            logger.debug('APIFY_ACTOR_EVENTS_WS_URL env var not set, no events from Apify platform will be emitted.')

        return self

    @override
    async def __aexit__(
        self,
        exc_type: type[BaseException] | None,
        exc_value: BaseException | None,
        exc_traceback: TracebackType | None,
    ) -> None:
        # Close the websocket first so the reader task's `async for` loop terminates.
        if self._platform_events_websocket:
            await self._platform_events_websocket.close()

        if self._process_platform_messages_task:
            await self._process_platform_messages_task

        await super().__aexit__(exc_type, exc_value, exc_traceback)

    async def _process_platform_messages(self, ws_url: str) -> None:
        """Connect to the platform websocket and re-emit incoming events until the connection closes.

        Args:
            ws_url: The websocket URL provided by the platform (APIFY_ACTOR_EVENTS_WS_URL).
        """
        try:
            async with websockets.client.connect(ws_url) as websocket:
                self._platform_events_websocket = websocket
                self._connected_to_platform_websocket.set_result(True)

                async for message in websocket:
                    try:
                        parsed_message = event_data_adapter.validate_json(message)

                        # Legacy 'cpuInfo' events are intentionally dropped.
                        if isinstance(parsed_message, DeprecatedEvent):
                            continue

                        if isinstance(parsed_message, UnknownEvent):
                            logger.info(
                                f'Unknown message received: event_name={parsed_message.name}, '
                                f'event_data={parsed_message.data}'
                            )
                            continue

                        self.emit(
                            event=parsed_message.name,
                            # System-info payloads are translated to crawlee's format first.
                            event_data=parsed_message.data
                            if not isinstance(parsed_message.data, SystemInfoEventData)
                            else parsed_message.data.to_crawlee_format(),
                        )

                        if parsed_message.name == Event.MIGRATING:
                            # Stop the periodic persist-state emitter and request one final,
                            # migration-flagged persist so listeners can save state before migration.
                            await self._emit_persist_state_event_rec_task.stop()
                            self.emit(event=Event.PERSIST_STATE, event_data=EventPersistStateData(is_migrating=True))
                    except Exception:
                        # 'message' is a reserved LogRecord attribute - passing it as an `extra`
                        # key makes `logging` raise KeyError, so the raw payload is attached
                        # under a non-reserved key instead.
                        logger.exception('Cannot parse Actor event', extra={'event_message': message})
        except Exception:
            logger.exception('Error in websocket connection')
            # The future may already be resolved (connection succeeded, then failed later);
            # calling set_result twice would raise asyncio.InvalidStateError.
            if not self._connected_to_platform_websocket.done():
                self._connected_to_platform_websocket.set_result(False)
|
|
@@ -0,0 +1,320 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import ipaddress
|
|
4
|
+
import re
|
|
5
|
+
from dataclasses import dataclass, field
|
|
6
|
+
from re import Pattern
|
|
7
|
+
from typing import TYPE_CHECKING, Any
|
|
8
|
+
from urllib.parse import urljoin, urlparse
|
|
9
|
+
|
|
10
|
+
import httpx
|
|
11
|
+
|
|
12
|
+
from apify_shared.consts import ApifyEnvVars
|
|
13
|
+
from apify_shared.utils import ignore_docs
|
|
14
|
+
from crawlee.proxy_configuration import ProxyConfiguration as CrawleeProxyConfiguration
|
|
15
|
+
from crawlee.proxy_configuration import ProxyInfo as CrawleeProxyInfo
|
|
16
|
+
from crawlee.proxy_configuration import _NewUrlFunction
|
|
17
|
+
|
|
18
|
+
from apify._configuration import Configuration
|
|
19
|
+
from apify._utils import docs_group
|
|
20
|
+
from apify.log import logger
|
|
21
|
+
|
|
22
|
+
if TYPE_CHECKING:
|
|
23
|
+
from apify_client import ApifyClientAsync
|
|
24
|
+
from crawlee import Request
|
|
25
|
+
|
|
26
|
+
# Characters allowed in Apify Proxy username parts (proxy groups, session IDs).
APIFY_PROXY_VALUE_REGEX = re.compile(r'^[\w._~]+$')
# Two uppercase letters, i.e. an ISO 3166-1 alpha-2 style country code.
COUNTRY_CODE_REGEX = re.compile(r'^[A-Z]{2}$')
# Maximum allowed length of a proxy session identifier.
SESSION_ID_MAX_LENGTH = 50
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
@ignore_docs
def is_url(url: str) -> bool:
    """Check if the given string is a valid URL."""
    try:
        # Normalize to the root path so a bare scheme+host still yields a non-empty path.
        parts = urlparse(urljoin(url, '/'))

        # Scheme, network location and path must all be present.
        if not (parts.scheme and parts.netloc and parts.path):
            return False

        host = parts.netloc
        try:
            ipaddress.ip_address(host)
            host_is_ip = True
        except Exception:
            host_is_ip = False

        # The host must be a dotted domain, 'localhost', or a literal IP address.
        return '.' in host or host == 'localhost' or host_is_ip
    except Exception:
        # Any parsing failure means the input is not a usable URL.
        return False
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
def _check(
    value: Any,
    *,
    label: str | None,
    pattern: Pattern | None = None,
    min_length: int | None = None,
    max_length: int | None = None,
) -> None:
    """Validate *value* against optional length bounds and a regex.

    Args:
        value: The value to validate (must support len() when length bounds are given).
        label: Argument name included in the error message, if any.
        pattern: Regex the whole value must match, if given.
        min_length: Inclusive lower bound on len(value), if given.
        max_length: Inclusive upper bound on len(value), if given.

    Raises:
        ValueError: If any of the requested checks fails.
    """
    subject = f'Value {value}' + (f' of argument {label}' if label else '')

    if min_length and len(value) < min_length:
        raise ValueError(f'{subject} is shorter than minimum allowed length {min_length}')

    if max_length and len(value) > max_length:
        raise ValueError(f'{subject} is longer than maximum allowed length {max_length}')

    if pattern and not re.fullmatch(pattern, value):
        raise ValueError(f'{subject} does not match pattern {pattern.pattern!r}')
|
|
70
|
+
|
|
71
|
+
|
|
72
|
+
@docs_group('Classes')
@dataclass
class ProxyInfo(CrawleeProxyInfo):
    """Provides information about a proxy connection that is used for requests."""

    groups: list[str] = field(default_factory=list)
    """An array of proxy groups to be used by the [Apify Proxy](https://docs.apify.com/proxy). If not provided,
    the proxy will select the groups automatically."""

    country_code: str | None = None
    """If set and relevant proxies are available in your Apify account, all proxied requests will use IP addresses
    that are geolocated to the specified country. For example `GB` for IPs from Great Britain. Note that online
    services often have their own rules for handling geolocation and thus the country selection is a best attempt
    at geolocation, rather than a guaranteed hit. This parameter is optional, by default, each proxied request is
    assigned an IP address from a random country. The country code needs to be a two letter ISO country code.
    See the [full list of available country codes](https://en.wikipedia.org/wiki/ISO_3166-1_alpha-2#Officially_assigned_code_elements).
    This parameter is optional, by default, the proxy uses all available proxy servers from all countries.
    """
|
|
90
|
+
|
|
91
|
+
|
|
92
|
+
@docs_group('Classes')
class ProxyConfiguration(CrawleeProxyConfiguration):
    """Configures a connection to a proxy server with the provided options.

    Proxy servers are used to prevent target websites from blocking your crawlers based on IP address rate limits or
    blacklists. The default servers used by this class are managed by [Apify Proxy](https://docs.apify.com/proxy).
    To be able to use Apify Proxy, you need an Apify account and access to the selected proxies. If you provide
    no configuration option, the proxies will be managed automatically using a smart algorithm.

    If you want to use your own proxies, use the `proxy_urls` or `new_url_function` constructor options. Your list
    of proxy URLs will be rotated by the configuration, if this option is provided.
    """

    _configuration: Configuration

    @ignore_docs
    def __init__(
        self,
        *,
        password: str | None = None,
        groups: list[str] | None = None,
        country_code: str | None = None,
        proxy_urls: list[str | None] | None = None,
        new_url_function: _NewUrlFunction | None = None,
        tiered_proxy_urls: list[list[str | None]] | None = None,
        _actor_config: Configuration | None = None,
        _apify_client: ApifyClientAsync | None = None,
    ) -> None:
        """Create a ProxyConfiguration instance.

        It is highly recommended to use `Actor.create_proxy_configuration()` instead of this.

        Args:
            password: Password for the Apify Proxy. If not provided, will use os.environ['APIFY_PROXY_PASSWORD'],
                if available.
            groups: Proxy groups which the Apify Proxy should use, if provided.
            country_code: Country which the Apify Proxy should use, if provided.
            proxy_urls: Custom proxy server URLs which should be rotated through.
            new_url_function: Function which returns a custom proxy URL to be used.
            tiered_proxy_urls: Proxy URLs arranged into tiers
        """
        _actor_config = _actor_config or Configuration.get_global_configuration()

        if groups:
            # Coerce every entry to str before validating against the allowed character set.
            groups = [str(group) for group in groups]
            for group in groups:
                _check(group, label='groups', pattern=APIFY_PROXY_VALUE_REGEX)

        if country_code:
            country_code = str(country_code)
            _check(country_code, label='country_code', pattern=COUNTRY_CODE_REGEX)

        # Custom proxy sources and Apify Proxy selectors are mutually exclusive.
        if (proxy_urls or new_url_function or tiered_proxy_urls) and (groups or country_code):
            raise ValueError(
                'Cannot combine custom proxies with Apify Proxy!'
                ' It is not allowed to set "proxy_urls" or "new_url_function" combined with'
                ' "groups" or "country_code".'
            )

        if proxy_urls and any('apify.com' in (url or '') for url in proxy_urls):
            logger.warning(
                'Some Apify proxy features may work incorrectly. Please consider setting up Apify properties '
                'instead of `proxy_urls`.\n'
                'See https://sdk.apify.com/docs/guides/proxy-management#apify-proxy-configuration'
            )

        # Apify Proxy is used only when no custom proxy source was supplied at all.
        self._uses_apify_proxy = not (proxy_urls or new_url_function or tiered_proxy_urls)

        super().__init__(
            # When Apify Proxy is used, the base class rotates a single credential-less URL;
            # credentials are injected later in new_proxy_info().
            proxy_urls=[f'http://{_actor_config.proxy_hostname}:{_actor_config.proxy_port}']
            if self._uses_apify_proxy
            else proxy_urls,
            new_url_function=new_url_function,
            tiered_proxy_urls=tiered_proxy_urls,
        )
        self._configuration = _actor_config

        self.is_man_in_the_middle = False

        self._apify_client = _apify_client

        self._hostname = self._configuration.proxy_hostname
        self._port = self._configuration.proxy_port
        self._password = password or self._configuration.proxy_password

        self._groups = list(groups) if groups else []
        self._country_code = country_code

    async def initialize(self) -> None:
        """Load the Apify Proxy password if the API token is provided and check access to Apify Proxy and proxy groups.

        Only called if Apify Proxy configuration is used. Also checks if country has access to Apify Proxy groups
        if the country code is provided.

        You should use the Actor.create_proxy_configuration function to create a pre-initialized
        `ProxyConfiguration` instance instead of calling this manually.
        """
        if self._uses_apify_proxy:
            await self._maybe_fetch_password()
            await self._check_access()

    async def new_proxy_info(
        self,
        session_id: str | None = None,
        request: Request | None = None,
        proxy_tier: int | None = None,
    ) -> ProxyInfo | None:
        """Create a new ProxyInfo object.

        Use it if you want to work with a rich representation of a proxy URL. If you need the URL string only,
        use `ProxyConfiguration.new_url`.

        Args:
            session_id: Represents the identifier of a proxy session (https://docs.apify.com/proxy#sessions).
                All the HTTP requests going through the proxy with the same session identifier will use the same
                target proxy server (i.e. the same IP address). The identifier must not be longer than 50 characters
                and include only the following: `0-9`, `a-z`, `A-Z`, `"."`, `"_"` and `"~"`.
            request: request for which the proxy info is being issued, used in proxy tier handling.
            proxy_tier: allows forcing the proxy tier to be used.

        Returns:
            Dictionary that represents information about the proxy and its configuration.
        """
        if session_id is not None:
            _check(session_id, label='session_id', max_length=SESSION_ID_MAX_LENGTH, pattern=APIFY_PROXY_VALUE_REGEX)

        proxy_info = await super().new_proxy_info(session_id=session_id, request=request, proxy_tier=proxy_tier)

        if proxy_info is None:
            return None

        if self._uses_apify_proxy:
            # Rebuild the base-class URL with Apify Proxy credentials; the username
            # encodes the groups / session / country selection (see _get_username).
            parsed_url = httpx.URL(proxy_info.url)
            username = self._get_username(session_id)

            return ProxyInfo(
                url=f'http://{username}:{self._password or ""}@{parsed_url.host}:{parsed_url.port}',
                scheme='http',
                hostname=proxy_info.hostname,
                port=proxy_info.port,
                username=username,
                password=self._password or '',
                session_id=proxy_info.session_id,
                proxy_tier=proxy_info.proxy_tier,
                groups=self._groups,
                country_code=self._country_code or None,
            )

        # Custom proxies: wrap the crawlee result in the richer apify ProxyInfo unchanged.
        return ProxyInfo(
            url=proxy_info.url,
            scheme=proxy_info.scheme,
            hostname=proxy_info.hostname,
            port=proxy_info.port,
            username=proxy_info.username,
            password=proxy_info.password,
            session_id=proxy_info.session_id,
            proxy_tier=proxy_info.proxy_tier,
        )

    async def _maybe_fetch_password(self) -> None:
        # Fetch the proxy password from the user's account when a token-backed client is available.
        token = self._configuration.token

        if token and self._apify_client:
            user_info = await self._apify_client.user().get()
            if user_info:
                password = user_info['proxy']['password']

                if self._password:
                    # An explicitly provided password wins, but warn if it disagrees
                    # with the account the token belongs to.
                    if self._password != password:
                        logger.warning(
                            'The Apify Proxy password you provided belongs to a different user than the Apify '
                            'token you are using. Are you sure this is correct?'
                        )
                else:
                    self._password = password

        if not self._password:
            raise ValueError(
                'Apify Proxy password must be provided using the "password" constructor argument '
                f'or the "{ApifyEnvVars.PROXY_PASSWORD}" environment variable. If you add '
                f'the "{ApifyEnvVars.TOKEN}" environment variable, the password will be automatically inferred.'
            )

    async def _check_access(self) -> None:
        # Probe the proxy status endpoint through the proxy itself to verify credentials/groups.
        proxy_status_url = f'{self._configuration.proxy_status_url}/?format=json'
        proxy_info = await self.new_proxy_info()

        if proxy_info is None:
            return

        status = None
        async with httpx.AsyncClient(proxy=proxy_info.url, timeout=10) as client:
            # Two attempts: a single transient connection error should not fail the check.
            for _ in range(2):
                try:
                    response = await client.get(proxy_status_url)
                    status = response.json()
                    break
                except Exception:  # noqa: S110
                    # retry on connection errors
                    pass

        if status:
            if not status['connected']:
                raise ConnectionError(status['connectionError'])

            self.is_man_in_the_middle = status['isManInTheMiddle']
        else:
            # Both attempts failed; access could not be verified, so only warn.
            logger.warning(
                'Apify Proxy access check timed out. Watch out for errors with status code 407. If you see some, it '
                'most likely means you do not have access to either all or some of the proxies you are trying to use.'
            )

    def _get_username(self, session_id: int | str | None = None) -> str:
        # Build the Apify Proxy username, e.g. 'groups-A+B,session-x,country-GB', or 'auto'.
        if session_id is not None:
            session_id = f'{session_id}'

        parts: list[str] = []

        if self._groups:
            parts.append(f'groups-{"+".join(self._groups)}')
        if session_id is not None:
            parts.append(f'session-{session_id}')
        if self._country_code:
            parts.append(f'country-{self._country_code}')

        if not parts:
            return 'auto'

        return ','.join(parts)
|