apify 2.0.0b15__py3-none-any.whl → 2.0.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of apify might be problematic.

apify/_configuration.py CHANGED
@@ -5,8 +5,10 @@ from datetime import datetime, timedelta
  from typing import Annotated

  from pydantic import AliasChoices, BeforeValidator, Field
+ from typing_extensions import deprecated

  from crawlee._utils.models import timedelta_ms
+ from crawlee._utils.urls import validate_http_url
  from crawlee.configuration import Configuration as CrawleeConfiguration


@@ -160,15 +162,16 @@ class Configuration(CrawleeConfiguration):
          str | None,
          Field(
              alias='apify_sdk_latest_version',
-             deprecated=True,
              description='Specifies the most recent release version of the Apify SDK for Javascript. Used for '
              'checking for updates.',
          ),
+         deprecated('SDK version checking is not supported for the Python SDK'),
      ] = None

      log_format: Annotated[
          str | None,
-         Field(alias='apify_log_format', deprecated=True),
+         Field(alias='apify_log_format'),
+         deprecated('Adjust the log format in code instead'),
      ] = None

      max_paid_dataset_items: Annotated[
@@ -248,6 +251,7 @@ class Configuration(CrawleeConfiguration):
              ),
              description='Date when the Actor will time out',
          ),
+         BeforeValidator(lambda val: val if val != '' else None),  # We should accept empty environment variables as well
      ] = None

      standby_port: Annotated[
@@ -256,7 +260,17 @@ class Configuration(CrawleeConfiguration):
              alias='actor_standby_port',
              description='TCP port for the Actor to start an HTTP server to receive messages in the Actor Standby mode',
          ),
-     ] = 4322
+         deprecated('Use `web_server_port` instead'),
+     ] = 4321
+
+     standby_url: Annotated[
+         str,
+         BeforeValidator(validate_http_url),
+         Field(
+             alias='actor_standby_url',
+             description='URL for accessing web servers of Actor runs in Standby mode',
+         ),
+     ] = 'http://localhost'

      token: Annotated[
          str | None,
@@ -293,7 +307,7 @@ class Configuration(CrawleeConfiguration):
                  'actor_web_server_url',
                  'apify_container_url',
              ),
-             description='Unique public URL for accessing the Actor run web server from the outside world',
+             description='Unique public URL for accessing a specific Actor run web server from the outside world',
          ),
      ] = 'http://localhost:4321'

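The new configuration fields above rely on two pydantic patterns: a BeforeValidator that coerces empty environment variables to None, and typing_extensions.deprecated attached as Annotated metadata. The following standalone sketch reproduces the same pattern with a plain pydantic BaseModel and illustrative field names, not the SDK's actual Configuration class:

from datetime import datetime
from typing import Annotated, Optional

from pydantic import BaseModel, BeforeValidator, Field
from typing_extensions import deprecated


class ExampleConfig(BaseModel):
    # An empty environment variable value becomes None instead of failing datetime validation.
    timeout_at: Annotated[
        Optional[datetime],
        Field(alias='actor_timeout_at'),
        BeforeValidator(lambda val: val if val != '' else None),
    ] = None

    # deprecated() marks the field for type checkers and tooling; runtime validation is unchanged.
    standby_port: Annotated[
        int,
        Field(alias='actor_standby_port'),
        deprecated('Use `web_server_port` instead'),
    ] = 4321


print(ExampleConfig(actor_timeout_at='').timeout_at)  # None
print(ExampleConfig().standby_port)                   # 4321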
apify/_crypto.py CHANGED
@@ -114,6 +114,7 @@ def private_decrypt(
      return decipher_bytes.decode('utf-8')


+ @ignore_docs
  def load_private_key(private_key_file_base64: str, private_key_password: str) -> rsa.RSAPrivateKey:
      private_key = serialization.load_pem_private_key(
          base64.b64decode(private_key_file_base64.encode('utf-8')),
@@ -133,6 +134,7 @@ def _load_public_key(public_key_file_base64: str) -> rsa.RSAPublicKey:
      return public_key


+ @ignore_docs
  def decrypt_input_secrets(private_key: rsa.RSAPrivateKey, input_data: Any) -> Any:
      """Decrypt input secrets."""
      if not isinstance(input_data, dict):
apify/_platform_event_manager.py CHANGED
@@ -8,7 +8,6 @@ import websockets.client
  from pydantic import BaseModel, Discriminator, Field, TypeAdapter
  from typing_extensions import Self, Unpack, override

- from apify_shared.utils import ignore_docs
  from crawlee.events._event_manager import EventManager, EventManagerOptions
  from crawlee.events._local_event_manager import LocalEventManager
  from crawlee.events._types import (
@@ -126,7 +125,6 @@ event_data_adapter: TypeAdapter[EventMessage | DeprecatedEvent | UnknownEvent] =
  )


- @ignore_docs
  class PlatformEventManager(EventManager):
      """A class for managing Actor events.

apify/_proxy_configuration.py CHANGED
@@ -27,6 +27,7 @@ COUNTRY_CODE_REGEX = re.compile(r'^[A-Z]{2}$')
  SESSION_ID_MAX_LENGTH = 50


+ @ignore_docs
  def is_url(url: str) -> bool:
      """Check if the given string is a valid URL."""
      try:
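The diff only adds @ignore_docs to is_url, so its body is not shown here. As a rough illustration of how such a check is commonly written (not the SDK's actual implementation), a urllib.parse-based sketch:

from urllib.parse import urlparse


def is_url_like(url: str) -> bool:
    # Hypothetical helper: accept only strings that parse as absolute URLs with a scheme and host.
    try:
        parsed = urlparse(url)
    except ValueError:
        return False
    return bool(parsed.scheme) and bool(parsed.netloc)


print(is_url_like('http://proxy.apify.com:8000'))  # True
print(is_url_like('not a url'))                    # False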
apify/_utils.py CHANGED
@@ -11,6 +11,7 @@ def get_system_info() -> dict:
      system_info: dict[str, str | bool] = {
          'apify_sdk_version': metadata.version('apify'),
          'apify_client_version': metadata.version('apify-client'),
+         'crawlee_version': metadata.version('crawlee'),
          'python_version': python_version,
          'os': sys.platform,
      }
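The added 'crawlee_version' entry is read the same way as the other entries, via importlib.metadata. A quick sketch of what get_system_info() now gathers (requires the packages to be installed):

from importlib import metadata

# Read installed package versions at runtime, mirroring the dict built above.
print({
    'apify_sdk_version': metadata.version('apify'),
    'apify_client_version': metadata.version('apify-client'),
    'crawlee_version': metadata.version('crawlee'),
})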
apify/apify_storage_client/_request_queue_client.py CHANGED
@@ -2,7 +2,6 @@ from __future__ import annotations

  from typing import TYPE_CHECKING

- from more_itertools import chunked
  from typing_extensions import override

  from crawlee import Request
@@ -158,11 +157,8 @@ class RequestQueueClient(BaseRequestQueueClient):
          *,
          forefront: bool = False,
      ) -> BatchRequestsOperationResponse:
-         processed = []
-         unprocessed = []
-
-         for chunk in chunked(requests, 25):  # The API endpoint won't accept more than 25 requests at once
-             response = await self._client.batch_add_requests(
+         return BatchRequestsOperationResponse.model_validate(
+             await self._client.batch_add_requests(
                  requests=[
                      r.model_dump(
                          by_alias=True,
@@ -174,18 +170,10 @@ class RequestQueueClient(BaseRequestQueueClient):
                              'data',
                          },
                      )
-                     for r in chunk
+                     for r in requests
                  ],
                  forefront=forefront,
              )
-             processed.extend(response['processedRequests'])
-             unprocessed.extend(response['unprocessedRequests'])
-
-         return BatchRequestsOperationResponse.model_validate(
-             {
-                 'processedRequests': processed,
-                 'unprocessedRequests': unprocessed,
-             }
-         )
          )

      @override
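The removed lines show that the client previously split submissions into groups of 25 with more_itertools.chunked before calling the API; in 2.0.1 all requests are forwarded in a single batch_add_requests call. Callers who still want to cap batch sizes can apply the same chunking pattern themselves, as in this sketch (requires more-itertools; the URLs are placeholders):

from more_itertools import chunked

urls = [f'https://example.com/page/{i}' for i in range(60)]

# Split the work into groups of at most 25, mirroring the logic that was removed above.
for batch in chunked(urls, 25):
    print(len(batch))  # 25, 25, 10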
apify/log.py CHANGED
@@ -3,6 +3,7 @@ from __future__ import annotations
  import logging
  from typing import TYPE_CHECKING

+ from apify_shared.utils import ignore_docs
  from crawlee._log_config import CrawleeLogFormatter, configure_logger, get_configured_log_level

  if TYPE_CHECKING:
@@ -15,6 +16,7 @@ logger_name = __name__.split('.')[0]
  logger = logging.getLogger(logger_name)


+ @ignore_docs
  class ActorLogFormatter(CrawleeLogFormatter):  # noqa: D101 Inherited from parent class
      pass

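ActorLogFormatter itself is unchanged apart from being hidden from the generated docs. Its typical usage with the standard logging module looks roughly like this (a sketch based on the SDK's documented pattern):

import logging

from apify.log import ActorLogFormatter

# Attach the SDK's formatter to an ordinary logging handler.
handler = logging.StreamHandler()
handler.setFormatter(ActorLogFormatter())

apify_logger = logging.getLogger('apify')
apify_logger.setLevel(logging.DEBUG)
apify_logger.addHandler(handler)
apify_logger.info('Logging is configured.')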
apify/scrapy/requests.py CHANGED
@@ -4,6 +4,8 @@ import codecs
  import pickle
  from typing import Any, cast

+ from apify_shared.utils import ignore_docs
+
  try:
      from scrapy import Request, Spider
      from scrapy.http.headers import Headers
@@ -14,6 +16,7 @@ except ImportError as exc:
      ) from exc

  from crawlee import Request as CrawleeRequest
+ from crawlee._types import HttpHeaders
  from crawlee._utils.crypto import crypto_random_object_id
  from crawlee._utils.requests import compute_unique_key, unique_key_to_request_id

@@ -28,6 +31,7 @@ def _is_request_produced_by_middleware(scrapy_request: Request) -> bool:
      return bool(scrapy_request.meta.get('redirect_times')) or bool(scrapy_request.meta.get('retry_times'))


+ @ignore_docs
  def to_apify_request(scrapy_request: Request, spider: Spider) -> CrawleeRequest | None:
      """Convert a Scrapy request to an Apify request.

@@ -74,9 +78,9 @@ def to_apify_request(scrapy_request: Request, spider: Spider) -> CrawleeRequest
              id=request_id,
          )

-         # Convert Scrapy's headers to a dictionary and store them in the apify_request
+         # Convert Scrapy's headers to a HttpHeaders and store them in the apify_request
          if isinstance(scrapy_request.headers, Headers):
-             apify_request.headers = dict(scrapy_request.headers.to_unicode_dict())
+             apify_request.headers = HttpHeaders(scrapy_request.headers.to_unicode_dict())
          else:
              Actor.log.warning(
                  f'Invalid scrapy_request.headers type, not scrapy.http.headers.Headers: {scrapy_request.headers}'
@@ -98,6 +102,7 @@ def to_apify_request(scrapy_request: Request, spider: Spider) -> CrawleeRequest
      return apify_request


+ @ignore_docs
  def to_scrapy_request(apify_request: CrawleeRequest, spider: Spider) -> Request:
      """Convert an Apify request to a Scrapy request.

@@ -160,13 +165,7 @@ def to_scrapy_request(apify_request: CrawleeRequest, spider: Spider) -> Request:

      # Add optional 'headers' field
      if apify_request.headers:
-         if isinstance(cast(Any, apify_request.headers), dict):
-             scrapy_request.headers = Headers(apify_request.headers)
-         else:
-             Actor.log.warning(
-                 'apify_request[headers] is not an instance of the dict class, '
-                 f'apify_request[headers] = {apify_request.headers}',
-             )
+         scrapy_request.headers |= Headers(apify_request.headers)

      # Add optional 'userData' field
      if apify_request.user_data:
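The header handling above replaces plain dicts with crawlee's HttpHeaders (imported from the private crawlee._types module) and merges headers into the Scrapy request with |= instead of overwriting them. A small sketch of the forward conversion, assuming scrapy and crawlee are installed and keeping in mind that HttpHeaders may normalize header names:

from crawlee._types import HttpHeaders
from scrapy.http.headers import Headers

# Scrapy keeps header values as bytes internally; to_unicode_dict() exposes them as strings,
# which is what the HttpHeaders mapping expects.
scrapy_headers = Headers({'Accept': 'application/json', 'User-Agent': 'my-spider'})
apify_headers = HttpHeaders(scrapy_headers.to_unicode_dict())

print(dict(apify_headers))

In the other direction, the new code simply merges the mapping back into the Scrapy request with scrapy_request.headers |= Headers(apify_request.headers), as shown in the diff.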
apify/scrapy/utils.py CHANGED
@@ -4,6 +4,8 @@ import asyncio
  from base64 import b64encode
  from urllib.parse import unquote

+ from apify_shared.utils import ignore_docs
+
  try:
      from scrapy.settings import Settings  # noqa: TCH002
      from scrapy.utils.project import get_project_settings
@@ -18,6 +20,7 @@ except ImportError as exc:
  nested_event_loop: asyncio.AbstractEventLoop = asyncio.new_event_loop()


+ @ignore_docs
  def get_basic_auth_header(username: str, password: str, auth_encoding: str = 'latin-1') -> bytes:
      """Generate a basic authentication header for the given username and password."""
      string = f'{unquote(username)}:{unquote(password)}'
@@ -25,6 +28,7 @@ def get_basic_auth_header(username: str, password: str, auth_encoding: str = 'la
      return b'Basic ' + b64encode(user_pass)


+ @ignore_docs
  def get_running_event_loop_id() -> int:
      """Get the ID of the currently running event loop.

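For reference, the value produced by get_basic_auth_header above is just the base64-encoded 'username:password' pair. A standalone sketch of the same construction (the function name here is illustrative, not part of the SDK):

from base64 import b64encode
from urllib.parse import unquote


def basic_auth_header(username: str, password: str, auth_encoding: str = 'latin-1') -> bytes:
    # Mirror of the helper shown above: decode percent-escapes, join with ':', base64-encode.
    user_pass = f'{unquote(username)}:{unquote(password)}'.encode(auth_encoding)
    return b'Basic ' + b64encode(user_pass)


print(basic_auth_header('apify', 's3cret'))  # b'Basic YXBpZnk6czNjcmV0'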
apify-2.0.0b15.dist-info/METADATA → apify-2.0.1.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: apify
- Version: 2.0.0b15
+ Version: 2.0.1
  Summary: Apify SDK for Python
  License: Apache-2.0
  Keywords: apify,sdk,automation,chrome,crawlee,crawler,headless,scraper,scraping
@@ -16,11 +16,12 @@ Classifier: Programming Language :: Python :: 3.9
  Classifier: Programming Language :: Python :: 3.10
  Classifier: Programming Language :: Python :: 3.11
  Classifier: Programming Language :: Python :: 3.12
+ Classifier: Programming Language :: Python :: 3.13
  Classifier: Topic :: Software Development :: Libraries
  Provides-Extra: scrapy
- Requires-Dist: apify-client (>=1.7.1)
+ Requires-Dist: apify-client (>=1.8.1)
  Requires-Dist: apify-shared (>=1.1.2)
- Requires-Dist: crawlee (>=0.3.5)
+ Requires-Dist: crawlee (>=0.3.8)
  Requires-Dist: cryptography (>=42.0.0)
  Requires-Dist: httpx (>=0.27.0)
  Requires-Dist: lazy-object-proxy (>=1.10.0)
apify-2.0.0b15.dist-info/RECORD → apify-2.0.1.dist-info/RECORD CHANGED
@@ -1,22 +1,22 @@
  apify/__init__.py,sha256=ikoi2EpDYl6y-XSVtlU8UsdQdMEyOiIJCRRAaZFDOP8,550
  apify/_actor.py,sha256=oPgQ3rxxIEzVcZ9XtI3lf1a_6gwIMgxihNuYGjJpGww,41816
- apify/_configuration.py,sha256=gf7YOun32Whc9DamhoWDLmcUeNwtWVmmBPrl4oq6s4I,8997
+ apify/_configuration.py,sha256=n67sK9g8llsG8ZqIWhi1BEHliplGs5PTB6Ig7_0PaMU,9629
  apify/_consts.py,sha256=_Xq4hOfOA1iZ3n1P967YWdyncKivpbX6RTlp_qanUoE,330
- apify/_crypto.py,sha256=b4Czs1NLPkaNzkPjovObjSIbsKnRrgtBkM9JvOysUMA,5612
+ apify/_crypto.py,sha256=e0_aM3l9_5Osk-jszYOOjrAKK60OggSHbiw5c30QnsU,5638
  apify/_models.py,sha256=oYlTEr-DyQAE-V2rrYD5PhUxTXVPdAig7QV-u6CJw3E,5571
- apify/_platform_event_manager.py,sha256=h5fBmXtKD4t-yCdOSiLM1-DnCrIbGEmYmz2mOU3A8bA,7627
- apify/_proxy_configuration.py,sha256=VdKh_AyCwaCUlpCyaCe30L2S9OZ-vL1SN1g8oLwSeYA,13074
- apify/_utils.py,sha256=x4lnR9RNulySiEQTft-GeQqUcJsRr0k8p0Sv9NTeWFg,638
+ apify/_platform_event_manager.py,sha256=K4cHabbQ7_ex7vkX-c-VhAOp8Efw3HDn5Wp4lfA-qAU,7571
+ apify/_proxy_configuration.py,sha256=2z4VV_NrnIp6pDpgQKlKpcHM2pPyXiOpFedpPWje48A,13087
+ apify/_utils.py,sha256=Ghho3Gf11zYN8qhjhAkZRvQ--A9Js36GHB0YSDGUK58,694
  apify/apify_storage_client/__init__.py,sha256=-UbR68bFsDR6ln8OFs4t50eqcnY36hujO-SeOt-KmcA,114
  apify/apify_storage_client/_apify_storage_client.py,sha256=xi4OFchxhe-1-sykanH6Zcya4OcBhn2uf7OQ1pV4Ins,2338
  apify/apify_storage_client/_dataset_client.py,sha256=j9seF2OKvbSMD9R9XF9fpa1vtr_1w4JcRV--WCmvU4E,5501
  apify/apify_storage_client/_dataset_collection_client.py,sha256=fkYvYGQCigHD2CDzpWk0swNAkfvAinAhMGpYqllle3E,1445
  apify/apify_storage_client/_key_value_store_client.py,sha256=uyeQgb75sGFsqIS4sq4hEZ3QP81COLfS3tmTqHc0tso,3340
  apify/apify_storage_client/_key_value_store_collection_client.py,sha256=vCtMTI-jx89Qp5WHILDNkCthwLuv0MAwm1J_5E4aypU,1519
- apify/apify_storage_client/_request_queue_client.py,sha256=P8ws8jEzi2PWpp-cvYfV7kwuKbgH813BpNQ_wMSVtTA,6278
+ apify/apify_storage_client/_request_queue_client.py,sha256=jAiFkaJ38_myHFGTw-Rk21wmpbN0UCR2w2SFoimFGFc,5826
  apify/apify_storage_client/_request_queue_collection_client.py,sha256=NnO73UJ9ZrjV8xoudo30wfaM-SojRkG0guhxDyB-K1g,1527
  apify/apify_storage_client/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- apify/log.py,sha256=pX6ppIvds8OKqjFpIcshqG4zp_5DiOUU31ksyfSExto,1392
+ apify/log.py,sha256=zIVjrqQ1DNWNQQOAmdmR9oAbf4nJH7CSMB6u4OOUf6I,1448
  apify/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  apify/scrapy/__init__.py,sha256=qDPV_zTRFaUqoFOyS5g4uBfz-UCkmWYJ82VXQ_3Cw6k,348
  apify/scrapy/middlewares/__init__.py,sha256=tfW-d3WFWLeNEjL8fTmon6NwgD-OXx1Bw2fBdU-wPy4,114
@@ -26,12 +26,12 @@ apify/scrapy/pipelines/__init__.py,sha256=GWPeLN_Zwj8vRBWtXW6DaxdB7mvyQ7Jw5Tz1cc
  apify/scrapy/pipelines/actor_dataset_push.py,sha256=QERmmExQOGIKQ70-p-lCj5qyE-c-fnYplEqd4mgaB1Q,953
  apify/scrapy/pipelines/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  apify/scrapy/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- apify/scrapy/requests.py,sha256=pmm2M-cwrTXyI3t1nRBo9pS6nHfc4zkzS25-NXxzd9I,7637
+ apify/scrapy/requests.py,sha256=F4VNaX2fGqybJKbhcRcz0_m6dXse5LzKll4gtMuTRko,7480
  apify/scrapy/scheduler.py,sha256=AAIKY5i1QxkC1mtmix6n3M2eQaOw-d1T56Noue9xToc,6013
- apify/scrapy/utils.py,sha256=tz_Y8CTqe6KbyMMhLF3m7qqR46jtNH5U7Ty7e19roPU,2814
+ apify/scrapy/utils.py,sha256=0XdFxi1qlUa6gHXG96e1FU9gW0N5Rsu0sVZklFYfC2U,2884
  apify/storages/__init__.py,sha256=-9tEYJVabVs_eRVhUehxN58GH0UG8OfuGjGwuDieP2M,122
  apify/storages/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- apify-2.0.0b15.dist-info/LICENSE,sha256=AsFjHssKjj4LGd2ZCqXn6FBzMqcWdjQre1byPPSypVw,11355
- apify-2.0.0b15.dist-info/METADATA,sha256=Gx2P3qQWIGngjAoHLXcPuhF3PAe1wy-DdkxdfQWYEg4,8607
- apify-2.0.0b15.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
- apify-2.0.0b15.dist-info/RECORD,,
+ apify-2.0.1.dist-info/LICENSE,sha256=AsFjHssKjj4LGd2ZCqXn6FBzMqcWdjQre1byPPSypVw,11355
+ apify-2.0.1.dist-info/METADATA,sha256=yD0LEyfMGH3_ePU2-BCBtCJMwFwTOvUVOP730cXjrE0,8655
+ apify-2.0.1.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
+ apify-2.0.1.dist-info/RECORD,,
apify-2.0.0b15.dist-info/WHEEL → apify-2.0.1.dist-info/WHEEL CHANGED
@@ -1,4 +1,4 @@
  Wheel-Version: 1.0
- Generator: poetry-core 1.9.0
+ Generator: poetry-core 1.9.1
  Root-Is-Purelib: true
  Tag: py3-none-any