apify 2.0.1__py3-none-any.whl → 2.0.1b1__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package, as they appear in their public registry, and is provided for informational purposes only.

Potentially problematic release: this version of apify might be problematic.

apify/_configuration.py CHANGED
@@ -5,10 +5,8 @@ from datetime import datetime, timedelta
  from typing import Annotated

  from pydantic import AliasChoices, BeforeValidator, Field
- from typing_extensions import deprecated

  from crawlee._utils.models import timedelta_ms
- from crawlee._utils.urls import validate_http_url
  from crawlee.configuration import Configuration as CrawleeConfiguration


@@ -162,16 +160,15 @@ class Configuration(CrawleeConfiguration):
  str | None,
  Field(
  alias='apify_sdk_latest_version',
+ deprecated=True,
  description='Specifies the most recent release version of the Apify SDK for Javascript. Used for '
  'checking for updates.',
  ),
- deprecated('SDK version checking is not supported for the Python SDK'),
  ] = None

  log_format: Annotated[
  str | None,
- Field(alias='apify_log_format'),
- deprecated('Adjust the log format in code instead'),
+ Field(alias='apify_log_format', deprecated=True),
  ] = None

  max_paid_dataset_items: Annotated[
@@ -251,7 +248,6 @@ class Configuration(CrawleeConfiguration):
  ),
  description='Date when the Actor will time out',
  ),
- BeforeValidator(lambda val: val if val != '' else None), # We should accept empty environment variables as well
  ] = None

  standby_port: Annotated[
@@ -260,17 +256,7 @@ class Configuration(CrawleeConfiguration):
  alias='actor_standby_port',
  description='TCP port for the Actor to start an HTTP server to receive messages in the Actor Standby mode',
  ),
- deprecated('Use `web_server_port` instead'),
- ] = 4321
-
- standby_url: Annotated[
- str,
- BeforeValidator(validate_http_url),
- Field(
- alias='actor_standby_url',
- description='URL for accessing web servers of Actor runs in Standby mode',
- ),
- ] = 'http://localhost'
+ ] = 4322

  token: Annotated[
  str | None,
@@ -307,7 +293,7 @@ class Configuration(CrawleeConfiguration):
  'actor_web_server_url',
  'apify_container_url',
  ),
- description='Unique public URL for accessing a specific Actor run web server from the outside world',
+ description='Unique public URL for accessing the Actor run web server from the outside world',
  ),
  ] = 'http://localhost:4321'

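The beta build drops the typing_extensions.deprecated annotations on these configuration fields in favour of Pydantic's own Field(deprecated=True). As a rough illustration of what that flag does, here is a minimal sketch assuming Pydantic >= 2.7 and Python >= 3.10; the class and usage below are illustrative, not the SDK's actual Configuration:

import warnings
from typing import Annotated

from pydantic import BaseModel, Field


class ExampleConfig(BaseModel):
    # With deprecated=True, reading the field emits a DeprecationWarning (Pydantic >= 2.7).
    log_format: Annotated[str | None, Field(alias='apify_log_format', deprecated=True)] = None


config = ExampleConfig(apify_log_format='plain')
with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter('always')
    _ = config.log_format  # attribute access triggers the deprecation warning
print([str(w.message) for w in caught])

Accessing a field declared this way warns at runtime, which is roughly what the removed deprecated(...) annotation provided before.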
apify/_crypto.py CHANGED
@@ -114,7 +114,6 @@ def private_decrypt(
  return decipher_bytes.decode('utf-8')


- @ignore_docs
  def load_private_key(private_key_file_base64: str, private_key_password: str) -> rsa.RSAPrivateKey:
  private_key = serialization.load_pem_private_key(
  base64.b64decode(private_key_file_base64.encode('utf-8')),
@@ -134,7 +133,6 @@ def _load_public_key(public_key_file_base64: str) -> rsa.RSAPublicKey:
  return public_key


- @ignore_docs
  def decrypt_input_secrets(private_key: rsa.RSAPrivateKey, input_data: Any) -> Any:
  """Decrypt input secrets."""
  if not isinstance(input_data, dict):
apify/_platform_event_manager.py CHANGED
@@ -8,6 +8,7 @@ import websockets.client
  from pydantic import BaseModel, Discriminator, Field, TypeAdapter
  from typing_extensions import Self, Unpack, override

+ from apify_shared.utils import ignore_docs
  from crawlee.events._event_manager import EventManager, EventManagerOptions
  from crawlee.events._local_event_manager import LocalEventManager
  from crawlee.events._types import (
@@ -125,6 +126,7 @@ event_data_adapter: TypeAdapter[EventMessage | DeprecatedEvent | UnknownEvent] =
  )


+ @ignore_docs
  class PlatformEventManager(EventManager):
  """A class for managing Actor events.

apify/_proxy_configuration.py CHANGED
@@ -27,7 +27,6 @@ COUNTRY_CODE_REGEX = re.compile(r'^[A-Z]{2}$')
  SESSION_ID_MAX_LENGTH = 50


- @ignore_docs
  def is_url(url: str) -> bool:
  """Check if the given string is a valid URL."""
  try:
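Several hunks above add or remove the @ignore_docs decorator imported from apify_shared.utils, a marker that keeps objects out of the generated API reference. A sketch of the general marker-decorator pattern follows; it is an assumption for illustration only, not the actual apify_shared implementation:

from typing import TypeVar

T = TypeVar('T')


def ignore_docs(obj: T) -> T:
    # Hypothetical marker: tag the object so documentation tooling can skip it.
    # This only illustrates the pattern, not the real apify_shared.utils code.
    obj.__ignore_docs__ = True
    return obj


@ignore_docs
def internal_helper() -> None:
    """Helper that should not appear in the public API reference."""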
apify/_utils.py CHANGED
@@ -11,7 +11,6 @@ def get_system_info() -> dict:
  system_info: dict[str, str | bool] = {
  'apify_sdk_version': metadata.version('apify'),
  'apify_client_version': metadata.version('apify-client'),
- 'crawlee_version': metadata.version('crawlee'),
  'python_version': python_version,
  'os': sys.platform,
  }
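The removed line relied on importlib.metadata.version to read an installed distribution's version string at runtime. A small standalone sketch of that call:

from importlib import metadata

# Returns the installed version string, or raises PackageNotFoundError
# if the distribution is not installed in the current environment.
try:
    crawlee_version = metadata.version('crawlee')
except metadata.PackageNotFoundError:
    crawlee_version = 'not installed'

print(crawlee_version)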
apify/apify_storage_client/_request_queue_client.py CHANGED
@@ -2,6 +2,7 @@ from __future__ import annotations

  from typing import TYPE_CHECKING

+ from more_itertools import chunked
  from typing_extensions import override

  from crawlee import Request
@@ -157,8 +158,11 @@ class RequestQueueClient(BaseRequestQueueClient):
  *,
  forefront: bool = False,
  ) -> BatchRequestsOperationResponse:
- return BatchRequestsOperationResponse.model_validate(
- await self._client.batch_add_requests(
+ processed = []
+ unprocessed = []
+
+ for chunk in chunked(requests, 25): # The API endpoint won't accept more than 25 requests at once
+ response = await self._client.batch_add_requests(
  requests=[
  r.model_dump(
  by_alias=True,
@@ -170,10 +174,18 @@ class RequestQueueClient(BaseRequestQueueClient):
  'data',
  },
  )
- for r in requests
+ for r in chunk
  ],
  forefront=forefront,
  )
+ processed.extend(response['processedRequests'])
+ unprocessed.extend(response['unprocessedRequests'])
+
+ return BatchRequestsOperationResponse.model_validate(
+ {
+ 'processedRequests': processed,
+ 'unprocessedRequests': unprocessed,
+ }
  )

  @override
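The rewritten batch-add path splits the requests into chunks of 25 before calling the API and merges the per-chunk responses afterwards. A minimal sketch of the same pattern with more_itertools.chunked; add_batch is a hypothetical stand-in for the API client's batch-add call and is assumed to return a dict with 'processedRequests' and 'unprocessedRequests' keys:

from more_itertools import chunked


def add_requests_in_batches(requests: list[dict], add_batch) -> dict:
    """Send requests in chunks of 25 and merge the per-chunk responses."""
    processed: list[dict] = []
    unprocessed: list[dict] = []

    for chunk in chunked(requests, 25):  # the endpoint accepts at most 25 requests per call
        response = add_batch(chunk)
        processed.extend(response['processedRequests'])
        unprocessed.extend(response['unprocessedRequests'])

    return {'processedRequests': processed, 'unprocessedRequests': unprocessed}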
apify/log.py CHANGED
@@ -3,7 +3,6 @@ from __future__ import annotations
  import logging
  from typing import TYPE_CHECKING

- from apify_shared.utils import ignore_docs
  from crawlee._log_config import CrawleeLogFormatter, configure_logger, get_configured_log_level

  if TYPE_CHECKING:
@@ -16,7 +15,6 @@ logger_name = __name__.split('.')[0]
  logger = logging.getLogger(logger_name)


- @ignore_docs
  class ActorLogFormatter(CrawleeLogFormatter): # noqa: D101 Inherited from parent class
  pass

apify/scrapy/requests.py CHANGED
@@ -4,8 +4,6 @@ import codecs
  import pickle
  from typing import Any, cast

- from apify_shared.utils import ignore_docs
-
  try:
  from scrapy import Request, Spider
  from scrapy.http.headers import Headers
@@ -16,7 +14,6 @@ except ImportError as exc:
  ) from exc

  from crawlee import Request as CrawleeRequest
- from crawlee._types import HttpHeaders
  from crawlee._utils.crypto import crypto_random_object_id
  from crawlee._utils.requests import compute_unique_key, unique_key_to_request_id

@@ -31,7 +28,6 @@ def _is_request_produced_by_middleware(scrapy_request: Request) -> bool:
  return bool(scrapy_request.meta.get('redirect_times')) or bool(scrapy_request.meta.get('retry_times'))


- @ignore_docs
  def to_apify_request(scrapy_request: Request, spider: Spider) -> CrawleeRequest | None:
  """Convert a Scrapy request to an Apify request.

@@ -78,9 +74,9 @@ def to_apify_request(scrapy_request: Request, spider: Spider) -> CrawleeRequest
  id=request_id,
  )

- # Convert Scrapy's headers to a HttpHeaders and store them in the apify_request
+ # Convert Scrapy's headers to a dictionary and store them in the apify_request
  if isinstance(scrapy_request.headers, Headers):
- apify_request.headers = HttpHeaders(scrapy_request.headers.to_unicode_dict())
+ apify_request.headers = dict(scrapy_request.headers.to_unicode_dict())
  else:
  Actor.log.warning(
  f'Invalid scrapy_request.headers type, not scrapy.http.headers.Headers: {scrapy_request.headers}'
@@ -102,7 +98,6 @@ def to_apify_request(scrapy_request: Request, spider: Spider) -> CrawleeRequest
  return apify_request


- @ignore_docs
  def to_scrapy_request(apify_request: CrawleeRequest, spider: Spider) -> Request:
  """Convert an Apify request to a Scrapy request.

@@ -165,7 +160,13 @@ def to_scrapy_request(apify_request: CrawleeRequest, spider: Spider) -> Request:

  # Add optional 'headers' field
  if apify_request.headers:
- scrapy_request.headers |= Headers(apify_request.headers)
+ if isinstance(cast(Any, apify_request.headers), dict):
+ scrapy_request.headers = Headers(apify_request.headers)
+ else:
+ Actor.log.warning(
+ 'apify_request[headers] is not an instance of the dict class, '
+ f'apify_request[headers] = {apify_request.headers}',
+ )

  # Add optional 'userData' field
  if apify_request.user_data:
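The header handling now round-trips through a plain dict instead of crawlee's HttpHeaders wrapper. A small hedged sketch of that conversion using Scrapy's Headers API (the header values are just example data):

from scrapy.http.headers import Headers

# Scrapy -> plain dict, as in the 2.0.1b1 to_apify_request path.
scrapy_headers = Headers({'User-Agent': 'my-crawler', 'Accept': 'text/html'})
plain_headers = dict(scrapy_headers.to_unicode_dict())

# plain dict -> Scrapy, the reverse direction used in to_scrapy_request.
restored = Headers(plain_headers)

print(plain_headers)  # plain str-to-str mapping of the headers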
apify/scrapy/utils.py CHANGED
@@ -4,8 +4,6 @@ import asyncio
  from base64 import b64encode
  from urllib.parse import unquote

- from apify_shared.utils import ignore_docs
-
  try:
  from scrapy.settings import Settings # noqa: TCH002
  from scrapy.utils.project import get_project_settings
@@ -20,7 +18,6 @@ except ImportError as exc:
  nested_event_loop: asyncio.AbstractEventLoop = asyncio.new_event_loop()


- @ignore_docs
  def get_basic_auth_header(username: str, password: str, auth_encoding: str = 'latin-1') -> bytes:
  """Generate a basic authentication header for the given username and password."""
  string = f'{unquote(username)}:{unquote(password)}'
@@ -28,7 +25,6 @@ def get_basic_auth_header(username: str, password: str, auth_encoding: str = 'la
  return b'Basic ' + b64encode(user_pass)


- @ignore_docs
  def get_running_event_loop_id() -> int:
  """Get the ID of the currently running event loop.

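get_basic_auth_header, touched above, URL-decodes the credentials and base64-encodes the resulting username:password pair. A self-contained sketch of the same logic; the function body mirrors the diff, while the example call is illustrative:

from base64 import b64encode
from urllib.parse import unquote


def get_basic_auth_header(username: str, password: str, auth_encoding: str = 'latin-1') -> bytes:
    """Build an HTTP Basic authentication header value from the given credentials."""
    credentials = f'{unquote(username)}:{unquote(password)}'  # undo any URL-encoding first
    return b'Basic ' + b64encode(credentials.encode(auth_encoding))


# Produces a value usable as: Authorization: Basic dXNlcjpzZWNyZXQ=
print(get_basic_auth_header('user', 'secret'))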
apify-2.0.1.dist-info/METADATA → apify-2.0.1b1.dist-info/METADATA RENAMED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: apify
- Version: 2.0.1
+ Version: 2.0.1b1
  Summary: Apify SDK for Python
  License: Apache-2.0
  Keywords: apify,sdk,automation,chrome,crawlee,crawler,headless,scraper,scraping
@@ -16,12 +16,11 @@ Classifier: Programming Language :: Python :: 3.9
  Classifier: Programming Language :: Python :: 3.10
  Classifier: Programming Language :: Python :: 3.11
  Classifier: Programming Language :: Python :: 3.12
- Classifier: Programming Language :: Python :: 3.13
  Classifier: Topic :: Software Development :: Libraries
  Provides-Extra: scrapy
- Requires-Dist: apify-client (>=1.8.1)
+ Requires-Dist: apify-client (>=1.7.1)
  Requires-Dist: apify-shared (>=1.1.2)
- Requires-Dist: crawlee (>=0.3.8)
+ Requires-Dist: crawlee (>=0.3.5)
  Requires-Dist: cryptography (>=42.0.0)
  Requires-Dist: httpx (>=0.27.0)
  Requires-Dist: lazy-object-proxy (>=1.10.0)
apify-2.0.1.dist-info/RECORD → apify-2.0.1b1.dist-info/RECORD RENAMED
@@ -1,22 +1,22 @@
  apify/__init__.py,sha256=ikoi2EpDYl6y-XSVtlU8UsdQdMEyOiIJCRRAaZFDOP8,550
  apify/_actor.py,sha256=oPgQ3rxxIEzVcZ9XtI3lf1a_6gwIMgxihNuYGjJpGww,41816
- apify/_configuration.py,sha256=n67sK9g8llsG8ZqIWhi1BEHliplGs5PTB6Ig7_0PaMU,9629
+ apify/_configuration.py,sha256=gf7YOun32Whc9DamhoWDLmcUeNwtWVmmBPrl4oq6s4I,8997
  apify/_consts.py,sha256=_Xq4hOfOA1iZ3n1P967YWdyncKivpbX6RTlp_qanUoE,330
- apify/_crypto.py,sha256=e0_aM3l9_5Osk-jszYOOjrAKK60OggSHbiw5c30QnsU,5638
+ apify/_crypto.py,sha256=b4Czs1NLPkaNzkPjovObjSIbsKnRrgtBkM9JvOysUMA,5612
  apify/_models.py,sha256=oYlTEr-DyQAE-V2rrYD5PhUxTXVPdAig7QV-u6CJw3E,5571
- apify/_platform_event_manager.py,sha256=K4cHabbQ7_ex7vkX-c-VhAOp8Efw3HDn5Wp4lfA-qAU,7571
- apify/_proxy_configuration.py,sha256=2z4VV_NrnIp6pDpgQKlKpcHM2pPyXiOpFedpPWje48A,13087
- apify/_utils.py,sha256=Ghho3Gf11zYN8qhjhAkZRvQ--A9Js36GHB0YSDGUK58,694
+ apify/_platform_event_manager.py,sha256=h5fBmXtKD4t-yCdOSiLM1-DnCrIbGEmYmz2mOU3A8bA,7627
+ apify/_proxy_configuration.py,sha256=VdKh_AyCwaCUlpCyaCe30L2S9OZ-vL1SN1g8oLwSeYA,13074
+ apify/_utils.py,sha256=x4lnR9RNulySiEQTft-GeQqUcJsRr0k8p0Sv9NTeWFg,638
  apify/apify_storage_client/__init__.py,sha256=-UbR68bFsDR6ln8OFs4t50eqcnY36hujO-SeOt-KmcA,114
  apify/apify_storage_client/_apify_storage_client.py,sha256=xi4OFchxhe-1-sykanH6Zcya4OcBhn2uf7OQ1pV4Ins,2338
  apify/apify_storage_client/_dataset_client.py,sha256=j9seF2OKvbSMD9R9XF9fpa1vtr_1w4JcRV--WCmvU4E,5501
  apify/apify_storage_client/_dataset_collection_client.py,sha256=fkYvYGQCigHD2CDzpWk0swNAkfvAinAhMGpYqllle3E,1445
  apify/apify_storage_client/_key_value_store_client.py,sha256=uyeQgb75sGFsqIS4sq4hEZ3QP81COLfS3tmTqHc0tso,3340
  apify/apify_storage_client/_key_value_store_collection_client.py,sha256=vCtMTI-jx89Qp5WHILDNkCthwLuv0MAwm1J_5E4aypU,1519
- apify/apify_storage_client/_request_queue_client.py,sha256=jAiFkaJ38_myHFGTw-Rk21wmpbN0UCR2w2SFoimFGFc,5826
+ apify/apify_storage_client/_request_queue_client.py,sha256=P8ws8jEzi2PWpp-cvYfV7kwuKbgH813BpNQ_wMSVtTA,6278
  apify/apify_storage_client/_request_queue_collection_client.py,sha256=NnO73UJ9ZrjV8xoudo30wfaM-SojRkG0guhxDyB-K1g,1527
  apify/apify_storage_client/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- apify/log.py,sha256=zIVjrqQ1DNWNQQOAmdmR9oAbf4nJH7CSMB6u4OOUf6I,1448
+ apify/log.py,sha256=pX6ppIvds8OKqjFpIcshqG4zp_5DiOUU31ksyfSExto,1392
  apify/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  apify/scrapy/__init__.py,sha256=qDPV_zTRFaUqoFOyS5g4uBfz-UCkmWYJ82VXQ_3Cw6k,348
  apify/scrapy/middlewares/__init__.py,sha256=tfW-d3WFWLeNEjL8fTmon6NwgD-OXx1Bw2fBdU-wPy4,114
@@ -26,12 +26,12 @@ apify/scrapy/pipelines/__init__.py,sha256=GWPeLN_Zwj8vRBWtXW6DaxdB7mvyQ7Jw5Tz1cc
  apify/scrapy/pipelines/actor_dataset_push.py,sha256=QERmmExQOGIKQ70-p-lCj5qyE-c-fnYplEqd4mgaB1Q,953
  apify/scrapy/pipelines/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  apify/scrapy/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- apify/scrapy/requests.py,sha256=F4VNaX2fGqybJKbhcRcz0_m6dXse5LzKll4gtMuTRko,7480
+ apify/scrapy/requests.py,sha256=pmm2M-cwrTXyI3t1nRBo9pS6nHfc4zkzS25-NXxzd9I,7637
  apify/scrapy/scheduler.py,sha256=AAIKY5i1QxkC1mtmix6n3M2eQaOw-d1T56Noue9xToc,6013
- apify/scrapy/utils.py,sha256=0XdFxi1qlUa6gHXG96e1FU9gW0N5Rsu0sVZklFYfC2U,2884
+ apify/scrapy/utils.py,sha256=tz_Y8CTqe6KbyMMhLF3m7qqR46jtNH5U7Ty7e19roPU,2814
  apify/storages/__init__.py,sha256=-9tEYJVabVs_eRVhUehxN58GH0UG8OfuGjGwuDieP2M,122
  apify/storages/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- apify-2.0.1.dist-info/LICENSE,sha256=AsFjHssKjj4LGd2ZCqXn6FBzMqcWdjQre1byPPSypVw,11355
- apify-2.0.1.dist-info/METADATA,sha256=yD0LEyfMGH3_ePU2-BCBtCJMwFwTOvUVOP730cXjrE0,8655
- apify-2.0.1.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
- apify-2.0.1.dist-info/RECORD,,
+ apify-2.0.1b1.dist-info/LICENSE,sha256=AsFjHssKjj4LGd2ZCqXn6FBzMqcWdjQre1byPPSypVw,11355
+ apify-2.0.1b1.dist-info/METADATA,sha256=GPwXUjHARouYHQTkQ8HCOgMn-rJcf8taaugNu5aoVkA,8606
+ apify-2.0.1b1.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+ apify-2.0.1b1.dist-info/RECORD,,
apify-2.0.1.dist-info/WHEEL → apify-2.0.1b1.dist-info/WHEEL RENAMED
@@ -1,4 +1,4 @@
  Wheel-Version: 1.0
- Generator: poetry-core 1.9.1
+ Generator: poetry-core 1.9.0
  Root-Is-Purelib: true
  Tag: py3-none-any