apify 2.0.1b5__tar.gz → 2.0.1b6__tar.gz

This diff shows the changes between two publicly released versions of the package, as published to a supported public registry. It is provided for informational purposes only.
Files changed (37)
  1. {apify-2.0.1b5 → apify-2.0.1b6}/PKG-INFO +1 -1
  2. {apify-2.0.1b5 → apify-2.0.1b6}/pyproject.toml +1 -1
  3. {apify-2.0.1b5 → apify-2.0.1b6}/src/apify/_crypto.py +2 -0
  4. {apify-2.0.1b5 → apify-2.0.1b6}/src/apify/_platform_event_manager.py +0 -2
  5. {apify-2.0.1b5 → apify-2.0.1b6}/src/apify/_proxy_configuration.py +1 -0
  6. {apify-2.0.1b5 → apify-2.0.1b6}/src/apify/log.py +2 -0
  7. {apify-2.0.1b5 → apify-2.0.1b6}/src/apify/scrapy/requests.py +4 -0
  8. {apify-2.0.1b5 → apify-2.0.1b6}/src/apify/scrapy/utils.py +4 -0
  9. {apify-2.0.1b5 → apify-2.0.1b6}/LICENSE +0 -0
  10. {apify-2.0.1b5 → apify-2.0.1b6}/README.md +0 -0
  11. {apify-2.0.1b5 → apify-2.0.1b6}/src/apify/__init__.py +0 -0
  12. {apify-2.0.1b5 → apify-2.0.1b6}/src/apify/_actor.py +0 -0
  13. {apify-2.0.1b5 → apify-2.0.1b6}/src/apify/_configuration.py +0 -0
  14. {apify-2.0.1b5 → apify-2.0.1b6}/src/apify/_consts.py +0 -0
  15. {apify-2.0.1b5 → apify-2.0.1b6}/src/apify/_models.py +0 -0
  16. {apify-2.0.1b5 → apify-2.0.1b6}/src/apify/_utils.py +0 -0
  17. {apify-2.0.1b5 → apify-2.0.1b6}/src/apify/apify_storage_client/__init__.py +0 -0
  18. {apify-2.0.1b5 → apify-2.0.1b6}/src/apify/apify_storage_client/_apify_storage_client.py +0 -0
  19. {apify-2.0.1b5 → apify-2.0.1b6}/src/apify/apify_storage_client/_dataset_client.py +0 -0
  20. {apify-2.0.1b5 → apify-2.0.1b6}/src/apify/apify_storage_client/_dataset_collection_client.py +0 -0
  21. {apify-2.0.1b5 → apify-2.0.1b6}/src/apify/apify_storage_client/_key_value_store_client.py +0 -0
  22. {apify-2.0.1b5 → apify-2.0.1b6}/src/apify/apify_storage_client/_key_value_store_collection_client.py +0 -0
  23. {apify-2.0.1b5 → apify-2.0.1b6}/src/apify/apify_storage_client/_request_queue_client.py +0 -0
  24. {apify-2.0.1b5 → apify-2.0.1b6}/src/apify/apify_storage_client/_request_queue_collection_client.py +0 -0
  25. {apify-2.0.1b5 → apify-2.0.1b6}/src/apify/apify_storage_client/py.typed +0 -0
  26. {apify-2.0.1b5 → apify-2.0.1b6}/src/apify/py.typed +0 -0
  27. {apify-2.0.1b5 → apify-2.0.1b6}/src/apify/scrapy/__init__.py +0 -0
  28. {apify-2.0.1b5 → apify-2.0.1b6}/src/apify/scrapy/middlewares/__init__.py +0 -0
  29. {apify-2.0.1b5 → apify-2.0.1b6}/src/apify/scrapy/middlewares/apify_proxy.py +0 -0
  30. {apify-2.0.1b5 → apify-2.0.1b6}/src/apify/scrapy/middlewares/py.typed +0 -0
  31. {apify-2.0.1b5 → apify-2.0.1b6}/src/apify/scrapy/pipelines/__init__.py +0 -0
  32. {apify-2.0.1b5 → apify-2.0.1b6}/src/apify/scrapy/pipelines/actor_dataset_push.py +0 -0
  33. {apify-2.0.1b5 → apify-2.0.1b6}/src/apify/scrapy/pipelines/py.typed +0 -0
  34. {apify-2.0.1b5 → apify-2.0.1b6}/src/apify/scrapy/py.typed +0 -0
  35. {apify-2.0.1b5 → apify-2.0.1b6}/src/apify/scrapy/scheduler.py +0 -0
  36. {apify-2.0.1b5 → apify-2.0.1b6}/src/apify/storages/__init__.py +0 -0
  37. {apify-2.0.1b5 → apify-2.0.1b6}/src/apify/storages/py.typed +0 -0

PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: apify
-Version: 2.0.1b5
+Version: 2.0.1b6
 Summary: Apify SDK for Python
 License: Apache-2.0
 Keywords: apify,sdk,automation,chrome,crawlee,crawler,headless,scraper,scraping

pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api"

 [tool.poetry]
 name = "apify"
-version = "2.0.1b5"
+version = "2.0.1b6"
 description = "Apify SDK for Python"
 authors = ["Apify Technologies s.r.o. <support@apify.com>"]
 license = "Apache-2.0"

src/apify/_crypto.py
@@ -114,6 +114,7 @@ def private_decrypt(
     return decipher_bytes.decode('utf-8')


+@ignore_docs
 def load_private_key(private_key_file_base64: str, private_key_password: str) -> rsa.RSAPrivateKey:
     private_key = serialization.load_pem_private_key(
         base64.b64decode(private_key_file_base64.encode('utf-8')),
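
Most hunks in this release add, move, or drop the @ignore_docs decorator from apify_shared.utils, which marks a symbol to be skipped when the API reference is generated. As a rough sketch of the idea rather than the verbatim apify-shared implementation, such a marker decorator can be a functional no-op:

    from typing import Any, Callable, TypeVar

    T = TypeVar('T', bound=Callable[..., Any])


    def ignore_docs(method: T) -> T:
        """Mark a callable so documentation tooling skips it; at runtime it is returned unchanged."""
        return method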

@@ -133,6 +134,7 @@ def _load_public_key(public_key_file_base64: str) -> rsa.RSAPublicKey:
     return public_key


+@ignore_docs
 def decrypt_input_secrets(private_key: rsa.RSAPrivateKey, input_data: Any) -> Any:
     """Decrypt input secrets."""
     if not isinstance(input_data, dict):
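
Taken together, the two newly hidden helpers form the input-secrets pipeline: the base64-encoded PEM key is loaded first and then used to decrypt any encrypted values in the Actor input. A usage sketch, with hypothetical environment variable names standing in for however the key material is actually supplied:

    import os

    from apify._crypto import decrypt_input_secrets, load_private_key

    # Hypothetical variable names, for illustration only.
    private_key = load_private_key(
        os.environ['PRIVATE_KEY_FILE_BASE64'],
        os.environ['PRIVATE_KEY_PASSPHRASE'],
    )

    # Values matching the encrypted-secret format are replaced with their plaintext;
    # everything else passes through unchanged.
    decrypted_input = decrypt_input_secrets(private_key, {'title': 'hello', 'token': '<encrypted value>'})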

src/apify/_platform_event_manager.py
@@ -8,7 +8,6 @@ import websockets.client
 from pydantic import BaseModel, Discriminator, Field, TypeAdapter
 from typing_extensions import Self, Unpack, override

-from apify_shared.utils import ignore_docs
 from crawlee.events._event_manager import EventManager, EventManagerOptions
 from crawlee.events._local_event_manager import LocalEventManager
 from crawlee.events._types import (

@@ -126,7 +125,6 @@ event_data_adapter: TypeAdapter[EventMessage | DeprecatedEvent | UnknownEvent] =
 )


-@ignore_docs
 class PlatformEventManager(EventManager):
     """A class for managing Actor events.
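
Dropping the decorator makes PlatformEventManager appear in the generated reference. In Actor code, the platform events it manages are normally consumed through the Actor facade rather than by instantiating the manager directly; a minimal sketch, assuming the Event enum is re-exported at the package root:

    from apify import Actor, Event


    async def main() -> None:
        async with Actor:
            # Illustrative handler: log when the platform announces an imminent migration.
            Actor.on(Event.MIGRATING, lambda event_data: Actor.log.info('Migrating soon...'))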
 
src/apify/_proxy_configuration.py
@@ -27,6 +27,7 @@ COUNTRY_CODE_REGEX = re.compile(r'^[A-Z]{2}$')
 SESSION_ID_MAX_LENGTH = 50


+@ignore_docs
 def is_url(url: str) -> bool:
     """Check if the given string is a valid URL."""
     try:
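
is_url is a small validity check applied to proxy URLs before they are used. Assuming the parse-and-catch behavior the snippet suggests, it should behave roughly like this:

    from apify._proxy_configuration import is_url

    assert is_url('http://proxy.apify.com:8000')
    assert not is_url('not a url')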

src/apify/log.py
@@ -3,6 +3,7 @@ from __future__ import annotations
 import logging
 from typing import TYPE_CHECKING

+from apify_shared.utils import ignore_docs
 from crawlee._log_config import CrawleeLogFormatter, configure_logger, get_configured_log_level

 if TYPE_CHECKING:

@@ -15,6 +16,7 @@ logger_name = __name__.split('.')[0]
 logger = logging.getLogger(logger_name)


+@ignore_docs
 class ActorLogFormatter(CrawleeLogFormatter):  # noqa: D101 Inherited from parent class
     pass
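
ActorLogFormatter is now only a documented alias of Crawlee's formatter. A typical wiring sketch, following the logging setup pattern the SDK documents (not taken from this diff):

    import logging

    from apify.log import ActorLogFormatter

    handler = logging.StreamHandler()
    handler.setFormatter(ActorLogFormatter())

    apify_logger = logging.getLogger('apify')
    apify_logger.setLevel(logging.DEBUG)
    apify_logger.addHandler(handler)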
 
src/apify/scrapy/requests.py
@@ -4,6 +4,8 @@ import codecs
 import pickle
 from typing import Any, cast

+from apify_shared.utils import ignore_docs
+
 try:
     from scrapy import Request, Spider
     from scrapy.http.headers import Headers

@@ -28,6 +30,7 @@ def _is_request_produced_by_middleware(scrapy_request: Request) -> bool:
     return bool(scrapy_request.meta.get('redirect_times')) or bool(scrapy_request.meta.get('retry_times'))


+@ignore_docs
 def to_apify_request(scrapy_request: Request, spider: Spider) -> CrawleeRequest | None:
     """Convert a Scrapy request to an Apify request.
 
@@ -98,6 +101,7 @@ def to_apify_request(scrapy_request: Request, spider: Spider) -> CrawleeRequest
     return apify_request


+@ignore_docs
 def to_scrapy_request(apify_request: CrawleeRequest, spider: Spider) -> Request:
     """Convert an Apify request to a Scrapy request.
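
The two converters are counterparts, with one asymmetry visible in the signatures: to_apify_request returns None when the Scrapy request cannot be represented, so callers should guard for it. A round-trip sketch with a hypothetical spider:

    from scrapy import Request, Spider

    from apify.scrapy.requests import to_apify_request, to_scrapy_request


    class DemoSpider(Spider):  # hypothetical spider, for illustration only
        name = 'demo'


    spider = DemoSpider()
    scrapy_request = Request(url='https://example.com')

    apify_request = to_apify_request(scrapy_request, spider)
    if apify_request is not None:
        # Convert back to a Scrapy request.
        restored = to_scrapy_request(apify_request, spider)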
 
src/apify/scrapy/utils.py
@@ -4,6 +4,8 @@ import asyncio
 from base64 import b64encode
 from urllib.parse import unquote

+from apify_shared.utils import ignore_docs
+
 try:
     from scrapy.settings import Settings  # noqa: TCH002
     from scrapy.utils.project import get_project_settings

@@ -18,6 +20,7 @@ except ImportError as exc:
 nested_event_loop: asyncio.AbstractEventLoop = asyncio.new_event_loop()


+@ignore_docs
 def get_basic_auth_header(username: str, password: str, auth_encoding: str = 'latin-1') -> bytes:
     """Generate a basic authentication header for the given username and password."""
     string = f'{unquote(username)}:{unquote(password)}'
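
Given the body shown, the result is a standard RFC 7617 Basic credential; for example:

    from apify.scrapy.utils import get_basic_auth_header

    header = get_basic_auth_header('user', 'pass')
    assert header == b'Basic dXNlcjpwYXNz'  # base64 of 'user:pass'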

@@ -25,6 +28,7 @@ def get_basic_auth_header(username: str, password: str, auth_encoding: str = 'latin-1') -> bytes:
     return b'Basic ' + b64encode(user_pass)


+@ignore_docs
 def get_running_event_loop_id() -> int:
     """Get the ID of the currently running event loop.
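
The module also keeps a dedicated nested_event_loop, which the Scrapy integration uses to run the SDK's async code from Scrapy's synchronous callbacks, so an event-loop ID is mainly a debugging aid for telling loops apart. Presumably the ID is simply the identity of the running loop object; a sketch of that idea:

    import asyncio


    async def main() -> None:
        # id() of the currently running loop, the likely basis of get_running_event_loop_id().
        print(id(asyncio.get_running_loop()))


    asyncio.run(main())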
 