apify 2.3.1b2__py3-none-any.whl → 2.4.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- apify/_actor.py +40 -11
- apify/_crypto.py +38 -0
- apify/_platform_event_manager.py +3 -3
- apify/apify_storage_client/_apify_storage_client.py +3 -3
- apify/apify_storage_client/_dataset_client.py +1 -1
- apify/apify_storage_client/_dataset_collection_client.py +1 -1
- apify/apify_storage_client/_key_value_store_client.py +18 -3
- apify/apify_storage_client/_key_value_store_collection_client.py +1 -1
- apify/apify_storage_client/_request_queue_client.py +1 -1
- apify/apify_storage_client/_request_queue_collection_client.py +1 -1
- apify/storages/_request_list.py +4 -4
- {apify-2.3.1b2.dist-info → apify-2.4.0.dist-info}/METADATA +4 -4
- {apify-2.3.1b2.dist-info → apify-2.4.0.dist-info}/RECORD +15 -15
- {apify-2.3.1b2.dist-info → apify-2.4.0.dist-info}/WHEEL +0 -0
- {apify-2.3.1b2.dist-info → apify-2.4.0.dist-info}/licenses/LICENSE +0 -0
apify/_actor.py
CHANGED
@@ -3,6 +3,7 @@ from __future__ import annotations
 import asyncio
 import os
 import sys
+from contextlib import suppress
 from datetime import timedelta
 from typing import TYPE_CHECKING, Any, Callable, Literal, TypeVar, cast, overload
 
@@ -43,7 +44,7 @@ if TYPE_CHECKING:
     from typing_extensions import Self
 
     from crawlee.proxy_configuration import _NewUrlFunction
-    from crawlee.storage_clients import BaseStorageClient
+    from crawlee.storage_clients import StorageClient
 
     from apify._models import Webhook
 
@@ -64,6 +65,7 @@ class _ActorType:
         configuration: Configuration | None = None,
         *,
         configure_logging: bool = True,
+        exit_process: bool | None = None,
     ) -> None:
         """Create an Actor instance.
 
@@ -74,7 +76,10 @@ class _ActorType:
             configuration: The Actor configuration to be used. If not passed, a new Configuration instance will
                 be created.
             configure_logging: Should the default logging configuration be configured?
+            exit_process: Whether the Actor should call `sys.exit` when the context manager exits. The default is
+                True except for the IPython, Pytest and Scrapy environments.
         """
+        self._exit_process = self._get_default_exit_process() if exit_process is None else exit_process
         self._is_exiting = False
 
         self._configuration = configuration or Configuration.get_global_configuration()
@@ -141,9 +146,19 @@ class _ActorType:
 
         return super().__repr__()
 
-    def __call__(self, configuration: Configuration | None = None, *, configure_logging: bool = True) -> Self:
+    def __call__(
+        self,
+        configuration: Configuration | None = None,
+        *,
+        configure_logging: bool = True,
+        exit_process: bool | None = None,
+    ) -> Self:
         """Make a new Actor instance with a non-default configuration."""
-        return self.__class__(configuration=configuration, configure_logging=configure_logging)
+        return self.__class__(
+            configuration=configuration,
+            configure_logging=configure_logging,
+            exit_process=exit_process,
+        )
 
     @property
     def apify_client(self) -> ApifyClientAsync:
@@ -171,7 +186,7 @@ class _ActorType:
         return logger
 
     @property
-    def _local_storage_client(self) -> BaseStorageClient:
+    def _local_storage_client(self) -> StorageClient:
         """The local storage client the Actor instance uses."""
         return service_locator.get_storage_client()
 
@@ -281,13 +296,7 @@ class _ActorType:
         await asyncio.wait_for(finalize(), cleanup_timeout.total_seconds())
         self._is_initialized = False
 
-        if is_running_in_ipython():
-            self.log.debug(f'Not calling sys.exit({exit_code}) because Actor is running in IPython')
-        elif os.getenv('PYTEST_CURRENT_TEST', default=False):  # noqa: PLW1508
-            self.log.debug(f'Not calling sys.exit({exit_code}) because Actor is running in an unit test')
-        elif os.getenv('SCRAPY_SETTINGS_MODULE'):
-            self.log.debug(f'Not calling sys.exit({exit_code}) because Actor is running with Scrapy')
-        else:
+        if self._exit_process:
             sys.exit(exit_code)
 
     async def fail(
@@ -1128,6 +1137,26 @@ class _ActorType:
 
         return proxy_configuration
 
+    def _get_default_exit_process(self) -> bool:
+        """Returns False for IPython, Pytest, and Scrapy environments, True otherwise."""
+        if is_running_in_ipython():
+            self.log.debug('Running in IPython, setting default `exit_process` to False.')
+            return False
+
+        # Check if running in Pytest by detecting the relevant environment variable.
+        if os.getenv('PYTEST_CURRENT_TEST'):
+            self.log.debug('Running in Pytest, setting default `exit_process` to False.')
+            return False
+
+        # Check if running in Scrapy by attempting to import it.
+        with suppress(ImportError):
+            import scrapy  # noqa: F401
+
+            self.log.debug('Running in Scrapy, setting default `exit_process` to False.')
+            return False
+
+        return True
+
 
 Actor = cast(_ActorType, Proxy(_ActorType))
 """The entry point of the SDK, through which all the Actor operations should be done."""
apify/_crypto.py
CHANGED
@@ -1,6 +1,9 @@
 from __future__ import annotations
 
 import base64
+import hashlib
+import hmac
+import string
 from typing import Any
 
 from cryptography.exceptions import InvalidTag as InvalidTagException
@@ -153,3 +156,38 @@ def decrypt_input_secrets(private_key: rsa.RSAPrivateKey, input_data: Any) -> Any:
     )
 
     return input_data
+
+
+CHARSET = string.digits + string.ascii_letters
+
+
+def encode_base62(num: int) -> str:
+    """Encode the given number to base62."""
+    if num == 0:
+        return CHARSET[0]
+
+    res = ''
+    while num > 0:
+        num, remainder = divmod(num, 62)
+        res = CHARSET[remainder] + res
+    return res
+
+
+@ignore_docs
+def create_hmac_signature(secret_key: str, message: str) -> str:
+    """Generate an HMAC signature and encodes it using Base62. Base62 encoding reduces the signature length.
+
+    HMAC signature is truncated to 30 characters to make it shorter.
+
+    Args:
+        secret_key: Secret key used for signing signatures.
+        message: Message to be signed.
+
+    Returns:
+        Base62 encoded signature.
+    """
+    signature = hmac.new(secret_key.encode('utf-8'), message.encode('utf-8'), hashlib.sha256).hexdigest()[:30]
+
+    decimal_signature = int(signature, 16)
+
+    return encode_base62(decimal_signature)
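The helpers added above are plain module-level functions, so the scheme is easy to sanity-check. A short usage sketch (the secret and message values are made up):

```python
from apify._crypto import CHARSET, create_hmac_signature, encode_base62

# CHARSET is digits followed by ASCII letters, so base62 digit 0 maps to '0' and 61 to 'Z'.
assert encode_base62(0) == '0'
assert encode_base62(61) == 'Z'
assert encode_base62(62) == '10'

# The HMAC-SHA256 hex digest is truncated to 30 hex digits (120 bits) before re-encoding;
# since 62**21 > 16**30, the resulting signature is at most 21 characters long.
print(create_hmac_signature('url-signing-secret', 'my-record-key'))
```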
apify/_platform_event_manager.py
CHANGED
@@ -4,7 +4,7 @@ import asyncio
 from datetime import datetime
 from typing import TYPE_CHECKING, Annotated, Any, Literal, Union
 
-import websockets.client
+import websockets.asyncio.client
 from pydantic import BaseModel, Discriminator, Field, TypeAdapter
 from typing_extensions import Self, Unpack, override
 
@@ -143,7 +143,7 @@ class PlatformEventManager(EventManager):
     but instead use it via the `Actor.on()` and `Actor.off()` methods.
     """
 
-    _platform_events_websocket: websockets.client.WebSocketClientProtocol | None = None
+    _platform_events_websocket: websockets.asyncio.client.ClientConnection | None = None
     _process_platform_messages_task: asyncio.Task | None = None
     _send_system_info_interval_task: asyncio.Task | None = None
     _connected_to_platform_websocket: asyncio.Future = asyncio.Future()
@@ -196,7 +196,7 @@ class PlatformEventManager(EventManager):
 
     async def _process_platform_messages(self, ws_url: str) -> None:
         try:
-            async with websockets.client.connect(ws_url) as websocket:
+            async with websockets.asyncio.client.connect(ws_url) as websocket:
                 self._platform_events_websocket = websocket
                 self._connected_to_platform_websocket.set_result(True)
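This moves the event manager from the legacy `websockets.client` API to the asyncio implementation, which matches the `websockets>=14.0` pin added to METADATA below. A standalone sketch of the new connect call, using a placeholder URL:

```python
import asyncio

import websockets.asyncio.client


async def listen(ws_url: str) -> None:
    # connect() now yields a ClientConnection rather than the legacy WebSocketClientProtocol;
    # the connection can still be iterated asynchronously to receive messages.
    async with websockets.asyncio.client.connect(ws_url) as websocket:
        async for message in websocket:
            print(message)


if __name__ == '__main__':
    asyncio.run(listen('wss://example.com/events'))  # placeholder URL
```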
apify/apify_storage_client/_apify_storage_client.py
CHANGED

@@ -6,7 +6,7 @@ from typing_extensions import override
 
 from apify_client import ApifyClientAsync
 from crawlee._utils.crypto import crypto_random_object_id
-from crawlee.storage_clients import BaseStorageClient
+from crawlee.storage_clients import StorageClient
 
 from apify._utils import docs_group
 from apify.apify_storage_client._dataset_client import DatasetClient
@@ -21,7 +21,7 @@ if TYPE_CHECKING:
 
 
 @docs_group('Classes')
-class ApifyStorageClient(BaseStorageClient):
+class ApifyStorageClient(StorageClient):
     """A storage client implementation based on the Apify platform storage."""
 
     def __init__(self, *, configuration: Configuration) -> None:
@@ -68,5 +68,5 @@ class ApifyStorageClient(BaseStorageClient):
         pass
 
     @override
-    def get_rate_limit_errors(self) -> dict[int, int]:
+    def get_rate_limit_errors(self) -> dict[int, int]:
         return self._apify_client.stats.rate_limit_errors
apify/apify_storage_client/_dataset_client.py
CHANGED

@@ -4,7 +4,7 @@ from typing import TYPE_CHECKING
 
 from typing_extensions import override
 
-from crawlee.storage_clients._base import BaseDatasetClient
+from crawlee.storage_clients._base import DatasetClient as BaseDatasetClient
 from crawlee.storage_clients.models import DatasetItemsListPage, DatasetMetadata
 
 if TYPE_CHECKING:
apify/apify_storage_client/_dataset_collection_client.py
CHANGED

@@ -4,7 +4,7 @@ from typing import TYPE_CHECKING
 
 from typing_extensions import override
 
-from crawlee.storage_clients._base import BaseDatasetCollectionClient
+from crawlee.storage_clients._base import DatasetCollectionClient as BaseDatasetCollectionClient
 from crawlee.storage_clients.models import DatasetListPage, DatasetMetadata
 
 if TYPE_CHECKING:
apify/apify_storage_client/_key_value_store_client.py
CHANGED

@@ -4,10 +4,13 @@ from contextlib import asynccontextmanager
 from typing import TYPE_CHECKING, Any
 
 from typing_extensions import override
+from yarl import URL
 
-from crawlee.storage_clients._base import BaseKeyValueStoreClient
+from crawlee.storage_clients._base import KeyValueStoreClient as BaseKeyValueStoreClient
 from crawlee.storage_clients.models import KeyValueStoreListKeysPage, KeyValueStoreMetadata, KeyValueStoreRecord
 
+from apify._crypto import create_hmac_signature
+
 if TYPE_CHECKING:
     from collections.abc import AsyncIterator
     from contextlib import AbstractAsyncContextManager
@@ -89,6 +92,18 @@ class KeyValueStoreClient(BaseKeyValueStoreClient):
         Args:
             key: The key for which the URL should be generated.
         """
-
+        if self._client.resource_id is None:
+            raise ValueError('resource_id cannot be None when generating a public URL')
+
+        public_url = (
+            URL(self._api_public_base_url) / 'v2' / 'key-value-stores' / self._client.resource_id / 'records' / key
+        )
+
+        key_value_store = await self.get()
+
+        if key_value_store is not None and isinstance(key_value_store.model_extra, dict):
+            url_signing_secret_key = key_value_store.model_extra.get('urlSigningSecretKey')
+            if url_signing_secret_key:
+                public_url = public_url.with_query(signature=create_hmac_signature(url_signing_secret_key, key))
 
-        return
+        return str(public_url)
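The signed URL built by `get_public_url` is just the public record URL with the Base62 HMAC attached as a `signature` query parameter. A rough sketch of the resulting shape, with a made-up store ID, key and signing secret:

```python
from yarl import URL

from apify._crypto import create_hmac_signature

api_public_base_url = 'https://api.apify.com'  # illustrative base URL
store_id = 'WkzbQMuFYuamGv3YF'                 # made-up key-value store ID
key = 'INPUT'

public_url = URL(api_public_base_url) / 'v2' / 'key-value-stores' / store_id / 'records' / key
signed_url = public_url.with_query(signature=create_hmac_signature('url-signing-secret', key))

# e.g. https://api.apify.com/v2/key-value-stores/WkzbQMuFYuamGv3YF/records/INPUT?signature=...
print(signed_url)
```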
apify/apify_storage_client/_key_value_store_collection_client.py
CHANGED

@@ -4,7 +4,7 @@ from typing import TYPE_CHECKING
 
 from typing_extensions import override
 
-from crawlee.storage_clients._base import BaseKeyValueStoreCollectionClient
+from crawlee.storage_clients._base import KeyValueStoreCollectionClient as BaseKeyValueStoreCollectionClient
 from crawlee.storage_clients.models import KeyValueStoreListPage, KeyValueStoreMetadata
 
 if TYPE_CHECKING:
apify/apify_storage_client/_request_queue_client.py
CHANGED

@@ -5,7 +5,7 @@ from typing import TYPE_CHECKING
 from typing_extensions import override
 
 from crawlee import Request
-from crawlee.storage_clients._base import BaseRequestQueueClient
+from crawlee.storage_clients._base import RequestQueueClient as BaseRequestQueueClient
 from crawlee.storage_clients.models import (
     BatchRequestsOperationResponse,
     ProcessedRequest,
apify/apify_storage_client/_request_queue_collection_client.py
CHANGED

@@ -4,7 +4,7 @@ from typing import TYPE_CHECKING
 
 from typing_extensions import override
 
-from crawlee.storage_clients._base import BaseRequestQueueCollectionClient
+from crawlee.storage_clients._base import RequestQueueCollectionClient as BaseRequestQueueCollectionClient
 from crawlee.storage_clients.models import RequestQueueListPage, RequestQueueMetadata
 
 if TYPE_CHECKING:
apify/storages/_request_list.py
CHANGED
@@ -10,7 +10,7 @@ from pydantic import BaseModel, Field, TypeAdapter
 
 from crawlee import Request
 from crawlee._types import HttpMethod
-from crawlee.http_clients import BaseHttpClient, HttpxHttpClient
+from crawlee.http_clients import HttpClient, HttpxHttpClient
 from crawlee.request_loaders import RequestList as CrawleeRequestList
 
 from apify._utils import docs_group
@@ -49,7 +49,7 @@ class RequestList(CrawleeRequestList):
     async def open(
         name: str | None = None,
         request_list_sources_input: list[dict[str, Any]] | None = None,
-        http_client: BaseHttpClient | None = None,
+        http_client: HttpClient | None = None,
     ) -> RequestList:
         """Creates RequestList from Actor input requestListSources.
 
@@ -78,7 +78,7 @@ class RequestList(CrawleeRequestList):
 
     @staticmethod
     async def _create_request_list(
-        name: str | None, request_list_sources_input: list[dict[str, Any]], http_client: BaseHttpClient | None
+        name: str | None, request_list_sources_input: list[dict[str, Any]], http_client: HttpClient | None
     ) -> RequestList:
         if not http_client:
            http_client = HttpxHttpClient()
@@ -108,7 +108,7 @@ class RequestList(CrawleeRequestList):
 
     @staticmethod
     async def _fetch_requests_from_url(
-        remote_url_requests_inputs: list[_RequestsFromUrlInput], http_client: BaseHttpClient
+        remote_url_requests_inputs: list[_RequestsFromUrlInput], http_client: HttpClient
    ) -> list[Request]:
        """Crete list of requests from url.
 
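When `http_client` is omitted, `_create_request_list` falls back to `HttpxHttpClient`, as the hunks above show. A hedged usage sketch (the source entries and URLs are illustrative and follow the Actor `requestListSources` input convention):

```python
import asyncio

from crawlee.http_clients import HttpxHttpClient

from apify.storages import RequestList


async def main() -> None:
    request_list = await RequestList.open(
        request_list_sources_input=[
            {'url': 'https://example.com'},                       # a plain URL source
            {'requestsFromUrl': 'https://example.com/urls.txt'},  # a remote list fetched via the HTTP client
        ],
        http_client=HttpxHttpClient(),  # optional; HttpxHttpClient is used when omitted
    )
    print(await request_list.fetch_next_request())


if __name__ == '__main__':
    asyncio.run(main())
```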
{apify-2.3.1b2.dist-info → apify-2.4.0.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: apify
-Version: 2.3.1b2
+Version: 2.4.0
 Summary: Apify SDK for Python
 Project-URL: Homepage, https://docs.apify.com/sdk/python/
 Project-URL: Apify homepage, https://apify.com
@@ -224,14 +224,14 @@ Classifier: Programming Language :: Python :: 3.13
 Classifier: Topic :: Software Development :: Libraries
 Requires-Python: >=3.9
 Requires-Dist: apify-client>=1.9.2
-Requires-Dist: apify-shared>=1.
-Requires-Dist: crawlee~=0.
+Requires-Dist: apify-shared>=1.3.0
+Requires-Dist: crawlee~=0.6.0
 Requires-Dist: cryptography>=42.0.0
 Requires-Dist: httpx>=0.27.0
 Requires-Dist: lazy-object-proxy>=1.10.0
 Requires-Dist: more-itertools>=10.2.0
 Requires-Dist: typing-extensions>=4.1.0
-Requires-Dist: websockets
+Requires-Dist: websockets>=14.0
 Provides-Extra: scrapy
 Requires-Dist: scrapy>=2.11.0; extra == 'scrapy'
 Description-Content-Type: text/markdown
{apify-2.3.1b2.dist-info → apify-2.4.0.dist-info}/RECORD
CHANGED

@@ -1,23 +1,23 @@
 apify/__init__.py,sha256=HpgKg2FZWJuSPfDygzJ62psylhw4NN4tKFnoYUIhcd4,838
-apify/_actor.py,sha256=
+apify/_actor.py,sha256=PQqFDpAqSbh_aP3EjD8yLGYOmLZMo1qLzqrbVT2KjWE,49697
 apify/_charging.py,sha256=m7hJIQde4M7vS4g_4hsNRP5xHNXjYQ8MyqOEGeNb7VY,12267
 apify/_configuration.py,sha256=yidcWHsu-IJ2mmLmXStKq_HHcdfQxZq7koYjlZfRnQ8,11128
 apify/_consts.py,sha256=_Xq4hOfOA1iZ3n1P967YWdyncKivpbX6RTlp_qanUoE,330
-apify/_crypto.py,sha256=
+apify/_crypto.py,sha256=8BgeQC0ZhYP5KdmLxxLQAW87Gq-Z4HlREbYGXr46w0U,6607
 apify/_models.py,sha256=-Y0rljBJWxMMCp8iDCTG4UV3bEvNZzp-kx2SYbPfeIY,7919
-apify/_platform_event_manager.py,sha256=
+apify/_platform_event_manager.py,sha256=k1e5ruSJdcMKr6j-_XIF-gfhrgyMzdSenYW0QoJROu8,7916
 apify/_proxy_configuration.py,sha256=c-O6_PZ9pUD-i4J0RFEKTtfyJPP2rTRJJA1TH8NVsV8,13189
 apify/_utils.py,sha256=92byxeXTpDFwhBq7ZS-obeXKtKWvVzCZMV0Drg3EjhQ,1634
 apify/log.py,sha256=j-E4t-WeA93bc1NCQRG8sTntehQCiiN8ia-MdQe3_Ts,1291
 apify/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 apify/apify_storage_client/__init__.py,sha256=-UbR68bFsDR6ln8OFs4t50eqcnY36hujO-SeOt-KmcA,114
-apify/apify_storage_client/_apify_storage_client.py,sha256=
-apify/apify_storage_client/_dataset_client.py,sha256=
-apify/apify_storage_client/_dataset_collection_client.py,sha256=
-apify/apify_storage_client/_key_value_store_client.py,sha256=
-apify/apify_storage_client/_key_value_store_collection_client.py,sha256=
-apify/apify_storage_client/_request_queue_client.py,sha256=
-apify/apify_storage_client/_request_queue_collection_client.py,sha256=
+apify/apify_storage_client/_apify_storage_client.py,sha256=qeWYsEQGeyyhJzS9TZTQFNqdSl8JzHz_4_HDKGY4I_Y,2736
+apify/apify_storage_client/_dataset_client.py,sha256=9RxxhrJMic5QRJn2Vl4J-FnSlEigIpYW5Z_2B1dcRzM,5597
+apify/apify_storage_client/_dataset_collection_client.py,sha256=gf5skMTkfpGhEscRy5bgo13vznxGZrSd7w9Ivh3Usyc,1516
+apify/apify_storage_client/_key_value_store_client.py,sha256=OCFUAW0o-8KQvUpL8zmlZrpU3yRmDKdsO2529H2v40I,4002
+apify/apify_storage_client/_key_value_store_collection_client.py,sha256=zjsbRW4zjme6dIzxxlHyCW3voBA5489MUhdjl5YMaro,1596
+apify/apify_storage_client/_request_queue_client.py,sha256=cNMhXz85s1ZtjLpVqkduYl1y6o9QyNdcIGoy6ccD-h0,5178
+apify/apify_storage_client/_request_queue_collection_client.py,sha256=MTLM2cG0txAe3cSjkGbXyq2Ek0R7wlsMbGGULmQGD3I,1603
 apify/apify_storage_client/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 apify/scrapy/__init__.py,sha256=m2a0ts_JY9xJkBy4JU5mV8PJqjA3GGKLXBFu4nl-n-A,1048
 apify/scrapy/_actor_runner.py,sha256=rXWSnlQWGskDUH8PtLCv5SkOIx4AiVa4QbCYeCett5c,938
@@ -34,9 +34,9 @@ apify/scrapy/pipelines/__init__.py,sha256=GWPeLN_Zwj8vRBWtXW6DaxdB7mvyQ7Jw5Tz1cc
 apify/scrapy/pipelines/actor_dataset_push.py,sha256=XUUyznQTD-E3wYUUFt2WAOnWhbnRrY0WuedlfYfYhDI,846
 apify/scrapy/pipelines/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 apify/storages/__init__.py,sha256=FW-z6ubuPnHGM-Wp15T8mR5q6lnpDGrCW-IkgZd5L30,177
-apify/storages/_request_list.py,sha256=
+apify/storages/_request_list.py,sha256=7WpcdWvT3QxEBthynBpTVCSNDLXq6UbpQQmfUVyJ1jE,5849
 apify/storages/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-apify-2.3.1b2.dist-info/METADATA,sha256=
-apify-2.3.1b2.dist-info/WHEEL,sha256=
-apify-2.3.1b2.dist-info/licenses/LICENSE,sha256=
-apify-2.3.1b2.dist-info/RECORD,,
+apify-2.4.0.dist-info/METADATA,sha256=u-SDIKoBk3as6apTeCKo4zCMdB3jvH26c9QauKoCdCk,21556
+apify-2.4.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+apify-2.4.0.dist-info/licenses/LICENSE,sha256=AsFjHssKjj4LGd2ZCqXn6FBzMqcWdjQre1byPPSypVw,11355
+apify-2.4.0.dist-info/RECORD,,
{apify-2.3.1b2.dist-info → apify-2.4.0.dist-info}/WHEEL
File without changes

{apify-2.3.1b2.dist-info → apify-2.4.0.dist-info}/licenses/LICENSE
File without changes