aioscrapper 0.1.1__tar.gz → 0.2.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (35)
  1. {aioscrapper-0.1.1/src/aioscrapper.egg-info → aioscrapper-0.2.0}/PKG-INFO +18 -8
  2. {aioscrapper-0.1.1 → aioscrapper-0.2.0}/README.md +6 -6
  3. aioscrapper-0.2.0/aioscrapper/pipeline/__init__.py +1 -0
  4. {aioscrapper-0.1.1/src → aioscrapper-0.2.0}/aioscrapper/pipeline/base.py +2 -6
  5. {aioscrapper-0.1.1/src → aioscrapper-0.2.0}/aioscrapper/pipeline/dispatcher.py +1 -1
  6. aioscrapper-0.2.0/aioscrapper/scrapper/base.py +30 -0
  7. {aioscrapper-0.1.1/src → aioscrapper-0.2.0}/aioscrapper/scrapper/executor.py +4 -5
  8. {aioscrapper-0.1.1/src → aioscrapper-0.2.0}/aioscrapper/types/__init__.py +1 -0
  9. aioscrapper-0.2.0/aioscrapper/types/pipeline.py +10 -0
  10. {aioscrapper-0.1.1 → aioscrapper-0.2.0/aioscrapper.egg-info}/PKG-INFO +18 -8
  11. aioscrapper-0.2.0/aioscrapper.egg-info/SOURCES.txt +28 -0
  12. aioscrapper-0.2.0/aioscrapper.egg-info/requires.txt +14 -0
  13. aioscrapper-0.2.0/pyproject.toml +63 -0
  14. aioscrapper-0.2.0/tests/test_error.py +37 -0
  15. aioscrapper-0.2.0/tests/test_success.py +29 -0
  16. aioscrapper-0.1.1/pyproject.toml +0 -24
  17. aioscrapper-0.1.1/src/aioscrapper/pipeline/__init__.py +0 -2
  18. aioscrapper-0.1.1/src/aioscrapper/scrapper/base.py +0 -10
  19. aioscrapper-0.1.1/src/aioscrapper.egg-info/SOURCES.txt +0 -25
  20. aioscrapper-0.1.1/src/aioscrapper.egg-info/requires.txt +0 -2
  21. {aioscrapper-0.1.1 → aioscrapper-0.2.0}/LICENSE +0 -0
  22. {aioscrapper-0.1.1/src → aioscrapper-0.2.0}/aioscrapper/__init__.py +0 -0
  23. {aioscrapper-0.1.1/src → aioscrapper-0.2.0}/aioscrapper/config.py +0 -0
  24. {aioscrapper-0.1.1/src → aioscrapper-0.2.0}/aioscrapper/exceptions.py +0 -0
  25. {aioscrapper-0.1.1/src → aioscrapper-0.2.0}/aioscrapper/helpers.py +0 -0
  26. {aioscrapper-0.1.1/src → aioscrapper-0.2.0}/aioscrapper/scrapper/__init__.py +0 -0
  27. {aioscrapper-0.1.1/src → aioscrapper-0.2.0}/aioscrapper/scrapper/request_manager.py +0 -0
  28. {aioscrapper-0.1.1/src → aioscrapper-0.2.0}/aioscrapper/session/__init__.py +0 -0
  29. {aioscrapper-0.1.1/src → aioscrapper-0.2.0}/aioscrapper/session/aiohttp.py +0 -0
  30. {aioscrapper-0.1.1/src → aioscrapper-0.2.0}/aioscrapper/session/base.py +0 -0
  31. {aioscrapper-0.1.1/src → aioscrapper-0.2.0}/aioscrapper/types/middleware.py +0 -0
  32. {aioscrapper-0.1.1/src → aioscrapper-0.2.0}/aioscrapper/types/session.py +0 -0
  33. {aioscrapper-0.1.1/src → aioscrapper-0.2.0}/aioscrapper.egg-info/dependency_links.txt +0 -0
  34. {aioscrapper-0.1.1/src → aioscrapper-0.2.0}/aioscrapper.egg-info/top_level.txt +0 -0
  35. {aioscrapper-0.1.1 → aioscrapper-0.2.0}/setup.cfg +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: aioscrapper
3
- Version: 0.1.1
3
+ Version: 0.2.0
4
4
  Summary: Async framework for building modular and scalable web scrapers.
5
5
  Author: darkstussy
6
6
  Project-URL: Homepage, https://github.com/darkstussy/aioscrapper
@@ -12,18 +12,28 @@ Classifier: Intended Audience :: Developers
12
12
  Classifier: Operating System :: OS Independent
13
13
  Classifier: Topic :: Internet :: WWW/HTTP :: Indexing/Search
14
14
  Classifier: Topic :: Software Development :: Libraries :: Application Frameworks
15
- Requires-Python: >=3.12
15
+ Requires-Python: >=3.10
16
16
  Description-Content-Type: text/markdown
17
17
  License-File: LICENSE
18
18
  Requires-Dist: aiohttp[speedups]~=3.11.16
19
19
  Requires-Dist: aiojobs~=1.4.0
20
+ Provides-Extra: dev
21
+ Requires-Dist: flake8~=7.1.2; extra == "dev"
22
+ Requires-Dist: black~=25.1.0; extra == "dev"
23
+ Requires-Dist: pyright~=1.1.399; extra == "dev"
24
+ Requires-Dist: aiohttp[speedups]~=3.11.16; extra == "dev"
25
+ Requires-Dist: aiojobs~=1.4.0; extra == "dev"
26
+ Provides-Extra: test
27
+ Requires-Dist: pytest~=8.3.5; extra == "test"
28
+ Requires-Dist: pytest-asyncio~=0.26.0; extra == "test"
29
+ Requires-Dist: aresponses~=3.0.0; extra == "test"
20
30
  Dynamic: license-file
21
31
 
22
32
  # aioscrapper
23
33
 
24
34
  **Asynchronous framework for building modular and scalable web scrapers.**
25
35
 
26
- ![Python](https://img.shields.io/badge/python-3.12%2B-blue)
36
+ ![Python](https://img.shields.io/badge/python-3.10%2B-blue)
27
37
  ![License](https://img.shields.io/github/license/darkstussy/aioscrapper)
28
38
  ![Version](https://img.shields.io/github/v/tag/darkstussy/aioscrapper?label=version)
29
39
 
@@ -44,7 +54,7 @@ pip install aioscrapper
44
54
 
45
55
  ## Requirements
46
56
 
47
- - Python 3.12 or higher
57
+ - Python 3.10 or higher
48
58
  - aiohttp
49
59
  - aiojobs
50
60
 
@@ -53,13 +63,13 @@ pip install aioscrapper
53
63
  ```python
54
64
  import asyncio
55
65
 
56
- from aioscrapper import BaseScrapper, AIOScrapper, RequestSender
57
- from aioscrapper.types import Response
66
+ from aioscrapper import BaseScrapper, AIOScrapper
67
+ from aioscrapper.types import Response, RequestSender
58
68
 
59
69
 
60
70
  class Scrapper(BaseScrapper):
61
- async def start(self, request_sender: RequestSender) -> None:
62
- await request_sender(url="https://example.com", callback=self.parse)
71
+ async def start(self, send_request: RequestSender) -> None:
72
+ await send_request(url="https://example.com", callback=self.parse)
63
73
 
64
74
  async def parse(self, response: Response) -> None:
65
75
  # handle response
@@ -2,7 +2,7 @@
2
2
 
3
3
  **Asynchronous framework for building modular and scalable web scrapers.**
4
4
 
5
- ![Python](https://img.shields.io/badge/python-3.12%2B-blue)
5
+ ![Python](https://img.shields.io/badge/python-3.10%2B-blue)
6
6
  ![License](https://img.shields.io/github/license/darkstussy/aioscrapper)
7
7
  ![Version](https://img.shields.io/github/v/tag/darkstussy/aioscrapper?label=version)
8
8
 
@@ -23,7 +23,7 @@ pip install aioscrapper
23
23
 
24
24
  ## Requirements
25
25
 
26
- - Python 3.12 or higher
26
+ - Python 3.10 or higher
27
27
  - aiohttp
28
28
  - aiojobs
29
29
 
@@ -32,13 +32,13 @@ pip install aioscrapper
32
32
  ```python
33
33
  import asyncio
34
34
 
35
- from aioscrapper import BaseScrapper, AIOScrapper, RequestSender
36
- from aioscrapper.types import Response
35
+ from aioscrapper import BaseScrapper, AIOScrapper
36
+ from aioscrapper.types import Response, RequestSender
37
37
 
38
38
 
39
39
  class Scrapper(BaseScrapper):
40
- async def start(self, request_sender: RequestSender) -> None:
41
- await request_sender(url="https://example.com", callback=self.parse)
40
+ async def start(self, send_request: RequestSender) -> None:
41
+ await send_request(url="https://example.com", callback=self.parse)
42
42
 
43
43
  async def parse(self, response: Response) -> None:
44
44
  # handle response
@@ -0,0 +1 @@
1
+ from .base import BasePipeline
@@ -1,11 +1,7 @@
1
1
  import abc
2
- from typing import TypeVar, Generic, Protocol
3
-
4
-
5
- class BaseItem(Protocol):
6
- @property
7
- def pipeline_name(self) -> str: ...
2
+ from typing import TypeVar, Generic
8
3
 
4
+ from ..types import BaseItem
9
5
 
10
6
  ItemType = TypeVar("ItemType", bound=BaseItem)
11
7
 
@@ -10,7 +10,7 @@ class PipelineDispatcher:
10
10
  self._logger = logger
11
11
  self._pipelines = pipelines
12
12
 
13
- async def put_item(self, item: BaseItem) -> BaseItem:
13
+ async def __call__(self, item: BaseItem) -> BaseItem:
14
14
  self._logger.debug(f"pipeline item received: {item}")
15
15
  try:
16
16
  pipelines = self._pipelines[item.pipeline_name]
@@ -0,0 +1,30 @@
1
+ import abc
2
+
3
+
4
+ class BaseScrapper(abc.ABC):
5
+ @abc.abstractmethod
6
+ async def start(self, *args, **kwargs) -> None:
7
+ """
8
+ Starts the scrapper.
9
+
10
+ This method is called to start the scraper by sending the initial requests required for its operation.
11
+ """
12
+ ...
13
+
14
+ async def initialize(self, *args, **kwargs) -> None:
15
+ """
16
+ Initializes the scrapper.
17
+
18
+ This method is called before starting the scrapper. It should be used to initialize any
19
+ necessary state or resources required by the scrapper.
20
+ """
21
+ ...
22
+
23
+ async def close(self, *args, **kwargs) -> None:
24
+ """
25
+ Closes the scrapper.
26
+
27
+ This method is called to clean up any resources created by the scrapper after it has finished
28
+ running.
29
+ """
30
+ ...
@@ -9,7 +9,8 @@ from aiojobs import Scheduler
9
9
  from .request_manager import RequestManager
10
10
  from ..config import Config
11
11
  from ..helpers import get_func_kwargs
12
- from ..pipeline import PipelineDispatcher, BasePipeline
12
+ from ..pipeline import BasePipeline
13
+ from ..pipeline.dispatcher import PipelineDispatcher
13
14
  from ..scrapper import BaseScrapper
14
15
  from ..session.aiohttp import AiohttpSession
15
16
  from ..types import RequestMiddleware, ResponseMiddleware
@@ -32,9 +33,7 @@ class AIOScrapper:
32
33
  self._response_middlewares = []
33
34
 
34
35
  self._pipelines: dict[str, list[BasePipeline]] = {}
35
- self._pipeline_dispatcher = PipelineDispatcher(
36
- logger=self._logger.getChild("pipeline"), pipelines=self._pipelines
37
- )
36
+ self._pipeline_dispatcher = PipelineDispatcher(self._logger.getChild("pipeline"), pipelines=self._pipelines)
38
37
 
39
38
  def _exception_handler(_, context: dict[str, Any]):
40
39
  if "job" in context:
@@ -96,7 +95,7 @@ class AIOScrapper:
96
95
  await self._pipeline_dispatcher.initialize()
97
96
  self._request_manager.listen_queue()
98
97
 
99
- scrapper_kwargs = {"request_sender": self._request_manager.sender, "pipeline": self._pipeline_dispatcher}
98
+ scrapper_kwargs = {"send_request": self._request_manager.sender, "pipeline": self._pipeline_dispatcher}
100
99
  for scrapper in self._scrappers:
101
100
  await scrapper.initialize(**get_func_kwargs(scrapper.initialize, scrapper_kwargs))
102
101
 
@@ -1,4 +1,5 @@
1
1
  from .middleware import RequestMiddleware, ResponseMiddleware
2
+ from .pipeline import BaseItem, Pipeline
2
3
  from .session import (
3
4
  QueryParams,
4
5
  Cookies,
@@ -0,0 +1,10 @@
1
+ from typing import Protocol
2
+
3
+
4
+ class BaseItem(Protocol):
5
+ @property
6
+ def pipeline_name(self) -> str: ...
7
+
8
+
9
+ class Pipeline(Protocol):
10
+ async def __call__(self, item: BaseItem) -> BaseItem: ...
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: aioscrapper
3
- Version: 0.1.1
3
+ Version: 0.2.0
4
4
  Summary: Async framework for building modular and scalable web scrapers.
5
5
  Author: darkstussy
6
6
  Project-URL: Homepage, https://github.com/darkstussy/aioscrapper
@@ -12,18 +12,28 @@ Classifier: Intended Audience :: Developers
12
12
  Classifier: Operating System :: OS Independent
13
13
  Classifier: Topic :: Internet :: WWW/HTTP :: Indexing/Search
14
14
  Classifier: Topic :: Software Development :: Libraries :: Application Frameworks
15
- Requires-Python: >=3.12
15
+ Requires-Python: >=3.10
16
16
  Description-Content-Type: text/markdown
17
17
  License-File: LICENSE
18
18
  Requires-Dist: aiohttp[speedups]~=3.11.16
19
19
  Requires-Dist: aiojobs~=1.4.0
20
+ Provides-Extra: dev
21
+ Requires-Dist: flake8~=7.1.2; extra == "dev"
22
+ Requires-Dist: black~=25.1.0; extra == "dev"
23
+ Requires-Dist: pyright~=1.1.399; extra == "dev"
24
+ Requires-Dist: aiohttp[speedups]~=3.11.16; extra == "dev"
25
+ Requires-Dist: aiojobs~=1.4.0; extra == "dev"
26
+ Provides-Extra: test
27
+ Requires-Dist: pytest~=8.3.5; extra == "test"
28
+ Requires-Dist: pytest-asyncio~=0.26.0; extra == "test"
29
+ Requires-Dist: aresponses~=3.0.0; extra == "test"
20
30
  Dynamic: license-file
21
31
 
22
32
  # aioscrapper
23
33
 
24
34
  **Asynchronous framework for building modular and scalable web scrapers.**
25
35
 
26
- ![Python](https://img.shields.io/badge/python-3.12%2B-blue)
36
+ ![Python](https://img.shields.io/badge/python-3.10%2B-blue)
27
37
  ![License](https://img.shields.io/github/license/darkstussy/aioscrapper)
28
38
  ![Version](https://img.shields.io/github/v/tag/darkstussy/aioscrapper?label=version)
29
39
 
@@ -44,7 +54,7 @@ pip install aioscrapper
44
54
 
45
55
  ## Requirements
46
56
 
47
- - Python 3.12 or higher
57
+ - Python 3.10 or higher
48
58
  - aiohttp
49
59
  - aiojobs
50
60
 
@@ -53,13 +63,13 @@ pip install aioscrapper
53
63
  ```python
54
64
  import asyncio
55
65
 
56
- from aioscrapper import BaseScrapper, AIOScrapper, RequestSender
57
- from aioscrapper.types import Response
66
+ from aioscrapper import BaseScrapper, AIOScrapper
67
+ from aioscrapper.types import Response, RequestSender
58
68
 
59
69
 
60
70
  class Scrapper(BaseScrapper):
61
- async def start(self, request_sender: RequestSender) -> None:
62
- await request_sender(url="https://example.com", callback=self.parse)
71
+ async def start(self, send_request: RequestSender) -> None:
72
+ await send_request(url="https://example.com", callback=self.parse)
63
73
 
64
74
  async def parse(self, response: Response) -> None:
65
75
  # handle response
@@ -0,0 +1,28 @@
1
+ LICENSE
2
+ README.md
3
+ pyproject.toml
4
+ aioscrapper/__init__.py
5
+ aioscrapper/config.py
6
+ aioscrapper/exceptions.py
7
+ aioscrapper/helpers.py
8
+ aioscrapper.egg-info/PKG-INFO
9
+ aioscrapper.egg-info/SOURCES.txt
10
+ aioscrapper.egg-info/dependency_links.txt
11
+ aioscrapper.egg-info/requires.txt
12
+ aioscrapper.egg-info/top_level.txt
13
+ aioscrapper/pipeline/__init__.py
14
+ aioscrapper/pipeline/base.py
15
+ aioscrapper/pipeline/dispatcher.py
16
+ aioscrapper/scrapper/__init__.py
17
+ aioscrapper/scrapper/base.py
18
+ aioscrapper/scrapper/executor.py
19
+ aioscrapper/scrapper/request_manager.py
20
+ aioscrapper/session/__init__.py
21
+ aioscrapper/session/aiohttp.py
22
+ aioscrapper/session/base.py
23
+ aioscrapper/types/__init__.py
24
+ aioscrapper/types/middleware.py
25
+ aioscrapper/types/pipeline.py
26
+ aioscrapper/types/session.py
27
+ tests/test_error.py
28
+ tests/test_success.py
@@ -0,0 +1,14 @@
1
+ aiohttp[speedups]~=3.11.16
2
+ aiojobs~=1.4.0
3
+
4
+ [dev]
5
+ flake8~=7.1.2
6
+ black~=25.1.0
7
+ pyright~=1.1.399
8
+ aiohttp[speedups]~=3.11.16
9
+ aiojobs~=1.4.0
10
+
11
+ [test]
12
+ pytest~=8.3.5
13
+ pytest-asyncio~=0.26.0
14
+ aresponses~=3.0.0
@@ -0,0 +1,63 @@
1
+ [project]
2
+ name = "aioscrapper"
3
+ version = "0.2.0"
4
+ authors = [{ name = "darkstussy" }, ]
5
+ description = "Async framework for building modular and scalable web scrapers."
6
+ readme = "README.md"
7
+ requires-python = ">=3.10"
8
+ dependencies = [
9
+ "aiohttp[speedups]~=3.11.16",
10
+ "aiojobs~=1.4.0",
11
+ ]
12
+ classifiers = [
13
+ "Programming Language :: Python :: 3",
14
+ "Programming Language :: Python :: 3.12",
15
+ "Framework :: AsyncIO",
16
+ "Intended Audience :: Developers",
17
+ "Operating System :: OS Independent",
18
+ "Topic :: Internet :: WWW/HTTP :: Indexing/Search",
19
+ "Topic :: Software Development :: Libraries :: Application Frameworks",
20
+ ]
21
+
22
+ [project.urls]
23
+ Homepage = "https://github.com/darkstussy/aioscrapper"
24
+ Issues = "https://github.com/darkstussy/aioscrapper/issues"
25
+
26
+ [project.optional-dependencies]
27
+ dev = [
28
+ "flake8~=7.1.2",
29
+ "black~=25.1.0",
30
+ "pyright~=1.1.399",
31
+ "aiohttp[speedups]~=3.11.16",
32
+ "aiojobs~=1.4.0",
33
+ ]
34
+ test = [
35
+ "pytest~=8.3.5",
36
+ "pytest-asyncio~=0.26.0",
37
+ "aresponses~=3.0.0"
38
+ ]
39
+
40
+ [tool.black]
41
+ line-length = 120
42
+ include = '\.pyi?$'
43
+ exclude = '''
44
+ /(
45
+ \.eggs/|
46
+ \.git/|
47
+ \.hg/|
48
+ \.mypy_cache/|
49
+ \.tox/|
50
+ \.venv/|
51
+ \venv/|
52
+ _build/|
53
+ buck-out/|
54
+ build/|
55
+ dist/
56
+ )'''
57
+
58
+ [tool.pytest.ini_options]
59
+ asyncio_mode = "auto"
60
+ testpaths = [
61
+ "tests",
62
+ ]
63
+ asyncio_default_fixture_loop_scope = "function"
@@ -0,0 +1,37 @@
1
+ import pytest
2
+ from aresponses import ResponsesMockServer
3
+
4
+ from aioscrapper import AIOScrapper
5
+ from aioscrapper.exceptions import ClientException, HTTPException
6
+ from aioscrapper.scrapper.base import BaseScrapper
7
+ from aioscrapper.types import RequestSender
8
+
9
+
10
+ class Scrapper(BaseScrapper):
11
+ def __init__(self):
12
+ self.status = None
13
+ self.response_data = None
14
+
15
+ async def start(self, send_request: RequestSender) -> None:
16
+ await send_request(url="https://api.test.com/v1", errback=self.errback)
17
+
18
+ async def errback(self, exc: ClientException) -> None:
19
+ if isinstance(exc, HTTPException):
20
+ self.status = exc.status_code
21
+ self.response_data = exc.message
22
+
23
+
24
+ @pytest.mark.asyncio
25
+ async def test_error(aresponses: ResponsesMockServer):
26
+ def handle_request(request):
27
+ return aresponses.Response(status=500, text="Internal Server Error")
28
+
29
+ aresponses.add("api.test.com", "/v1", "GET", response=handle_request) # pyright: ignore
30
+
31
+ scrapper = Scrapper()
32
+ async with AIOScrapper(scrappers=[scrapper]) as executor:
33
+ await executor.start()
34
+
35
+ assert scrapper.status == 500
36
+ assert scrapper.response_data == "Internal Server Error"
37
+ aresponses.assert_plan_strictly_followed()
@@ -0,0 +1,29 @@
1
+ import pytest
2
+ from aresponses import ResponsesMockServer
3
+
4
+ from aioscrapper import AIOScrapper
5
+ from aioscrapper.scrapper.base import BaseScrapper
6
+ from aioscrapper.types import Response, RequestSender
7
+
8
+
9
+ class Scrapper(BaseScrapper):
10
+ def __init__(self):
11
+ self.response_data = None
12
+
13
+ async def start(self, send_request: RequestSender) -> None:
14
+ await send_request(url="https://api.test.com/v1", callback=self.parse)
15
+
16
+ async def parse(self, response: Response) -> None:
17
+ self.response_data = response.json()
18
+
19
+
20
+ @pytest.mark.asyncio
21
+ async def test_success(aresponses: ResponsesMockServer):
22
+ aresponses.add("api.test.com", "/v1", "GET", response={"status": "OK"}) # pyright: ignore
23
+
24
+ scrapper = Scrapper()
25
+ async with AIOScrapper(scrappers=[scrapper]) as executor:
26
+ await executor.start()
27
+
28
+ assert scrapper.response_data == {"status": "OK"}
29
+ aresponses.assert_plan_strictly_followed()
@@ -1,24 +0,0 @@
1
- [project]
2
- name = "aioscrapper"
3
- version = "0.1.1"
4
- authors = [{ name = "darkstussy" }, ]
5
- description = "Async framework for building modular and scalable web scrapers."
6
- readme = "README.md"
7
- requires-python = ">=3.12"
8
- dependencies = [
9
- "aiohttp[speedups] ~= 3.11.16",
10
- "aiojobs ~= 1.4.0",
11
- ]
12
- classifiers = [
13
- "Programming Language :: Python :: 3",
14
- "Programming Language :: Python :: 3.12",
15
- "Framework :: AsyncIO",
16
- "Intended Audience :: Developers",
17
- "Operating System :: OS Independent",
18
- "Topic :: Internet :: WWW/HTTP :: Indexing/Search",
19
- "Topic :: Software Development :: Libraries :: Application Frameworks",
20
- ]
21
-
22
- [project.urls]
23
- Homepage = "https://github.com/darkstussy/aioscrapper"
24
- Issues = "https://github.com/darkstussy/aioscrapper/issues"
@@ -1,2 +0,0 @@
1
- from .base import BasePipeline, BaseItem
2
- from .dispatcher import PipelineDispatcher
@@ -1,10 +0,0 @@
1
- import abc
2
-
3
-
4
- class BaseScrapper(abc.ABC):
5
- @abc.abstractmethod
6
- async def start(self, *args, **kwargs) -> None: ...
7
-
8
- async def initialize(self, *args, **kwargs) -> None: ...
9
-
10
- async def close(self, *args, **kwargs) -> None: ...
@@ -1,25 +0,0 @@
1
- LICENSE
2
- README.md
3
- pyproject.toml
4
- src/aioscrapper/__init__.py
5
- src/aioscrapper/config.py
6
- src/aioscrapper/exceptions.py
7
- src/aioscrapper/helpers.py
8
- src/aioscrapper.egg-info/PKG-INFO
9
- src/aioscrapper.egg-info/SOURCES.txt
10
- src/aioscrapper.egg-info/dependency_links.txt
11
- src/aioscrapper.egg-info/requires.txt
12
- src/aioscrapper.egg-info/top_level.txt
13
- src/aioscrapper/pipeline/__init__.py
14
- src/aioscrapper/pipeline/base.py
15
- src/aioscrapper/pipeline/dispatcher.py
16
- src/aioscrapper/scrapper/__init__.py
17
- src/aioscrapper/scrapper/base.py
18
- src/aioscrapper/scrapper/executor.py
19
- src/aioscrapper/scrapper/request_manager.py
20
- src/aioscrapper/session/__init__.py
21
- src/aioscrapper/session/aiohttp.py
22
- src/aioscrapper/session/base.py
23
- src/aioscrapper/types/__init__.py
24
- src/aioscrapper/types/middleware.py
25
- src/aioscrapper/types/session.py
@@ -1,2 +0,0 @@
1
- aiohttp[speedups]~=3.11.16
2
- aiojobs~=1.4.0
File without changes
File without changes