aio-scrapy 2.0.10__tar.gz → 2.1.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {aio-scrapy-2.0.10/aio_scrapy.egg-info → aio-scrapy-2.1.0}/PKG-INFO +4 -1
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0/aio_scrapy.egg-info}/PKG-INFO +4 -1
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aio_scrapy.egg-info/SOURCES.txt +1 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aio_scrapy.egg-info/requires.txt +4 -0
- aio-scrapy-2.1.0/aioscrapy/VERSION +1 -0
- aio-scrapy-2.1.0/aioscrapy/core/downloader/handlers/curl_cffi.py +61 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/core/engine.py +2 -27
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/core/scheduler.py +46 -5
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/crawler.py +5 -2
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/libs/downloader/retry.py +8 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/libs/pipelines/__init__.py +3 -6
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/settings/default_settings.py +4 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/spiders/__init__.py +2 -2
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/setup.py +2 -1
- aio-scrapy-2.0.10/aioscrapy/VERSION +0 -1
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/LICENSE +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/MANIFEST.in +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/README.md +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aio_scrapy.egg-info/dependency_links.txt +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aio_scrapy.egg-info/entry_points.txt +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aio_scrapy.egg-info/not-zip-safe +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aio_scrapy.egg-info/top_level.txt +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/__init__.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/__main__.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/cmdline.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/commands/__init__.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/commands/crawl.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/commands/genspider.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/commands/list.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/commands/runspider.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/commands/settings.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/commands/startproject.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/commands/version.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/core/__init__.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/core/downloader/__init__.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/core/downloader/handlers/__init__.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/core/downloader/handlers/aiohttp.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/core/downloader/handlers/httpx.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/core/downloader/handlers/playwright/__init__.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/core/downloader/handlers/playwright/driverpool.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/core/downloader/handlers/playwright/webdriver.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/core/downloader/handlers/pyhttpx.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/core/downloader/handlers/requests.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/core/scraper.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/db/__init__.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/db/absmanager.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/db/aiomongo.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/db/aiomysql.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/db/aiopg.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/db/aiorabbitmq.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/db/aioredis.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/dupefilters/__init__.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/dupefilters/disk.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/dupefilters/redis.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/exceptions.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/http/__init__.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/http/headers.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/http/request/__init__.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/http/request/form.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/http/request/json_request.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/http/response/__init__.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/http/response/html.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/http/response/playwright.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/http/response/text.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/http/response/xml.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/libs/__init__.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/libs/downloader/__init__.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/libs/downloader/defaultheaders.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/libs/downloader/downloadtimeout.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/libs/downloader/ja3fingerprint.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/libs/downloader/stats.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/libs/downloader/useragent.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/libs/extensions/__init__.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/libs/extensions/closespider.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/libs/extensions/corestats.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/libs/extensions/logstats.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/libs/extensions/metric.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/libs/extensions/throttle.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/libs/pipelines/csv.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/libs/pipelines/execl.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/libs/pipelines/mongo.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/libs/pipelines/mysql.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/libs/pipelines/pg.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/libs/spider/__init__.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/libs/spider/depth.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/libs/spider/httperror.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/libs/spider/offsite.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/libs/spider/referer.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/libs/spider/urllength.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/link.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/logformatter.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/middleware/__init__.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/middleware/absmanager.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/middleware/downloader.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/middleware/extension.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/middleware/itempipeline.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/middleware/spider.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/process.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/proxy/__init__.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/proxy/redis.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/queue/__init__.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/queue/memory.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/queue/rabbitmq.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/queue/redis.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/scrapyd/__init__.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/scrapyd/runner.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/serializer.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/settings/__init__.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/signalmanager.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/signals.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/spiderloader.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/statscollectors.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/templates/project/aioscrapy.cfg +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/templates/project/module/__init__.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/templates/project/module/middlewares.py.tmpl +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/templates/project/module/pipelines.py.tmpl +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/templates/project/module/settings.py.tmpl +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/templates/project/module/spiders/__init__.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/templates/spiders/basic.tmpl +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/templates/spiders/single.tmpl +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/utils/__init__.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/utils/conf.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/utils/curl.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/utils/decorators.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/utils/deprecate.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/utils/httpobj.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/utils/log.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/utils/misc.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/utils/ossignal.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/utils/project.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/utils/python.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/utils/reqser.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/utils/request.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/utils/response.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/utils/signal.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/utils/spider.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/utils/template.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/utils/tools.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/utils/trackref.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/utils/url.py +0 -0
- {aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/setup.cfg +0 -0
{aio-scrapy-2.0.10/aio_scrapy.egg-info → aio-scrapy-2.1.0}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: aio-scrapy
-Version: 2.0.10
+Version: 2.1.0
 Summary: A high-level Web Crawling and Web Scraping framework based on Asyncio
 Home-page: https://github.com/conlin-huang/aio-scrapy.git
 Author: conlin
@@ -39,6 +39,7 @@ Requires-Dist: asyncpg>=0.27.0; extra == "all"
 Requires-Dist: XlsxWriter>=3.1.2; extra == "all"
 Requires-Dist: pillow>=9.4.0; extra == "all"
 Requires-Dist: requests>=2.28.2; extra == "all"
+Requires-Dist: curl_cffi; extra == "all"
 Provides-Extra: aiomysql
 Requires-Dist: aiomysql>=0.1.1; extra == "aiomysql"
 Requires-Dist: cryptography; extra == "aiomysql"
@@ -52,6 +53,8 @@ Provides-Extra: playwright
 Requires-Dist: playwright>=1.31.1; extra == "playwright"
 Provides-Extra: pyhttpx
 Requires-Dist: pyhttpx>=2.10.4; extra == "pyhttpx"
+Provides-Extra: curl-cffi
+Requires-Dist: curl_cffi>=0.6.1; extra == "curl-cffi"
 Provides-Extra: requests
 Requires-Dist: requests>=2.28.2; extra == "requests"
 Provides-Extra: pg

{aio-scrapy-2.0.10 → aio-scrapy-2.1.0/aio_scrapy.egg-info}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: aio-scrapy
-Version: 2.0.10
+Version: 2.1.0
 Summary: A high-level Web Crawling and Web Scraping framework based on Asyncio
 Home-page: https://github.com/conlin-huang/aio-scrapy.git
 Author: conlin
@@ -39,6 +39,7 @@ Requires-Dist: asyncpg>=0.27.0; extra == "all"
 Requires-Dist: XlsxWriter>=3.1.2; extra == "all"
 Requires-Dist: pillow>=9.4.0; extra == "all"
 Requires-Dist: requests>=2.28.2; extra == "all"
+Requires-Dist: curl_cffi; extra == "all"
 Provides-Extra: aiomysql
 Requires-Dist: aiomysql>=0.1.1; extra == "aiomysql"
 Requires-Dist: cryptography; extra == "aiomysql"
@@ -52,6 +53,8 @@ Provides-Extra: playwright
 Requires-Dist: playwright>=1.31.1; extra == "playwright"
 Provides-Extra: pyhttpx
 Requires-Dist: pyhttpx>=2.10.4; extra == "pyhttpx"
+Provides-Extra: curl-cffi
+Requires-Dist: curl_cffi>=0.6.1; extra == "curl-cffi"
 Provides-Extra: requests
 Requires-Dist: requests>=2.28.2; extra == "requests"
 Provides-Extra: pg

{aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aio_scrapy.egg-info/SOURCES.txt

@@ -38,6 +38,7 @@ aioscrapy/core/scraper.py
 aioscrapy/core/downloader/__init__.py
 aioscrapy/core/downloader/handlers/__init__.py
 aioscrapy/core/downloader/handlers/aiohttp.py
+aioscrapy/core/downloader/handlers/curl_cffi.py
 aioscrapy/core/downloader/handlers/httpx.py
 aioscrapy/core/downloader/handlers/pyhttpx.py
 aioscrapy/core/downloader/handlers/requests.py

aio-scrapy-2.1.0/aioscrapy/VERSION (new file)

@@ -0,0 +1 @@
+2.1.0

aio-scrapy-2.1.0/aioscrapy/core/downloader/handlers/curl_cffi.py (new file)

@@ -0,0 +1,61 @@
+import ssl
+
+from curl_cffi.requests import AsyncSession
+
+from aioscrapy import Request
+from aioscrapy.core.downloader.handlers import BaseDownloadHandler
+from aioscrapy.http import HtmlResponse
+from aioscrapy.settings import Settings
+from aioscrapy.utils.log import logger
+
+
+class CurlCffiDownloadHandler(BaseDownloadHandler):
+
+    def __init__(self, settings):
+        self.settings: Settings = settings
+        self.httpx_client_session_args: dict = self.settings.get('CURL_CFFI_CLIENT_SESSION_ARGS', {})
+        self.verify_ssl: bool = self.settings.get("VERIFY_SSL", True)
+
+    @classmethod
+    def from_settings(cls, settings: Settings):
+        return cls(settings)
+
+    async def download_request(self, request: Request, _) -> HtmlResponse:
+        kwargs = {
+            'timeout': self.settings.get('DOWNLOAD_TIMEOUT'),
+            'cookies': dict(request.cookies),
+            'verify': request.meta.get('verify_ssl', self.verify_ssl),
+            'allow_redirects': self.settings.getbool('REDIRECT_ENABLED', True) if request.meta.get(
+                'dont_redirect') is None else request.meta.get('dont_redirect'),
+            'impersonate': request.meta.get('impersonate'),
+        }
+        post_data = request.body or None
+        if isinstance(post_data, dict):
+            kwargs['json'] = post_data
+        else:
+            kwargs['data'] = post_data
+
+        headers = request.headers or self.settings.get('DEFAULT_REQUEST_HEADERS')
+        kwargs['headers'] = headers
+
+        proxy = request.meta.get("proxy")
+        if proxy:
+            kwargs["proxies"] = {'http': proxy, 'https': proxy}
+            logger.debug(f"use proxy {proxy}: {request.url}")
+
+        session_args = self.httpx_client_session_args.copy()
+
+        async with AsyncSession(**session_args) as session:
+            response = await session.request(request.method, request.url, **kwargs)
+
+        return HtmlResponse(
+            str(response.url),
+            status=response.status_code,
+            headers=response.headers,
+            body=response.text,
+            cookies=dict(response.cookies),
+            encoding=response.encoding
+        )
+
+    async def close(self):
+        pass

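The new handler is driven almost entirely by `request.meta`: `impersonate` selects a browser TLS fingerprint for curl_cffi to mimic, `verify_ssl` overrides the `VERIFY_SSL` setting per request, and `proxy` is mapped to curl_cffi's `proxies` argument. A minimal sketch of a request exercising those keys (the URL and values are illustrative only):

    from aioscrapy import Request

    req = Request(
        'https://tls.browserleaks.com/json',   # any fingerprint-echo endpoint works here
        meta={
            'impersonate': 'chrome110',        # curl_cffi impersonation target
            'verify_ssl': False,               # per-request override of VERIFY_SSL
            'proxy': 'http://127.0.0.1:7890',  # forwarded as {'http': ..., 'https': ...}
        },
    )
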
{aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/core/engine.py

@@ -42,8 +42,6 @@ class ExecutionEngine(object):
         self.signals = crawler.signals
         self.logformatter = crawler.logformatter

-        self.enqueue_cache_num = self.settings.getint("ENQUEUE_CACHE_NUM")
-        self.enqueue_cache: Queue = Queue(self.enqueue_cache_num)
         self.slot: Optional[Slot] = None
         self.spider: Optional[Spider] = None
         self.downloader: Optional[DownloaderTV] = None
@@ -53,7 +51,6 @@ class ExecutionEngine(object):
         self.running: bool = False
         self.unlock: bool = True
         self.finish: bool = False
-        self.enqueue_unlock: bool = True

     async def start(
             self,
@@ -70,7 +67,6 @@ class ExecutionEngine(object):
         while not self.finish:
             self.running and await self._next_request()
             await asyncio.sleep(1)
-            self.enqueue_cache_num != 1 and create_task(self._crawl())
             self.running and await self._spider_idle(self.spider)

     async def stop(self, reason: str = 'shutdown') -> None:
@@ -81,7 +77,6 @@ class ExecutionEngine(object):

         while not self.is_idle():
             await asyncio.sleep(0.2)
-            self.enqueue_cache_num != 1 and create_task(self._crawl())
         await self.close_spider(self.spider, reason=reason)
         await self.signals.send_catch_log_deferred(signal=signals.engine_stopped)
         self.finish = True
@@ -212,27 +207,8 @@ class ExecutionEngine(object):
         return True

     async def crawl(self, request: Request) -> None:
-        if self.enqueue_cache_num == 1:
-            await call_helper(self.scheduler.enqueue_request, request)
-            create_task(self._next_request())
-        else:
-            await self.enqueue_cache.put(request)
-
-    async def _crawl(self) -> None:
-        if not self.enqueue_unlock:
-            return
-        self.enqueue_unlock = False
-        requests = []
-        for _ in range(self.enqueue_cache.qsize()):
-            try:
-                request = self.enqueue_cache.get_nowait()
-                requests.append(request)
-            except QueueEmpty:
-                break
-        if requests:
-            await call_helper(self.scheduler.enqueue_request_batch, requests)
-            create_task(self._next_request())
-        self.enqueue_unlock = True
+        await self.scheduler.enqueue_request(request)
+        # create_task(self._next_request())

     async def close_spider(self, spider: Spider, reason: str = 'cancelled') -> None:
         """Close (cancel) spider and clear all its outstanding requests"""
@@ -276,7 +252,6 @@ class ExecutionEngine(object):
         # method of 'has_pending_requests' has IO, so method of 'is_idle' execute twice
         if self.is_idle() \
                 and self.slot.start_requests is None \
-                and self.enqueue_unlock and self.enqueue_cache.empty() \
                 and not await self.scheduler.has_pending_requests() \
                 and self.is_idle():
             await self.stop(reason='finished')

{aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/core/scheduler.py

@@ -31,7 +31,7 @@ class BaseScheduler(metaclass=BaseSchedulerMeta):
     @classmethod
     async def from_crawler(cls, crawler: "aioscrapy.Crawler") -> "BaseScheduler":
         """
-        Factory method which receives the current :class:`~
+        Factory method which receives the current :class:`~aioscrapy.crawler.Crawler` object as argument.
         """
         return cls()

@@ -103,20 +103,27 @@ class Scheduler(BaseScheduler):
             queue: AbsQueue,
             spider: aioscrapy.Spider,
             stats=Optional[StatsCollector],
-            persist: bool = True
+            persist: bool = True,
+            cache_queue: Optional[AbsQueue] = None
     ):
+
         self.queue = queue
+        self.cache_queue = cache_queue
         self.spider = spider
         self.stats = stats
         self.persist = persist

     @classmethod
     async def from_crawler(cls: Type[SchedulerTV], crawler: "aioscrapy.Crawler") -> SchedulerTV:
+        cache_queue = None
+        if crawler.settings.getbool('USE_SCHEDULER_QUEUE_CACHE', False):
+            cache_queue = await load_instance('aioscrapy.queue.memory.SpiderPriorityQueue', spider=crawler.spider)
         instance = cls(
             await load_instance(crawler.settings['SCHEDULER_QUEUE_CLASS'], spider=crawler.spider),
             crawler.spider,
             stats=crawler.stats,
-            persist=crawler.settings.getbool('SCHEDULER_PERSIST', True)
+            persist=crawler.settings.getbool('SCHEDULER_PERSIST', True),
+            cache_queue=cache_queue
         )

         if crawler.settings.getbool('SCHEDULER_FLUSH_ON_START', False):
@@ -128,8 +135,20 @@ class Scheduler(BaseScheduler):
         return instance

     async def close(self, reason: str) -> None:
+
         if not self.persist:
             await self.flush()
+            return
+
+        # When persisting, push the requests left in the cache back to the distributed queue (e.g. Redis)
+        if self.cache_queue is not None:
+            while True:
+                temp = []
+                async for request in self.cache_queue.pop(2000):
+                    temp.append(request)
+                temp and await self.queue.push_batch(temp)
+                if len(temp) < 2000:
+                    break

     async def flush(self) -> None:
         await call_helper(self.queue.clear)
@@ -141,16 +160,38 @@ class Scheduler(BaseScheduler):
         return True

     async def enqueue_request(self, request: aioscrapy.Request) -> bool:
-        await call_helper(self.queue.push, request)
+        """
+        If the cache queue is enabled (USE_SCHEDULER_QUEUE_CACHE), requests are pushed to the cache queue first
+        """
+        if self.cache_queue is not None:
+            await call_helper(self.cache_queue.push, request)
+        else:
+            await call_helper(self.queue.push, request)
         if self.stats:
             self.stats.inc_value(self.queue.inc_key, spider=self.spider)
         return True

     async def next_request(self, count: int = 1) -> Optional[aioscrapy.Request]:
+        """
+        If the cache queue is enabled (USE_SCHEDULER_QUEUE_CACHE), requests are taken from the cache queue first, then from the distributed queue (e.g. Redis)
+        """
+        flag = False
+        if self.cache_queue is not None:
+            async for request in self.cache_queue.pop(count):
+                if request and self.stats:
+                    self.stats.inc_value(self.queue.inc_key, spider=self.spider)
+                yield request
+                flag = True
+
+        if flag:
+            return
+
         async for request in self.queue.pop(count):
             if request and self.stats:
                 self.stats.inc_value(self.queue.inc_key, spider=self.spider)
             yield request

+
     async def has_pending_requests(self) -> bool:
-        return await call_helper(self.queue.len)
+        return await call_helper(self.queue.len) if self.cache_queue is None \
+            else (await call_helper(self.queue.len) + await call_helper(self.cache_queue.len)) > 0

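Together these hunks replace the engine-level enqueue cache removed above: with `USE_SCHEDULER_QUEUE_CACHE` enabled, new requests land in an in-memory priority queue and are drained back to the distributed queue in batches of up to 2000 when the scheduler closes with `SCHEDULER_PERSIST` on. A sketch of the settings involved (the Redis queue path is illustrative; the memory queue path is the one hard-coded in `from_crawler`):

    # settings fragment, a sketch rather than a complete settings module
    SCHEDULER_QUEUE_CLASS = 'aioscrapy.queue.redis.SpiderPriorityQueue'  # illustrative distributed queue
    SCHEDULER_PERSIST = True           # cached requests are pushed back on close
    USE_SCHEDULER_QUEUE_CACHE = True   # fronts it with aioscrapy.queue.memory.SpiderPriorityQueue
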
{aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/crawler.py

@@ -234,9 +234,12 @@ class CrawlerProcess(CrawlerRunner):
         finally:
             await self.recycle_db_connect()

-    def start(self) -> None:
+    def start(self, use_windows_selector_eventLoop: bool = False) -> None:
         if sys.platform.startswith('win'):
-            asyncio.set_event_loop(asyncio.windows_events.ProactorEventLoop())
+            if use_windows_selector_eventLoop:
+                asyncio.set_event_loop_policy(asyncio.windows_events.WindowsSelectorEventLoopPolicy())
+            else:
+                asyncio.set_event_loop(asyncio.windows_events.ProactorEventLoop())
         else:
             try:
                 import uvloop

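`CrawlerProcess.start` previously forced the Proactor loop on Windows; the new flag opts into the selector event loop policy instead, which some asyncio libraries still require there. A usage sketch (the spider class is a placeholder):

    from aioscrapy.crawler import CrawlerProcess
    from aioscrapy.spiders import Spider
    from aioscrapy.utils.project import get_project_settings


    class DemoSpider(Spider):  # placeholder spider for illustration
        name = 'demo'


    cp = CrawlerProcess(get_project_settings())
    cp.crawl(DemoSpider)
    cp.start(use_windows_selector_eventLoop=True)  # only consulted on Windows; other platforms try uvloop
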
{aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/libs/downloader/retry.py

@@ -61,6 +61,14 @@ try:
 except ImportError:
     pass

+
+try:
+    from curl_cffi.curl import CurlError
+
+    NEED_RETRY_ERROR += (CurlError,)
+except ImportError:
+    pass
+
 from aioscrapy.exceptions import NotConfigured
 from aioscrapy.http.request import Request
 from aioscrapy.spiders import Spider

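The retry middleware accumulates retryable transport exceptions in `NEED_RETRY_ERROR`, guarding each optional backend import so a missing curl_cffi cannot break startup. The same guard pattern extends the tuple for any other optional backend (the module and exception below are hypothetical):

    try:
        from some_backend import TransportError  # hypothetical optional dependency

        NEED_RETRY_ERROR += (TransportError,)
    except ImportError:
        pass
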
{aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/libs/pipelines/__init__.py

@@ -8,15 +8,13 @@ class SqlFormat:

     @staticmethod
     def pg_insert(table: str, fields: list, *args) -> str:
-        fields = ','.join(fields)
         placeholder = ','.join([f'${i + 1}' for i in range(len(fields))])
-        return f'''INSERT INTO {table} ({fields}) VALUES ({placeholder})'''
+        return f'''INSERT INTO {table} ({",".join(fields)}) VALUES ({placeholder})'''

     @staticmethod
     def pg_ignore_insert(table: str, fields: list, *args) -> str:
         placeholder = ','.join([f'${i + 1}' for i in range(len(fields))])
-
-        return f'INSERT INTO {table} ({fields}) VALUES ({placeholder}) ON CONFLICT DO NOTHING'
+        return f'''INSERT INTO {table} ({",".join(fields)}) VALUES ({placeholder}) ON CONFLICT DO NOTHING'''

     @staticmethod
     def pg_update_insert(table: str, fields: list, update_fields: list, on_conflict: str, *args) -> str:
@@ -25,8 +23,7 @@ class SqlFormat:
         if not update_fields:
             update_fields = fields
         update_fields = ','.join([f"{key} = excluded.{key}" for key in update_fields])
-
-        return f'INSERT INTO {table} ({fields}) VALUES ({placeholder}) ON CONFLICT({on_conflict}) DO UPDATE SET {update_fields}'
+        return f'''INSERT INTO {table} ({",".join(fields)}) VALUES ({placeholder}) ON CONFLICT({on_conflict}) DO UPDATE SET {update_fields}'''

     @staticmethod
     def mysql_insert(table: str, fields: list, *args) -> str:

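These hunks fix the PostgreSQL statement builders: the old `pg_insert` rebound `fields` to the joined string before counting placeholders, so `len(fields)` counted characters rather than columns (e.g. `['name', 'age']` yielded eight `$n` placeholders instead of two), while `pg_ignore_insert` and `pg_update_insert` interpolated the raw list into the SQL. With the fix, the builders produce:

    SqlFormat.pg_insert('person', ['name', 'age'])
    # -> INSERT INTO person (name,age) VALUES ($1,$2)
    SqlFormat.pg_ignore_insert('person', ['name', 'age'])
    # -> INSERT INTO person (name,age) VALUES ($1,$2) ON CONFLICT DO NOTHING
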
{aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/settings/default_settings.py

@@ -72,6 +72,10 @@ DOWNLOAD_HANDLERS_MAP = {
         'http': 'aioscrapy.core.downloader.handlers.playwright.PlaywrightHandler',
         'https': 'aioscrapy.core.downloader.handlers.playwright.PlaywrightHandler',
     },
+    'curl_cffi': {
+        'http': 'aioscrapy.core.downloader.handlers.curl_cffi.CurlCffiDownloadHandler',
+        'https': 'aioscrapy.core.downloader.handlers.curl_cffi.CurlCffiDownloadHandler',
+    },
 }

 DOWNLOAD_TIMEOUT = 180  # 3mins

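The map entry gives curl_cffi the same wiring as the other backends. Exactly how a project selects a map key is outside this diff; a sketch that points both schemes at the new handler directly, using only names visible above (`DOWNLOAD_HANDLERS` as a per-scheme override is an assumption borrowed from Scrapy conventions):

    DOWNLOAD_HANDLERS = {  # assumption: Scrapy-style per-scheme handler setting
        'http': 'aioscrapy.core.downloader.handlers.curl_cffi.CurlCffiDownloadHandler',
        'https': 'aioscrapy.core.downloader.handlers.curl_cffi.CurlCffiDownloadHandler',
    }
    CURL_CFFI_CLIENT_SESSION_ARGS = {}  # passed verbatim to curl_cffi's AsyncSession(**args)
    VERIFY_SSL = True                   # default; overridable per request via meta['verify_ssl']
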
{aio-scrapy-2.0.10 → aio-scrapy-2.1.0}/aioscrapy/spiders/__init__.py

@@ -106,7 +106,7 @@ class Spider(object):
     __repr__ = __str__

     @classmethod
-    def start(cls, setting_path=None):
+    def start(cls, setting_path=None, use_windows_selector_eventLoop: bool = False):
         from aioscrapy.crawler import CrawlerProcess
         from aioscrapy.utils.project import get_project_settings

@@ -115,7 +115,7 @@ class Spider(object):
         settings.setmodule(setting_path)
         cp = CrawlerProcess(settings)
         cp.crawl(cls)
-        cp.start()
+        cp.start(use_windows_selector_eventLoop)

     def spider_idle(self):
         if not self.close_on_idle:

|
|
|
19
19
|
"all": [
|
|
20
20
|
"aiomysql>=0.1.1", "httpx[http2]>=0.23.0", "aio-pika>=8.1.1",
|
|
21
21
|
"cryptography", "motor>=3.1.1", "pyhttpx>=2.10.1", "asyncpg>=0.27.0",
|
|
22
|
-
"XlsxWriter>=3.1.2", "pillow>=9.4.0", "requests>=2.28.2"
|
|
22
|
+
"XlsxWriter>=3.1.2", "pillow>=9.4.0", "requests>=2.28.2", "curl_cffi"
|
|
23
23
|
],
|
|
24
24
|
"aiomysql": ["aiomysql>=0.1.1", "cryptography"],
|
|
25
25
|
"httpx": ["httpx[http2]>=0.23.0"],
|
|
@@ -27,6 +27,7 @@ extras_require = {
|
|
|
27
27
|
"mongo": ["motor>=3.1.1"],
|
|
28
28
|
"playwright": ["playwright>=1.31.1"],
|
|
29
29
|
"pyhttpx": ["pyhttpx>=2.10.4"],
|
|
30
|
+
"curl_cffi": ["curl_cffi>=0.6.1"],
|
|
30
31
|
"requests": ["requests>=2.28.2"],
|
|
31
32
|
"pg": ["asyncpg>=0.27.0"],
|
|
32
33
|
"execl": ["XlsxWriter>=3.1.2", "pillow>=9.4.0"],
|
|
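With the extras table carrying the pinned backend, the optional dependency installs with `pip install aio-scrapy[curl_cffi]` (PyPI normalizes the key to `curl-cffi`, matching the Provides-Extra line in PKG-INFO above).
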
aio-scrapy-2.0.10/aioscrapy/VERSION (deleted)

@@ -1 +0,0 @@
-2.0.10