aio-scrapy 2.1.0__py3-none-any.whl → 2.1.2__py3-none-any.whl

This diff shows the changes between these two publicly released package versions as they appear in their public registry. It is provided for informational purposes only.
aio_scrapy-2.1.2.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: aio-scrapy
- Version: 2.1.0
+ Version: 2.1.2
  Summary: A high-level Web Crawling and Web Scraping framework based on Asyncio
  Home-page: https://github.com/conlin-huang/aio-scrapy.git
  Author: conlin
@@ -38,7 +38,7 @@ Requires-Dist: aiomysql >=0.1.1 ; extra == 'all'
  Requires-Dist: httpx[http2] >=0.23.0 ; extra == 'all'
  Requires-Dist: aio-pika >=8.1.1 ; extra == 'all'
  Requires-Dist: cryptography ; extra == 'all'
- Requires-Dist: motor >=3.1.1 ; extra == 'all'
+ Requires-Dist: motor >=2.1.0 ; extra == 'all'
  Requires-Dist: pyhttpx >=2.10.1 ; extra == 'all'
  Requires-Dist: asyncpg >=0.27.0 ; extra == 'all'
  Requires-Dist: XlsxWriter >=3.1.2 ; extra == 'all'
@@ -53,7 +53,7 @@ Requires-Dist: pillow >=9.4.0 ; extra == 'execl'
  Provides-Extra: httpx
  Requires-Dist: httpx[http2] >=0.23.0 ; extra == 'httpx'
  Provides-Extra: mongo
- Requires-Dist: motor >=3.1.1 ; extra == 'mongo'
+ Requires-Dist: motor >=2.1.0 ; extra == 'mongo'
  Provides-Extra: pg
  Requires-Dist: asyncpg >=0.27.0 ; extra == 'pg'
  Provides-Extra: playwright
@@ -89,7 +89,7 @@ The quick way:
 
  ```shell
  # Install the latest aio-scrapy
- pip install git+https://github.com/conlin-huang/aio-scrapy
+ pip install git+https://github.com/ConlinH/aio-scrapy
 
  # default
  pip install aio-scrapy
aio_scrapy-2.1.2.dist-info/RECORD CHANGED
@@ -1,9 +1,9 @@
- aioscrapy/VERSION,sha256=gkj3dyaHr_CxA7NomJCN64ISYzf5M-SaWsBIKQk8WP8,5
+ aioscrapy/VERSION,sha256=P_yDzF7mglQfHrNrG3774VSyqYLfjq7rdsUEPwejXH0,5
  aioscrapy/__init__.py,sha256=esJeH66Mz9WV7XbotvZEjNn49jc589YZ_L2DKoD0JvA,858
  aioscrapy/__main__.py,sha256=rvTdJ0cQwbi29aucPj3jJRpccx5SBzvRcV7qvxvX2NQ,80
  aioscrapy/cmdline.py,sha256=1qhNg2Edl-Obmf2re2K4V8pJG7ubGfZZCzcHdKtdE_s,5159
  aioscrapy/crawler.py,sha256=6-ptivIjIGKdojOlZqXV0hV3x1Gont81tOC5u5JqIME,10330
- aioscrapy/exceptions.py,sha256=NjA2Rx1KZsjMgH7IOdNpxuRkh-RwylRCYvEhwgXKIb8,2027
+ aioscrapy/exceptions.py,sha256=k1daw1hV_aqsaIKKibdyqcNPyVn5oUb07wmB2DRxfjs,2111
  aioscrapy/link.py,sha256=fXMqsHvYEzsuYi-sNDcElS7jV6Lusq0tjPkPUGOlyZw,1867
  aioscrapy/logformatter.py,sha256=y3etd28ACbpTbcGprJ_cQ086gxQY3k_QX_yxYFoF1AU,3028
  aioscrapy/process.py,sha256=uFkj2wzaBu0Vs3pGFKdJ4R-0Gn7hROX6EU-B5zddnyQ,1603
@@ -22,28 +22,28 @@ aioscrapy/commands/startproject.py,sha256=Rcc7JkN75Jp2t2aZIxBzPsWbLXChNAUSByDhcW
  aioscrapy/commands/version.py,sha256=yqqTMlZkkiQhtbU9w_IqUWLMOAjqYlv24friEkPRQYM,485
  aioscrapy/core/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  aioscrapy/core/engine.py,sha256=h02-K2lQqlCxvNIlURgPpnhHCbyiJRIWrFJt5Ys7vZY,9843
- aioscrapy/core/scheduler.py,sha256=sHrTfzSkqCVZTVw7zWAyv3vDd6iUwE9xbIsnePUVeZk,7408
- aioscrapy/core/scraper.py,sha256=M_bcizLUzWuECe7sIIZ_HJLNrPzL7dX2o-tN5nvFnCs,10304
- aioscrapy/core/downloader/__init__.py,sha256=22TC0z49BX3YvDUPl6DKMrOonECpY5tjaWJGGEV7RbU,9574
+ aioscrapy/core/scheduler.py,sha256=czCx5oHknXuHadpISTfoEMSKXXrlwJTmLTUQtHdtaTc,7407
+ aioscrapy/core/scraper.py,sha256=dh06xcSI5SHC15psF41Y7RiDxq7V59E2VtUqjInmM8g,10539
+ aioscrapy/core/downloader/__init__.py,sha256=QqBDokvvEgJMRJuQ7Xs_HJpAbbS2A0Z_75HOwVXyxPo,9747
  aioscrapy/core/downloader/handlers/__init__.py,sha256=CriaX2Cp4jUqzDDGZDB7HiIEgUWt2pnYVho6HMV6sJ0,3198
- aioscrapy/core/downloader/handlers/aiohttp.py,sha256=dFVVeGgJ1WZcE1zI4fQOZIzmrkC6l1WZcYstHmB3qYg,3942
- aioscrapy/core/downloader/handlers/curl_cffi.py,sha256=6jBp9WrGU0PCWd3HfXLD6P3MkMIG_zmKNCKieORVPas,2250
- aioscrapy/core/downloader/handlers/httpx.py,sha256=-DfjYgfrjxMhaMpTgEOFlQRONasCXV0g6UgH3WmWcfs,3041
- aioscrapy/core/downloader/handlers/pyhttpx.py,sha256=fgD6Kz_gfB17KHbnkFtUHJDjfYR-c9P2LhuYX4hcva8,2228
- aioscrapy/core/downloader/handlers/requests.py,sha256=I49YnAxFGf-_a_YR-1AOG8vPLMmKiMtdmP4Xn-c0dPw,1996
- aioscrapy/core/downloader/handlers/playwright/__init__.py,sha256=lTI7Strp7SSZxM5IfZVgJGVwv9UfhTA4LdFpsfvzZ5k,4160
+ aioscrapy/core/downloader/handlers/aiohttp.py,sha256=hoQhdsOsj77HVx7Lf1CEzmwf07C1pRdO87xiazQQ5dE,4263
+ aioscrapy/core/downloader/handlers/curl_cffi.py,sha256=emvIuOgW8m4dXHVchBdP-lRlQwd6c-KZ7KEq3rxHkEw,2576
+ aioscrapy/core/downloader/handlers/httpx.py,sha256=aMgqlQEBaDfwNCz9uzqn4DTcs-vqy8WVRmLoHL8tSSI,3384
+ aioscrapy/core/downloader/handlers/pyhttpx.py,sha256=XWe838pJxwRocIV5x1qbWXSmaczdFI_cOvHIeB0lYdg,2559
+ aioscrapy/core/downloader/handlers/requests.py,sha256=XB6XAa91NUhxYJHUA0z-E-QTW8nU6BcLVZgYgD0vKA8,2362
+ aioscrapy/core/downloader/handlers/playwright/__init__.py,sha256=6r8ieFGkKGqJzPMs-vjDcpbJNWzHof-dhu2qVLaf7AE,4501
  aioscrapy/core/downloader/handlers/playwright/driverpool.py,sha256=qfIdGjORdn1MookO-ucIJ8NOeLrIQ0y0UJY_xuMzM_8,1374
  aioscrapy/core/downloader/handlers/playwright/webdriver.py,sha256=QFtAT--2Ea_Gg4x1EhMidyOwQjbqljUl4sKGB_hAA00,3530
  aioscrapy/db/__init__.py,sha256=ISBXM_-cCf5CgTLc3i_emLxV163-ZAbgttkQiRxokD0,2456
  aioscrapy/db/absmanager.py,sha256=6vlPcjDHOtZCHePiUYPe6ezRnM-TB4XLhmuw7APaWDk,1162
- aioscrapy/db/aiomongo.py,sha256=A9NjJy7_jI1J3hj8rw-o1PuXhXwFghHs4xSnsO5-ZfE,2745
+ aioscrapy/db/aiomongo.py,sha256=t4JpRPBBisF7_rz02Kp6AejrphLvLWg5rF-yYLIe2MI,3071
  aioscrapy/db/aiomysql.py,sha256=-xCLfeH7RzvghY1jqREAb_Qnz9q_dVjxoHGfz7sCqbU,3799
  aioscrapy/db/aiopg.py,sha256=WG4s_2X0b8LQHbZpoIrwZeuGHNolKj-SvmvAZQlCk00,3213
  aioscrapy/db/aiorabbitmq.py,sha256=tNKl4Kx7KM7H_lOj8xfeA0uD8PuBTVzySApTEn5TyAE,5583
  aioscrapy/db/aioredis.py,sha256=UOoTRTQUvghnq29bVL8v1HvksMXYOzHaS8Btgbpn0bY,2966
- aioscrapy/dupefilters/__init__.py,sha256=17s6Hyr_lWDFPto6wLEvRfT2TbGU2RIssTDuChzrDNA,1498
+ aioscrapy/dupefilters/__init__.py,sha256=KPLIs9RMcl_8oD9gb3wQMRtbmRv_rTdfCp-qlekh1Qg,1684
  aioscrapy/dupefilters/disk.py,sha256=EMgxeC2a6aYCGKgp4QOs5xwHp33LUsOZ8pliKBTFx1c,1551
- aioscrapy/dupefilters/redis.py,sha256=cUuM68dEM1_ki2eOzZ6pAvmLZlAP_tC4lx73Ufmg_Bs,4812
+ aioscrapy/dupefilters/redis.py,sha256=KrI_SjH--yTNlLmJRGVp1N1BgyKsy6JtT9p02LxTWTc,6969
  aioscrapy/http/__init__.py,sha256=yeQTT5W1iwr6dKznTS5d9vnx2hsB47i9roPM57wQp_0,597
  aioscrapy/http/headers.py,sha256=H-RJ6KqOsFFFAXORfvoyz3V-ud0I8TAj5Jt5fAACcLc,1573
  aioscrapy/http/request/__init__.py,sha256=PFoFU3ncTN-gj6Rx01rjVa_744Qfv3EH29mooW6JX9U,7121
@@ -59,7 +59,7 @@ aioscrapy/libs/downloader/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJW
  aioscrapy/libs/downloader/defaultheaders.py,sha256=tg_ULA0Y-41bZKG607mowFJQGVfnZ45LdR044DsjA_A,563
  aioscrapy/libs/downloader/downloadtimeout.py,sha256=hNh3OEj7rC0ceQrv_yrhR5lb5AvfxJ6cspj3qsQWj4o,704
  aioscrapy/libs/downloader/ja3fingerprint.py,sha256=DgTw74GXC_Bp94eD_bwoG6A_DphUHTt7bH4glBNXyV8,1058
- aioscrapy/libs/downloader/retry.py,sha256=nNhAqudTBhYJES1CEuzo0a-ucmS2WKcj8bOvs3PwPjw,5306
+ aioscrapy/libs/downloader/retry.py,sha256=0670bPz5lc4wUsWmYlhYdGZdeflsQdFhJbnwK1g0c84,4441
  aioscrapy/libs/downloader/stats.py,sha256=FlkS8Zm4j3SBjHb6caXwq08HvvZ37VKORGCAjlA2U38,1376
  aioscrapy/libs/downloader/useragent.py,sha256=E5x5dk9AxsSCGDDICJlTXwWXRkqAibWgesqG0VhAG8M,743
  aioscrapy/libs/extensions/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -71,7 +71,7 @@ aioscrapy/libs/extensions/throttle.py,sha256=yos2D3XZgH40G52kltMKv5_GeAK4MqpRwTu
  aioscrapy/libs/pipelines/__init__.py,sha256=XW5Ur6bhvGLo-w-tdUeIB4jkFpZxqUU9mbajfAAztb0,5642
  aioscrapy/libs/pipelines/csv.py,sha256=-PEZOt-3ndF0ePO7EnqjEqeCYMJR9wHv3XcpSq6QswI,2454
  aioscrapy/libs/pipelines/execl.py,sha256=a8sfgQCHUc0MIja9cPP4TZ6ghfkxYZuAzLDIK4_nQuo,6284
- aioscrapy/libs/pipelines/mongo.py,sha256=jiPyC3C0mNb-zlS0ecEBgl883gBtBQBFEeBR8DOcmmI,2001
+ aioscrapy/libs/pipelines/mongo.py,sha256=B3dhvspxc4lmPh2noqARYV-rFuHfivdSfZ7ZlPKnk7c,2323
  aioscrapy/libs/pipelines/mysql.py,sha256=gN4DnyuXTQvDvy9Gu-v8F6sT8l7GZEa45AD0d-Ckv8s,1022
  aioscrapy/libs/pipelines/pg.py,sha256=la-SflXtGFw4IQYlOn75Brw2IfmtOUcCh0gUSz_Jg-0,990
  aioscrapy/libs/spider/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -96,7 +96,7 @@ aioscrapy/scrapyd/__init__.py,sha256=Ey14RVLUP7typ2XqP8RWcUum2fuFyigdhuhBBiEheIo
  aioscrapy/scrapyd/runner.py,sha256=tewEkdNTMrBoredCbhmdrswSrF-GWsU3MLgC__ntnzQ,1777
  aioscrapy/settings/__init__.py,sha256=GuiVhezV8U2J1B-WJwSvxxeH_1YWYD_Wighr9owC4HU,15781
  aioscrapy/settings/default_settings.py,sha256=PrUOFYNnPIS8eCdqvRylMLBK-4tT-2MYuU6Nn8dQrx0,5639
- aioscrapy/spiders/__init__.py,sha256=KoM3RMOtvWqN5Qfh6AATTWFmj9DIVmhQqrU_EhE1EdI,4010
+ aioscrapy/spiders/__init__.py,sha256=oM_FzqWa46P6cjzarOO1cfDTQD2AuIPgaWZrmdMcuTI,4085
  aioscrapy/templates/project/aioscrapy.cfg,sha256=_nRHP5wtPnZaBi7wCmjWv5BgUu5NYFJZhvCTRVSipyM,112
  aioscrapy/templates/project/module/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  aioscrapy/templates/project/module/middlewares.py.tmpl,sha256=0eEf2LC0vYcWPH82HNqieYSORyUuIo3Bgl5t-neRAJ4,3469
@@ -104,7 +104,7 @@ aioscrapy/templates/project/module/pipelines.py.tmpl,sha256=-MYA7MFAffH8FTG1VGAk
  aioscrapy/templates/project/module/settings.py.tmpl,sha256=AO2jmyokUhuhFqxMvsMihPgSY4ZrldsMs-BuOEVfvQY,1421
  aioscrapy/templates/project/module/spiders/__init__.py,sha256=Zg1uss1vaNjvld9s9Ccua50SxVZwpFTPwqpBHoCrWdU,164
  aioscrapy/templates/spiders/basic.tmpl,sha256=oO1vh7-TZLjvpwdrYC49TGe-A6Kulc8UIG4Sa0QhDfI,375
- aioscrapy/templates/spiders/single.tmpl,sha256=nqB7vP_L9icsxJWiSpsSSRZ1nTxHWowTjieRIcYaYIw,942
+ aioscrapy/templates/spiders/single.tmpl,sha256=Ptmo_uFDGEffvpEMyxec7sxIyBbP05x0Grhn5u6lZbQ,1011
  aioscrapy/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  aioscrapy/utils/conf.py,sha256=NkSmKjOE7xVvrAWQu4ne3jOzNGucgZdWHPhGbpz8dPU,7208
  aioscrapy/utils/curl.py,sha256=I8eZWFNgvyUiJ2YS9-s3HltGNVG8XMMU0HPhlMxuxdA,3295
@@ -115,7 +115,7 @@ aioscrapy/utils/log.py,sha256=NRDivw8w21J77qEUeqqLdC4sgdIKaj2UAP6lDvWGotM,1697
  aioscrapy/utils/misc.py,sha256=9NOssEl7CP_c6R9skxyXwmz4bd-nZ_gkw6F0EybeLTQ,3509
  aioscrapy/utils/ossignal.py,sha256=jAsCIKu17KV45-9dZwEkFJHF31Y13KP_zxY0x49j1jo,896
  aioscrapy/utils/project.py,sha256=cT98HaR5JaNmm-Y1UzSuzXj6B5S7GlmMshUfMhjpjJY,2905
- aioscrapy/utils/python.py,sha256=fMV3Y2s7AnbQ7TChBoQodqPNzGEdVA3J89W-arwswd4,4577
+ aioscrapy/utils/python.py,sha256=38oD-OSjeGb3XZFJn3bt74PwGbejnBfLWC5-lkUL0g8,4462
  aioscrapy/utils/reqser.py,sha256=qjrYut6KtvGpLLd-HDM0cncNzWCtXgpH6NyERu_5A9g,487
  aioscrapy/utils/request.py,sha256=bkFaLDeebAOp7pF-7vta9LKOB2OR2s7V9jVKfA-XlqA,2418
  aioscrapy/utils/response.py,sha256=UPR1wTTAYZkLGiiIs28kJLhlF7WPrgLuW31l9LZuYKM,1341
@@ -125,9 +125,9 @@ aioscrapy/utils/template.py,sha256=HR97X4lpv2WuqhuPfzTgaBN66fYnzHVpP6zQ5IoTwcI,8
  aioscrapy/utils/tools.py,sha256=WJowViZB8XEs2CFqjVvbqXK3H5Uvf4BgWgBD_RcHMaM,2319
  aioscrapy/utils/trackref.py,sha256=0nIpelT1d5WYxALl8SGA8vHNYsh-jS0Z2lwVEAhwx8E,2019
  aioscrapy/utils/url.py,sha256=8W8tAhU7lgfPOfzKp3ejJGEcLj1i_PnA_53Jv5LpxiY,5464
- aio_scrapy-2.1.0.dist-info/LICENSE,sha256=L-UoAEM3fQSjKA7FVWxQM7gwSCbeue6gZRAnpRS_UCo,1088
- aio_scrapy-2.1.0.dist-info/METADATA,sha256=ZZlawN0H5Ngxljj10IgIy9O7RU-y_RxJ9iTEkc_YNR8,6511
- aio_scrapy-2.1.0.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92
- aio_scrapy-2.1.0.dist-info/entry_points.txt,sha256=WWhoVHZvqhW8a5uFg97K0EP_GjG3uuCIFLkyqDICgaw,56
- aio_scrapy-2.1.0.dist-info/top_level.txt,sha256=8l08KyMt22wfX_5BmhrGH0PgwZdzZIPq-hBUa1GNir4,10
- aio_scrapy-2.1.0.dist-info/RECORD,,
+ aio_scrapy-2.1.2.dist-info/LICENSE,sha256=L-UoAEM3fQSjKA7FVWxQM7gwSCbeue6gZRAnpRS_UCo,1088
+ aio_scrapy-2.1.2.dist-info/METADATA,sha256=jsrkNFC96CZ99taB-pViCiU0SVOa4FOqBbSXziaXwNg,6506
+ aio_scrapy-2.1.2.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92
+ aio_scrapy-2.1.2.dist-info/entry_points.txt,sha256=WWhoVHZvqhW8a5uFg97K0EP_GjG3uuCIFLkyqDICgaw,56
+ aio_scrapy-2.1.2.dist-info/top_level.txt,sha256=8l08KyMt22wfX_5BmhrGH0PgwZdzZIPq-hBUa1GNir4,10
+ aio_scrapy-2.1.2.dist-info/RECORD,,
aioscrapy/VERSION CHANGED
@@ -1 +1 @@
- 2.1.0
+ 2.1.2
aioscrapy/core/downloader/__init__.py CHANGED
@@ -138,14 +138,14 @@ class Downloader(BaseDownloader):
 
      @classmethod
      async def from_crawler(cls, crawler) -> "Downloader":
+         df = crawler.settings.get('DUPEFILTER_CLASS') and await load_instance(crawler.settings['DUPEFILTER_CLASS'], crawler=crawler)
+         crawler.spider.dupefilter = df  # Bind the dupefilter to the spider; its done() hook is invoked by DUPEFILTER_CLASS when parsing succeeds
          return cls(
              crawler,
              await call_helper(DownloadHandlerManager.for_crawler, crawler),
              await call_helper(DownloaderMiddlewareManager.from_crawler, crawler),
-             proxy=crawler.settings.get("PROXY_HANDLER") and await load_instance(crawler.settings["PROXY_HANDLER"],
-                                                                                 crawler=crawler),
-             dupefilter=crawler.settings.get('DUPEFILTER_CLASS') and await load_instance(
-                 crawler.settings['DUPEFILTER_CLASS'], crawler=crawler)
+             proxy=crawler.settings.get("PROXY_HANDLER") and await load_instance(crawler.settings["PROXY_HANDLER"], crawler=crawler),
+             dupefilter=df
          )
 
      async def fetch(self, request: Request) -> None:
@@ -204,6 +204,7 @@ class Downloader(BaseDownloader):
          slot.transferring.remove(request)
          slot.active.remove(request)
          self.active.remove(request)
+         self.dupefilter and not request.dont_filter and await self.dupefilter.done(request, done_type="request_done")
          if isinstance(result, Response):
              await self.signals.send_catch_log(signal=signals.response_downloaded,
                                                response=result,
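Note: the rewritten `from_crawler` now loads the dupefilter once and hangs it on the spider, so both the downloader (after a download, `done_type="request_done"`) and the scraper (after a successful parse, `done_type="parse_done"`) can report back to the same instance. A long-hand sketch of that wiring, assuming `load_instance` keeps its current import path (the helper name `build_dupefilter` is ours, not the package's):

```python
from aioscrapy.utils.misc import load_instance  # assumed location of load_instance

async def build_dupefilter(crawler):
    """Long-hand equivalent of the 'settings.get(...) and await load_instance(...)' guard."""
    if not crawler.settings.get('DUPEFILTER_CLASS'):
        return None  # no filter configured; the downloader gets dupefilter=None
    df = await load_instance(crawler.settings['DUPEFILTER_CLASS'], crawler=crawler)
    crawler.spider.dupefilter = df  # the scraper later calls df.done(..., "parse_done")
    return df
```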
aioscrapy/core/downloader/handlers/aiohttp.py CHANGED
@@ -4,9 +4,11 @@ import ssl
  from typing import Optional
 
  import aiohttp
+ from aiohttp.client_exceptions import ClientError
 
  from aioscrapy import Request
  from aioscrapy.core.downloader.handlers import BaseDownloadHandler
+ from aioscrapy.exceptions import DownloadError
  from aioscrapy.http import HtmlResponse
  from aioscrapy.settings import Settings
  from aioscrapy.utils.log import logger
@@ -32,6 +34,12 @@ class AioHttpDownloadHandler(BaseDownloadHandler):
          return self.session
 
      async def download_request(self, request: Request, _) -> HtmlResponse:
+         try:
+             return await self._download_request(request)
+         except ClientError as e:
+             raise DownloadError from e
+ 
+     async def _download_request(self, request: Request) -> HtmlResponse:
          kwargs = {
              'verify_ssl': request.meta.get('verify_ssl', self.verify_ssl),
              'timeout': request.meta.get('download_timeout', 180),
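Note: every handler touched in this release follows the same shape: the public `download_request` delegates to a private `_download_request` and re-raises the backend library's errors as the new `DownloadError`, so `retry.py` below only has to match one exception type. A minimal sketch of the pattern with a made-up backend (`FakeBackendError` and `SketchDownloadHandler` are illustrative, not aio-scrapy APIs):

```python
from aioscrapy.exceptions import DownloadError

class FakeBackendError(Exception):
    """Stand-in for whatever the HTTP library raises (ClientError, CurlError, ...)."""

class SketchDownloadHandler:
    async def download_request(self, request, _):
        try:
            return await self._download_request(request)
        except FakeBackendError as e:
            # 'from e' keeps the original library error reachable via __cause__
            raise DownloadError from e

    async def _download_request(self, request):
        raise FakeBackendError("connection reset by peer")  # placeholder body
```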
aioscrapy/core/downloader/handlers/curl_cffi.py CHANGED
@@ -1,9 +1,9 @@
- import ssl
- 
+ from curl_cffi.curl import CurlError
  from curl_cffi.requests import AsyncSession
 
  from aioscrapy import Request
  from aioscrapy.core.downloader.handlers import BaseDownloadHandler
+ from aioscrapy.exceptions import DownloadError
  from aioscrapy.http import HtmlResponse
  from aioscrapy.settings import Settings
  from aioscrapy.utils.log import logger
@@ -21,6 +21,12 @@ class CurlCffiDownloadHandler(BaseDownloadHandler):
          return cls(settings)
 
      async def download_request(self, request: Request, _) -> HtmlResponse:
+         try:
+             return await self._download_request(request)
+         except CurlError as e:
+             raise DownloadError from e
+ 
+     async def _download_request(self, request: Request) -> HtmlResponse:
          kwargs = {
              'timeout': self.settings.get('DOWNLOAD_TIMEOUT'),
              'cookies': dict(request.cookies),
@@ -52,8 +58,8 @@ class CurlCffiDownloadHandler(BaseDownloadHandler):
              str(response.url),
              status=response.status_code,
              headers=response.headers,
-             body=response.text,
-             cookies=dict(response.cookies),
+             body=response.content,
+             cookies={j.name: j.value or '' for j in response.cookies.jar},
              encoding=response.encoding
          )
 
aioscrapy/core/downloader/handlers/httpx.py CHANGED
@@ -1,9 +1,11 @@
  import ssl
 
  import httpx
+ from httpx import HTTPError as HttpxError
 
  from aioscrapy import Request
  from aioscrapy.core.downloader.handlers import BaseDownloadHandler
+ from aioscrapy.exceptions import DownloadError
  from aioscrapy.http import HtmlResponse
  from aioscrapy.settings import Settings
  from aioscrapy.utils.log import logger
@@ -27,6 +29,12 @@ class HttpxDownloadHandler(BaseDownloadHandler):
          return cls(settings)
 
      async def download_request(self, request: Request, _) -> HtmlResponse:
+         try:
+             return await self._download_request(request)
+         except HttpxError as e:
+             raise DownloadError from e
+ 
+     async def _download_request(self, request: Request) -> HtmlResponse:
          kwargs = {
              'timeout': self.settings.get('DOWNLOAD_TIMEOUT'),
              'cookies': dict(request.cookies),
@@ -68,7 +76,7 @@ class HttpxDownloadHandler(BaseDownloadHandler):
              status=response.status_code,
              headers=response.headers,
              body=content,
-             cookies=dict(response.cookies),
+             cookies={j.name: j.value or '' for j in response.cookies.jar},
              encoding=response.encoding
          )
 
aioscrapy/core/downloader/handlers/playwright/__init__.py CHANGED
@@ -1,11 +1,13 @@
  from functools import wraps
 
+ from playwright._impl._api_types import Error
  from playwright.async_api._generated import Response as EventResponse
 
- from aioscrapy import Request
+ from aioscrapy import Request, Spider
  from aioscrapy.core.downloader.handlers import BaseDownloadHandler
  from aioscrapy.core.downloader.handlers.playwright.driverpool import WebDriverPool
  from aioscrapy.core.downloader.handlers.playwright.webdriver import PlaywrightDriver
+ from aioscrapy.exceptions import DownloadError
  from aioscrapy.http import PlaywrightResponse
  from aioscrapy.settings import Settings
  from aioscrapy.utils.tools import call_helper
@@ -24,7 +26,13 @@ class PlaywrightHandler(BaseDownloadHandler):
      def from_settings(cls, settings: Settings):
          return cls(settings)
 
-     async def download_request(self, request: Request, spider) -> PlaywrightResponse:
+     async def download_request(self, request: Request, spider: Spider) -> PlaywrightResponse:
+         try:
+             return await self._download_request(request, spider)
+         except Error as e:
+             raise DownloadError from e
+ 
+     async def _download_request(self, request: Request, spider) -> PlaywrightResponse:
          cookies = dict(request.cookies)
          timeout = request.meta.get('download_timeout', 30) * 1000
          user_agent = request.headers.get("User-Agent")
aioscrapy/core/downloader/handlers/pyhttpx.py CHANGED
@@ -1,9 +1,11 @@
  import asyncio
 
  import pyhttpx
+ from pyhttpx.exception import BaseExpetion as PyHttpxError
 
  from aioscrapy import Request
  from aioscrapy.core.downloader.handlers import BaseDownloadHandler
+ from aioscrapy.exceptions import DownloadError
  from aioscrapy.http import HtmlResponse
  from aioscrapy.settings import Settings
  from aioscrapy.utils.log import logger
@@ -22,6 +24,12 @@ class PyhttpxDownloadHandler(BaseDownloadHandler):
          return cls(settings)
 
      async def download_request(self, request: Request, _) -> HtmlResponse:
+         try:
+             return await self._download_request(request)
+         except PyHttpxError as e:
+             raise DownloadError from e
+ 
+     async def _download_request(self, request: Request) -> HtmlResponse:
          kwargs = {
              'timeout': self.settings.get('DOWNLOAD_TIMEOUT'),
              'cookies': dict(request.cookies),
aioscrapy/core/downloader/handlers/requests.py CHANGED
@@ -1,9 +1,11 @@
  import asyncio
 
  import requests
+ from requests.exceptions import RequestException as RequestsError
 
  from aioscrapy import Request
  from aioscrapy.core.downloader.handlers import BaseDownloadHandler
+ from aioscrapy.exceptions import DownloadError
  from aioscrapy.http import HtmlResponse
  from aioscrapy.settings import Settings
  from aioscrapy.utils.log import logger
@@ -21,6 +23,12 @@ class RequestsDownloadHandler(BaseDownloadHandler):
          return cls(settings)
 
      async def download_request(self, request: Request, _) -> HtmlResponse:
+         try:
+             return await self._download_request(request)
+         except RequestsError as e:
+             raise DownloadError from e
+ 
+     async def _download_request(self, request: Request) -> HtmlResponse:
          kwargs = {
              'timeout': self.settings.get('DOWNLOAD_TIMEOUT'),
              'cookies': dict(request.cookies),
@@ -48,7 +56,7 @@ class RequestsDownloadHandler(BaseDownloadHandler):
              status=response.status_code,
              headers=response.headers,
              body=response.content,
-             cookies=dict(response.cookies),
+             cookies={k: v or '' for k, v in response.cookies.items()},
              encoding=response.encoding
          )
 
aioscrapy/core/scheduler.py CHANGED
@@ -191,7 +191,6 @@ class Scheduler(BaseScheduler):
              self.stats.inc_value(self.queue.inc_key, spider=self.spider)
              yield request
 
- 
      async def has_pending_requests(self) -> bool:
          return await call_helper(self.queue.len) if self.cache_queue is None \
              else (await call_helper(self.queue.len) + await call_helper(self.cache_queue.len)) > 0
aioscrapy/core/scraper.py CHANGED
@@ -110,8 +110,8 @@ class Scraper:
                  await self.handle_spider_error(e, request, result)
              else:
                  await self.handle_spider_output(output, request, result)
-         except BaseException:
-             logger.exception('Scraper bug processing %(request)s' % {'request': request})
+         except BaseException as e:
+             await self.handle_spider_error(e, request, result)
          finally:
              if isinstance(result, PlaywrightResponse):
                  await result.release()
@@ -161,17 +161,23 @@ class Scraper:
          """Iter each Request/Item (given in the output parameter) returned from the given spider"""
          if not result:
              return
- 
+         parser_successful = True
          while True:
              try:
                  output = await result.__anext__()
              except StopAsyncIteration:
                  break
              except Exception as e:
+                 parser_successful = False
                  await self.handle_spider_error(e, request, response)
              else:
                  await self._process_spidermw_output(output, request, response)
 
+         self.spider.dupefilter and \
+             not request.dont_filter and \
+             parser_successful and \
+             await self.spider.dupefilter.done(request, done_type="parse_done")
+ 
      async def _process_spidermw_output(self, output: Any, request: Request, response: Response) -> None:
          """Process each Request/Item (given in the output parameter) returned from the given spider"""
 
aioscrapy/db/aiomongo.py CHANGED
@@ -1,7 +1,9 @@
  from motor.motor_asyncio import AsyncIOMotorClient
+ from pymongo.errors import NetworkTimeout
 
  import aioscrapy
  from aioscrapy.db.absmanager import AbsDBPoolManager
+ from loguru import logger
 
 
  class MongoExecutor:
@@ -9,10 +11,16 @@ class MongoExecutor:
          self.alias = alias
          self.pool_manager = pool_manager
 
-     async def insert(self, table_name, values, db_name=None):
+     async def insert(self, table_name, values, db_name=None, ordered=False, retry_times=3):
          client, db_name_default = self.pool_manager.get_pool(self.alias)
          db_name = db_name or db_name_default
-         return await client[f'{db_name}'][f'{table_name}'].insert_many(values)
+         for _ in range(retry_times):
+             try:
+                 return await client[f'{db_name}'][f'{table_name}'].insert_many(values, ordered=ordered)
+             except NetworkTimeout:
+                 logger.warning("mongo insert error by NetworkTimeout, retrying...")
+ 
+         raise NetworkTimeout
 
      def __getattr__(self, table_name: str):
          client, db_name_default = self.pool_manager.get_pool(self.alias)
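Note: `insert()` now retries the whole batch on `pymongo.errors.NetworkTimeout` and re-raises it once the retries are exhausted. A hedged usage sketch, assuming a configured `'default'` mongo alias (the database and collection names are illustrative):

```python
from aioscrapy.db import db_manager

async def save_items(items: list):
    executor = db_manager.mongo.executor('default')
    # ordered=False keeps inserting the remaining documents if one fails;
    # retry_times re-runs the whole batch up to three times on NetworkTimeout.
    await executor.insert('quotes', items, db_name='scrapy_db',
                          ordered=False, retry_times=3)
```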
aioscrapy/dupefilters/__init__.py CHANGED
@@ -1,3 +1,4 @@
+ from typing import Literal
  from abc import ABCMeta, abstractmethod
 
  from aioscrapy import Request, Spider
@@ -37,3 +38,6 @@ class DupeFilterBase(metaclass=ABCMeta):
              self.logdupes = False
 
          spider.crawler.stats.inc_value('dupefilter/filtered', spider=spider)
+ 
+     async def done(self, request: Request, done_type: Literal["request_done", "parse_done"]) -> None:
+         """Handle the request fingerprint when the task completes successfully."""
aioscrapy/dupefilters/redis.py CHANGED
@@ -1,3 +1,5 @@
+ from typing import Literal
+ 
  from aioscrapy import Request
  from aioscrapy.db import db_manager
  from aioscrapy.dupefilters import DupeFilterBase
@@ -128,5 +130,50 @@ class RedisBloomDupeFilter(RedisRFPDupeFilter):
          return False
 
 
+ class RedisBloomSetDupeFilter(RedisBloomDupeFilter):
+ 
+     def __init__(self, server, key, key_set, ttl, debug, bit, hash_number, keep_on_close, info):
+         super().__init__(server, key, debug, bit, hash_number, keep_on_close, info)
+         self.key_set = key_set
+         self.ttl = ttl
+ 
+     @classmethod
+     async def from_crawler(cls, crawler: "aioscrapy.crawler.Crawler"):
+         server = db_manager.redis.queue
+         dupefilter_key = crawler.settings.get("SCHEDULER_DUPEFILTER_KEY", '%(spider)s:bloomfilter')
+         keep_on_close = crawler.settings.getbool("KEEP_DUPEFILTER_DATA_ON_CLOSE", True)
+         key = dupefilter_key % {'spider': crawler.spider.name}
+         debug = crawler.settings.getbool('DUPEFILTER_DEBUG', False)
+         info = crawler.settings.getbool('DUPEFILTER_INFO', False)
+         bit = crawler.settings.getint('BLOOMFILTER_BIT', 30)
+         hash_number = crawler.settings.getint('BLOOMFILTER_HASH_NUMBER', 6)
+         ttl = crawler.settings.getint('DUPEFILTER_SET_KEY_TTL', 180)
+         return cls(server, key=key, key_set=key + "_set", ttl=ttl, debug=debug, bit=bit, hash_number=hash_number,
+                    keep_on_close=keep_on_close, info=info)
+ 
+     async def request_seen(self, request: Request) -> bool:
+         fp = await self.bf.exists(request.fingerprint)
+         if fp:
+             return True
+         async with self.server.pipeline() as pipe:
+             pipe.sadd(self.key_set, request.fingerprint)
+             pipe.expire(self.key_set, self.ttl)
+             ret, _ = await pipe.execute()
+         return ret == 0
+ 
+     async def done(self, request: Request, done_type: Literal["request_done", "parse_done"]):
+         print(done_type)
+         if done_type == "request_done":
+             await self.server.srem(self.key_set, request.fingerprint)
+         elif done_type == "parse_done":
+             await self.bf.insert(request.fingerprint)
+ 
+     async def close(self, reason=''):
+         if not self.keep_on_close:
+             await self.clear()
+         await self.server.delete(self.key_set)
+ 
+ 
  RFPDupeFilter = RedisRFPDupeFilter
  BloomDupeFilter = RedisBloomDupeFilter
+ BloomSetDupeFilter = RedisBloomSetDupeFilter
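Note: `RedisBloomSetDupeFilter` parks fresh fingerprints in a TTL-limited Redis set and only commits them to the bloom filter after a successful parse, so a request that dies in flight becomes schedulable again once its set entry expires. Enabling it might look like this (a sketch: the setting names come from `from_crawler()` above, and the dotted path assumes the module aliases at the bottom of the file):

```python
# settings.py (sketch)
DUPEFILTER_CLASS = 'aioscrapy.dupefilters.redis.BloomSetDupeFilter'
DUPEFILTER_SET_KEY_TTL = 180         # seconds a pending fingerprint stays parked
BLOOMFILTER_BIT = 30                 # bloom filter sizing, unchanged from 2.1.0
BLOOMFILTER_HASH_NUMBER = 6
KEEP_DUPEFILTER_DATA_ON_CLOSE = True
```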
aioscrapy/exceptions.py CHANGED
@@ -91,3 +91,8 @@ class ContractFail(AssertionError):
 
  class ProxyException(Exception):
      pass
+ 
+ 
+ class DownloadError(Exception):
+     """Error raised while downloading a page."""
+     pass
aioscrapy/libs/downloader/retry.py CHANGED
@@ -10,70 +10,21 @@ Failed pages are collected on the scraping process and rescheduled at the end,
  once the spider has finished crawling all regular (non failed) pages.
  """
  from typing import Optional, Union
- from aioscrapy.exceptions import ProxyException
+ 
+ from anyio import EndOfStream
 
  try:
      from asyncio.exceptions import TimeoutError
  except:
      from concurrent.futures._base import TimeoutError
 
- NEED_RETRY_ERROR = (TimeoutError, ConnectionRefusedError, IOError, ProxyException)
- 
- try:
-     from aiohttp.client_exceptions import ClientError
- 
-     NEED_RETRY_ERROR += (ClientError,)
- except ImportError:
-     pass
- 
- try:
-     from anyio import EndOfStream
- 
-     NEED_RETRY_ERROR += (EndOfStream,)
- except ImportError:
-     pass
- 
- try:
-     from httpx import HTTPError as HttpxError
- 
-     NEED_RETRY_ERROR += (HttpxError,)
- except ImportError:
-     pass
- 
- try:
-     from pyhttpx.exception import BaseExpetion as PyHttpxError
- 
-     NEED_RETRY_ERROR += (PyHttpxError,)
- except ImportError:
-     pass
- 
- try:
-     from requests.exceptions import RequestException as RequestsError
- 
-     NEED_RETRY_ERROR += (RequestsError,)
- except ImportError:
-     pass
- 
- try:
-     from playwright._impl._api_types import Error as PlaywrightError
- 
-     NEED_RETRY_ERROR += (PlaywrightError,)
- except ImportError:
-     pass
- 
- 
- try:
-     from curl_cffi.curl import CurlError
- 
-     NEED_RETRY_ERROR += (CurlError,)
- except ImportError:
-     pass
- 
- from aioscrapy.exceptions import NotConfigured
+ from aioscrapy.exceptions import ProxyException, DownloadError, NotConfigured
  from aioscrapy.http.request import Request
  from aioscrapy.spiders import Spider
- from aioscrapy.utils.python import global_object_name
  from aioscrapy.utils.log import logger as retry_logger
+ from aioscrapy.utils.python import global_object_name
+ 
+ NEED_RETRY_ERROR = (TimeoutError, ConnectionRefusedError, IOError, ProxyException, DownloadError, EndOfStream)
 
 
  def get_retry_request(
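Note: with the handlers normalising their own errors, the ladder of optional imports collapses into a single tuple. Code that still wants the backend-specific error can reach it through `__cause__`, which `raise DownloadError from e` preserves. A sketch of a downloader middleware doing exactly that (the `process_exception` hook signature is assumed to match the usual middleware contract):

```python
from aioscrapy.exceptions import DownloadError
from aioscrapy.utils.log import logger

class DownloadErrorLoggerMiddleware:
    async def process_exception(self, request, exception, spider):
        if isinstance(exception, DownloadError):
            # __cause__ is the original library error (ClientError, CurlError,
            # HTTPError, ...) preserved by 'raise DownloadError from e'.
            logger.warning(f"{request} failed: {exception.__cause__!r}")
```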
aioscrapy/libs/pipelines/mongo.py CHANGED
@@ -9,6 +9,8 @@ class MongoPipeline(DBPipelineBase):
      def __init__(self, settings, db_type: str):
          super().__init__(settings, db_type)
          self.db_cache = {}
+         self.ordered_cache = {}
+         self.retry_times = settings.getint("MONGO_TIMEOUT_RETRY_TIMES", 3)
 
      @classmethod
      def from_settings(cls, settings):
@@ -17,17 +19,19 @@ class MongoPipeline(DBPipelineBase):
      def parse_item_to_cache(self, item: dict, save_info: dict):
          db_name = save_info.get('db_name')
          table_name = save_info.get('table_name')
+         ordered = save_info.get('ordered', False)
          assert table_name is not None, 'please set table_name'
          db_alias = save_info.get('db_alias', ['default'])
          if isinstance(db_alias, str):
              db_alias = [db_alias]
 
-         cache_key = ''.join(db_alias) + (db_name or '') + table_name
+         cache_key = ''.join(db_alias) + (db_name or '') + table_name + str(ordered)
 
          if self.table_cache.get(cache_key) is None:
              self.db_alias_cache[cache_key] = db_alias
              self.table_cache[cache_key] = table_name
              self.db_cache[cache_key] = db_name
+             self.ordered_cache[cache_key] = ordered
              self.item_cache[cache_key] = []
 
          self.item_cache[cache_key].append(item)
@@ -40,7 +44,8 @@ class MongoPipeline(DBPipelineBase):
          try:
              executor = db_manager.mongo.executor(alias)
              result = await executor.insert(
-                 table_name, self.item_cache[cache_key], db_name=self.db_cache[cache_key]
+                 table_name, self.item_cache[cache_key], db_name=self.db_cache[cache_key],
+                 ordered=self.ordered_cache[cache_key], retry_times=self.retry_times
              )
              logger.info(
                  f'table:{alias}->{table_name} sum:{len(self.item_cache[cache_key])} ok:{len(result.inserted_ids)}'
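Note: the `ordered` flag rides along in the item's `save_info` and is folded into the cache key, so ordered and unordered batches for the same collection are buffered and flushed separately; the retry budget is global, via settings. A sketch of the relevant knobs (how `save_info` gets attached to items depends on your project; the values are illustrative):

```python
# settings.py
MONGO_TIMEOUT_RETRY_TIMES = 3   # batch retries on pymongo NetworkTimeout

# The save_info dict consumed by parse_item_to_cache():
save_info = {
    'db_alias': ['default'],    # pool alias(es), a string or a list
    'db_name': 'scrapy_db',     # optional; falls back to the pool default
    'table_name': 'quotes',     # required
    'ordered': False,           # new in 2.1.2: unordered insert_many
}
```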
aioscrapy/spiders/__init__.py CHANGED
@@ -22,6 +22,7 @@ class Spider(object):
 
      name: Optional[str] = None
      proxy: Optional["aioscrapy.proxy.AbsProxy"] = None
+     dupefilter: Optional["aioscrapy.dupefilters.DupeFilterBase"] = None
      custom_settings: Optional[dict] = None
      stats: Optional[StatsCollector] = None
 
@@ -77,7 +78,7 @@ class Spider(object):
              yield Request(url)
 
      async def request_from_dict(self, d: dict):
-         """Override this method after integration; build Request objects from the queued JSON as appropriate"""
+         """Override this method when subclassing; build Request objects from the queued JSON as appropriate"""
          pass
 
      async def _parse(self, response: Response, **kwargs):
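Note: `request_from_dict` is the hook the docstring tells you to override: it turns a raw task dict pulled off the external queue into a `Request`. A minimal sketch (the fields of `d` are whatever your producer writes; `callback`, `meta` and `dont_filter` are assumed to follow the usual Request signature):

```python
from aioscrapy import Request, Spider

class QueueSpider(Spider):
    name = 'queue_spider'

    async def request_from_dict(self, d: dict):
        # d arrives from the external queue as a plain dict
        return Request(
            url=d['url'],
            callback=self.parse,
            meta=d.get('meta', {}),
            dont_filter=d.get('dont_filter', False),
        )
```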
aioscrapy/templates/spiders/single.tmpl CHANGED
@@ -24,11 +24,12 @@ class $classname(Spider):
          pass
 
      async def parse(self, response):
-         item = {
-             'author': quote.xpath('span/small/text()').get(),
-             'text': quote.css('span.text::text').get(),
-         }
-         yield item
+         for quote in response.css('div.quote'):
+             item = {
+                 'author': quote.xpath('span/small/text()').get(),
+                 'text': quote.css('span.text::text').get(),
+             }
+             yield item
 
      async def process_item(self, item):
          logger.info(item)
aioscrapy/utils/python.py CHANGED
@@ -1,16 +1,12 @@
  """
  This module contains essential stuff that should've come with Python itself ;)
  """
- import errno
  import gc
- import inspect
  import re
  import sys
- import warnings
  import weakref
- from functools import partial, wraps
+ from functools import wraps
 
- from aioscrapy.exceptions import AioScrapyDeprecationWarning
  from aioscrapy.utils.decorators import deprecated
 
 
@@ -150,4 +146,3 @@ if hasattr(sys, "pypy_version_info"):
  else:
      def garbage_collect():
          gc.collect()
- 