crawlo 1.0.9__py3-none-any.whl → 1.1.1__py3-none-any.whl
This diff shows the content of publicly released versions of the package as published to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.
Potentially problematic release: this version of crawlo might be problematic.
- crawlo/__init__.py +33 -24
- crawlo/__version__.py +1 -1
- crawlo/cli.py +40 -40
- crawlo/commands/__init__.py +13 -13
- crawlo/commands/check.py +594 -106
- crawlo/commands/genspider.py +125 -110
- crawlo/commands/list.py +147 -92
- crawlo/commands/run.py +286 -181
- crawlo/commands/startproject.py +111 -101
- crawlo/commands/stats.py +188 -59
- crawlo/core/__init__.py +2 -2
- crawlo/core/engine.py +158 -158
- crawlo/core/processor.py +40 -40
- crawlo/core/scheduler.py +57 -57
- crawlo/crawler.py +494 -492
- crawlo/downloader/__init__.py +78 -78
- crawlo/downloader/aiohttp_downloader.py +199 -199
- crawlo/downloader/cffi_downloader.py +242 -277
- crawlo/downloader/httpx_downloader.py +246 -246
- crawlo/event.py +11 -11
- crawlo/exceptions.py +78 -78
- crawlo/extension/__init__.py +31 -31
- crawlo/extension/log_interval.py +49 -49
- crawlo/extension/log_stats.py +44 -44
- crawlo/extension/logging_extension.py +34 -34
- crawlo/filters/__init__.py +37 -37
- crawlo/filters/aioredis_filter.py +150 -150
- crawlo/filters/memory_filter.py +202 -202
- crawlo/items/__init__.py +23 -23
- crawlo/items/base.py +21 -21
- crawlo/items/fields.py +53 -53
- crawlo/items/items.py +104 -104
- crawlo/middleware/__init__.py +21 -21
- crawlo/middleware/default_header.py +32 -32
- crawlo/middleware/download_delay.py +28 -28
- crawlo/middleware/middleware_manager.py +135 -135
- crawlo/middleware/proxy.py +245 -245
- crawlo/middleware/request_ignore.py +30 -30
- crawlo/middleware/response_code.py +18 -18
- crawlo/middleware/response_filter.py +26 -26
- crawlo/middleware/retry.py +90 -90
- crawlo/network/__init__.py +7 -7
- crawlo/network/request.py +203 -203
- crawlo/network/response.py +166 -166
- crawlo/pipelines/__init__.py +13 -13
- crawlo/pipelines/console_pipeline.py +39 -39
- crawlo/pipelines/mongo_pipeline.py +116 -116
- crawlo/pipelines/mysql_batch_pipline.py +272 -272
- crawlo/pipelines/mysql_pipeline.py +195 -195
- crawlo/pipelines/pipeline_manager.py +56 -56
- crawlo/project.py +153 -0
- crawlo/settings/__init__.py +7 -7
- crawlo/settings/default_settings.py +166 -168
- crawlo/settings/setting_manager.py +99 -99
- crawlo/spider/__init__.py +129 -129
- crawlo/stats_collector.py +59 -59
- crawlo/subscriber.py +106 -106
- crawlo/task_manager.py +27 -27
- crawlo/templates/crawlo.cfg.tmpl +10 -10
- crawlo/templates/project/__init__.py.tmpl +3 -3
- crawlo/templates/project/items.py.tmpl +17 -17
- crawlo/templates/project/middlewares.py.tmpl +75 -75
- crawlo/templates/project/pipelines.py.tmpl +63 -63
- crawlo/templates/project/settings.py.tmpl +54 -54
- crawlo/templates/project/spiders/__init__.py.tmpl +5 -5
- crawlo/templates/spider/spider.py.tmpl +31 -31
- crawlo/utils/__init__.py +7 -7
- crawlo/utils/date_tools.py +233 -233
- crawlo/utils/db_helper.py +343 -343
- crawlo/utils/func_tools.py +82 -82
- crawlo/utils/log.py +128 -128
- crawlo/utils/pqueue.py +173 -173
- crawlo/utils/request.py +267 -267
- crawlo/utils/spider_loader.py +62 -62
- crawlo/utils/system.py +11 -11
- crawlo/utils/tools.py +4 -4
- crawlo/utils/url.py +39 -39
- crawlo-1.1.1.dist-info/METADATA +220 -0
- crawlo-1.1.1.dist-info/RECORD +100 -0
- examples/__init__.py +7 -0
- examples/baidu_spider/__init__.py +7 -0
- examples/baidu_spider/demo.py +94 -0
- examples/baidu_spider/items.py +46 -0
- examples/baidu_spider/middleware.py +49 -0
- examples/baidu_spider/pipeline.py +55 -0
- examples/baidu_spider/run.py +27 -0
- examples/baidu_spider/settings.py +121 -0
- examples/baidu_spider/spiders/__init__.py +7 -0
- examples/baidu_spider/spiders/bai_du.py +61 -0
- examples/baidu_spider/spiders/miit.py +159 -0
- examples/baidu_spider/spiders/sina.py +79 -0
- tests/__init__.py +7 -7
- tests/test_proxy_health_check.py +32 -32
- tests/test_proxy_middleware_integration.py +136 -136
- tests/test_proxy_providers.py +56 -56
- tests/test_proxy_stats.py +19 -19
- tests/test_proxy_strategies.py +59 -59
- crawlo/utils/concurrency_manager.py +0 -125
- crawlo/utils/project.py +0 -197
- crawlo-1.0.9.dist-info/METADATA +0 -49
- crawlo-1.0.9.dist-info/RECORD +0 -97
- examples/gxb/__init__.py +0 -0
- examples/gxb/items.py +0 -36
- examples/gxb/run.py +0 -16
- examples/gxb/settings.py +0 -72
- examples/gxb/spider/__init__.py +0 -0
- examples/gxb/spider/miit_spider.py +0 -180
- examples/gxb/spider/telecom_device.py +0 -129
- {crawlo-1.0.9.dist-info → crawlo-1.1.1.dist-info}/WHEEL +0 -0
- {crawlo-1.0.9.dist-info → crawlo-1.1.1.dist-info}/entry_points.txt +0 -0
- {crawlo-1.0.9.dist-info → crawlo-1.1.1.dist-info}/top_level.txt +0 -0
crawlo/filters/aioredis_filter.py
@@ -1,150 +1,150 @@
-#!/usr/bin/python
-# -*- coding:UTF-8 -*-
-import aioredis
-from typing import Optional
-from crawlo import Request
-from crawlo.filters import BaseFilter
-from crawlo.utils.log import get_logger
-from crawlo.utils.request import request_fingerprint
-
-
-class AioRedisFilter(BaseFilter):
-    """基于Redis集合实现的异步请求去重过滤器(支持分布式爬虫),提供TTL和清理控制"""
-
-    def __init__(
-        self,
-        redis_key: str,
-        client: aioredis.Redis,
-        stats: dict,
-        debug: bool,
-        log_level: str,
-        cleanup_fp: bool = False,
-        ttl: Optional[int] = None
-    ):
-        """初始化过滤器"""
-        self.logger = get_logger(self.__class__.__name__, log_level)
-        super().__init__(self.logger, stats, debug)
-
-        self.redis_key = redis_key
-        self.redis = client
-        self.cleanup_fp = cleanup_fp
-        self.ttl = ttl
-
-    @classmethod
-    def create_instance(cls, crawler) -> 'BaseFilter':
-        """从爬虫配置创建过滤器实例"""
-        redis_url = crawler.settings.get('REDIS_URL', 'redis://localhost:6379')
-        decode_responses = crawler.settings.get_bool('DECODE_RESPONSES', False)
-        ttl_setting = crawler.settings.get_int('REDIS_TTL')
-
-        # 处理TTL设置
-        ttl = None
-        if ttl_setting is not None:
-            ttl = max(0, int(ttl_setting)) if ttl_setting > 0 else None
-
-        try:
-            redis_client = aioredis.from_url(
-                redis_url,
-                decode_responses=decode_responses,
-                max_connections=20,
-                encoding='utf-8'
-            )
-        except Exception as e:
-            raise RuntimeError(f"Redis连接失败: {redis_url} - {str(e)}")
-
-        return cls(
-            redis_key=f"{crawler.settings.get('PROJECT_NAME', 'default')}:{crawler.settings.get('REDIS_KEY', 'request_fingerprints')}",
-            client=redis_client,
-            stats=crawler.stats,
-            cleanup_fp=crawler.settings.get_bool('CLEANUP_FP', False),
-            ttl=ttl,
-            debug=crawler.settings.get_bool('FILTER_DEBUG', False),
-            log_level=crawler.settings.get('LOG_LEVEL', 'INFO')
-        )
-
-    async def requested(self, request: Request) -> bool:
-        """检查请求是否已存在"""
-        try:
-            fp = str(request_fingerprint(request))
-
-            # 1. 检查指纹是否存在
-            pipe = self.redis.pipeline()
-            pipe.sismember(self.redis_key, fp)  # 不单独 await
-            exists = (await pipe.execute())[0]  # 执行并获取结果
-
-            if exists:  # 如果已存在,返回 True
-                return True
-
-            # 2. 如果不存在,添加指纹并设置 TTL
-            pipe = self.redis.pipeline()
-            pipe.sadd(self.redis_key, fp)  # 不单独 await
-            if self.ttl and self.ttl > 0:
-                pipe.expire(self.redis_key, self.ttl)  # 不单独 await
-            await pipe.execute()  # 一次性执行所有命令
-
-            return False  # 表示是新请求
-
-        except Exception as e:
-            self.logger.error(f"请求检查失败: {getattr(request, 'url', '未知URL')}")
-            raise
-
-    async def add_fingerprint(self, fp: str) -> bool:
-        """添加新指纹到Redis集合"""
-        try:
-            fp = str(fp)
-            added = await self.redis.sadd(self.redis_key, fp)
-
-            if self.ttl and self.ttl > 0:
-                await self.redis.expire(self.redis_key, self.ttl)
-
-            return added == 1
-        except Exception as e:
-            self.logger.error("添加指纹失败")
-            raise
-
-    async def get_stats(self) -> dict:
-        """获取过滤器统计信息"""
-        try:
-            count = await self.redis.scard(self.redis_key)
-            stats = {
-                '指纹总数': count,
-                'Redis键名': self.redis_key,
-                'TTL配置': f"{self.ttl}秒" if self.ttl else "持久化"
-            }
-            stats.update(self.stats)
-            return stats
-        except Exception as e:
-            self.logger.error("获取统计信息失败")
-            return self.stats
-
-    async def clear_all(self) -> int:
-        """清空所有指纹数据"""
-        try:
-            deleted = await self.redis.delete(self.redis_key)
-            self.logger.info(f"已清除指纹数: {deleted}")
-            return deleted
-        except Exception as e:
-            self.logger.error("清空指纹失败")
-            raise
-
-    async def closed(self, reason: Optional[str] = None) -> None:
-        """爬虫关闭时的清理操作"""
-        try:
-            if self.cleanup_fp:
-                deleted = await self.redis.delete(self.redis_key)
-                self.logger.info(f"爬虫关闭清理: 已删除{deleted}个指纹")
-            else:
-                count = await self.redis.scard(self.redis_key)
-                ttl_info = f"{self.ttl}秒" if self.ttl else "持久化"
-                self.logger.info(f"保留指纹数: {count} (TTL: {ttl_info})")
-        finally:
-            await self._close_redis()
-
-    async def _close_redis(self) -> None:
-        """安全关闭Redis连接"""
-        try:
-            if hasattr(self.redis, 'close'):
-                await self.redis.close()
-                self.logger.debug("Redis连接已关闭")
-        except Exception as e:
-            self.logger.warning(f"Redis关闭时出错:{e}")
+#!/usr/bin/python
+# -*- coding:UTF-8 -*-
+import aioredis
+from typing import Optional
+from crawlo import Request
+from crawlo.filters import BaseFilter
+from crawlo.utils.log import get_logger
+from crawlo.utils.request import request_fingerprint
+
+
+class AioRedisFilter(BaseFilter):
+    """基于Redis集合实现的异步请求去重过滤器(支持分布式爬虫),提供TTL和清理控制"""
+
+    def __init__(
+        self,
+        redis_key: str,
+        client: aioredis.Redis,
+        stats: dict,
+        debug: bool,
+        log_level: str,
+        cleanup_fp: bool = False,
+        ttl: Optional[int] = None
+    ):
+        """初始化过滤器"""
+        self.logger = get_logger(self.__class__.__name__, log_level)
+        super().__init__(self.logger, stats, debug)
+
+        self.redis_key = redis_key
+        self.redis = client
+        self.cleanup_fp = cleanup_fp
+        self.ttl = ttl
+
+    @classmethod
+    def create_instance(cls, crawler) -> 'BaseFilter':
+        """从爬虫配置创建过滤器实例"""
+        redis_url = crawler.settings.get('REDIS_URL', 'redis://localhost:6379')
+        decode_responses = crawler.settings.get_bool('DECODE_RESPONSES', False)
+        ttl_setting = crawler.settings.get_int('REDIS_TTL')
+
+        # 处理TTL设置
+        ttl = None
+        if ttl_setting is not None:
+            ttl = max(0, int(ttl_setting)) if ttl_setting > 0 else None
+
+        try:
+            redis_client = aioredis.from_url(
+                redis_url,
+                decode_responses=decode_responses,
+                max_connections=20,
+                encoding='utf-8'
+            )
+        except Exception as e:
+            raise RuntimeError(f"Redis连接失败: {redis_url} - {str(e)}")
+
+        return cls(
+            redis_key=f"{crawler.settings.get('PROJECT_NAME', 'default')}:{crawler.settings.get('REDIS_KEY', 'request_fingerprints')}",
+            client=redis_client,
+            stats=crawler.stats,
+            cleanup_fp=crawler.settings.get_bool('CLEANUP_FP', False),
+            ttl=ttl,
+            debug=crawler.settings.get_bool('FILTER_DEBUG', False),
+            log_level=crawler.settings.get('LOG_LEVEL', 'INFO')
+        )
+
+    async def requested(self, request: Request) -> bool:
+        """检查请求是否已存在"""
+        try:
+            fp = str(request_fingerprint(request))
+
+            # 1. 检查指纹是否存在
+            pipe = self.redis.pipeline()
+            pipe.sismember(self.redis_key, fp)  # 不单独 await
+            exists = (await pipe.execute())[0]  # 执行并获取结果
+
+            if exists:  # 如果已存在,返回 True
+                return True
+
+            # 2. 如果不存在,添加指纹并设置 TTL
+            pipe = self.redis.pipeline()
+            pipe.sadd(self.redis_key, fp)  # 不单独 await
+            if self.ttl and self.ttl > 0:
+                pipe.expire(self.redis_key, self.ttl)  # 不单独 await
+            await pipe.execute()  # 一次性执行所有命令
+
+            return False  # 表示是新请求
+
+        except Exception as e:
+            self.logger.error(f"请求检查失败: {getattr(request, 'url', '未知URL')}")
+            raise
+
+    async def add_fingerprint(self, fp: str) -> bool:
+        """添加新指纹到Redis集合"""
+        try:
+            fp = str(fp)
+            added = await self.redis.sadd(self.redis_key, fp)
+
+            if self.ttl and self.ttl > 0:
+                await self.redis.expire(self.redis_key, self.ttl)
+
+            return added == 1
+        except Exception as e:
+            self.logger.error("添加指纹失败")
+            raise
+
+    async def get_stats(self) -> dict:
+        """获取过滤器统计信息"""
+        try:
+            count = await self.redis.scard(self.redis_key)
+            stats = {
+                '指纹总数': count,
+                'Redis键名': self.redis_key,
+                'TTL配置': f"{self.ttl}秒" if self.ttl else "持久化"
+            }
+            stats.update(self.stats)
+            return stats
+        except Exception as e:
+            self.logger.error("获取统计信息失败")
+            return self.stats
+
+    async def clear_all(self) -> int:
+        """清空所有指纹数据"""
+        try:
+            deleted = await self.redis.delete(self.redis_key)
+            self.logger.info(f"已清除指纹数: {deleted}")
+            return deleted
+        except Exception as e:
+            self.logger.error("清空指纹失败")
+            raise
+
+    async def closed(self, reason: Optional[str] = None) -> None:
+        """爬虫关闭时的清理操作"""
+        try:
+            if self.cleanup_fp:
+                deleted = await self.redis.delete(self.redis_key)
+                self.logger.info(f"爬虫关闭清理: 已删除{deleted}个指纹")
+            else:
+                count = await self.redis.scard(self.redis_key)
+                ttl_info = f"{self.ttl}秒" if self.ttl else "持久化"
+                self.logger.info(f"保留指纹数: {count} (TTL: {ttl_info})")
+        finally:
+            await self._close_redis()
+
+    async def _close_redis(self) -> None:
+        """安全关闭Redis连接"""
+        try:
+            if hasattr(self.redis, 'close'):
+                await self.redis.close()
+                self.logger.debug("Redis连接已关闭")
+        except Exception as e:
+            self.logger.warning(f"Redis关闭时出错:{e}")
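
For reference, here is a minimal standalone sketch of the deduplication round trip that AioRedisFilter.requested() performs in the diff above: SISMEMBER to test the fingerprint, then SADD plus an optional EXPIRE, each queued on a pipeline and sent in a single round trip. The helper name seen_before, the example key, and the standalone main() are illustrative assumptions, not part of the crawlo package; only the REDIS_URL default and the PROJECT_NAME:REDIS_KEY key shape are taken from the diff.

import asyncio
from typing import Optional

import aioredis  # aioredis 2.x, the client the diff imports


async def seen_before(redis: aioredis.Redis, key: str, fp: str,
                      ttl: Optional[int] = None) -> bool:
    """Return True if the fingerprint is already recorded, otherwise add it."""
    # Step 1: membership test, queued on a pipeline as in requested()
    pipe = redis.pipeline()
    pipe.sismember(key, fp)
    if (await pipe.execute())[0]:
        return True

    # Step 2: record the new fingerprint and optionally refresh the set's TTL
    pipe = redis.pipeline()
    pipe.sadd(key, fp)
    if ttl and ttl > 0:
        pipe.expire(key, ttl)
    await pipe.execute()
    return False


async def main():
    # 'redis://localhost:6379' matches the REDIS_URL default in the diff
    redis = aioredis.from_url("redis://localhost:6379", decode_responses=True)
    key = "default:request_fingerprints"  # PROJECT_NAME:REDIS_KEY, as built in create_instance()
    print(await seen_before(redis, key, "fp-abc123", ttl=3600))  # False: first sighting
    print(await seen_before(redis, key, "fp-abc123", ttl=3600))  # True: duplicate
    await redis.close()


if __name__ == "__main__":
    asyncio.run(main())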