crawlo 1.0.9__py3-none-any.whl → 1.1.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of crawlo has been flagged as potentially problematic.

Files changed (111)
  1. crawlo/__init__.py +33 -24
  2. crawlo/__version__.py +1 -1
  3. crawlo/cli.py +40 -40
  4. crawlo/commands/__init__.py +13 -13
  5. crawlo/commands/check.py +594 -106
  6. crawlo/commands/genspider.py +125 -110
  7. crawlo/commands/list.py +147 -92
  8. crawlo/commands/run.py +286 -181
  9. crawlo/commands/startproject.py +111 -101
  10. crawlo/commands/stats.py +188 -59
  11. crawlo/core/__init__.py +2 -2
  12. crawlo/core/engine.py +158 -158
  13. crawlo/core/processor.py +40 -40
  14. crawlo/core/scheduler.py +57 -57
  15. crawlo/crawler.py +494 -492
  16. crawlo/downloader/__init__.py +78 -78
  17. crawlo/downloader/aiohttp_downloader.py +199 -199
  18. crawlo/downloader/cffi_downloader.py +242 -277
  19. crawlo/downloader/httpx_downloader.py +246 -246
  20. crawlo/event.py +11 -11
  21. crawlo/exceptions.py +78 -78
  22. crawlo/extension/__init__.py +31 -31
  23. crawlo/extension/log_interval.py +49 -49
  24. crawlo/extension/log_stats.py +44 -44
  25. crawlo/extension/logging_extension.py +34 -34
  26. crawlo/filters/__init__.py +37 -37
  27. crawlo/filters/aioredis_filter.py +150 -150
  28. crawlo/filters/memory_filter.py +202 -202
  29. crawlo/items/__init__.py +23 -23
  30. crawlo/items/base.py +21 -21
  31. crawlo/items/fields.py +53 -53
  32. crawlo/items/items.py +104 -104
  33. crawlo/middleware/__init__.py +21 -21
  34. crawlo/middleware/default_header.py +32 -32
  35. crawlo/middleware/download_delay.py +28 -28
  36. crawlo/middleware/middleware_manager.py +135 -135
  37. crawlo/middleware/proxy.py +245 -245
  38. crawlo/middleware/request_ignore.py +30 -30
  39. crawlo/middleware/response_code.py +18 -18
  40. crawlo/middleware/response_filter.py +26 -26
  41. crawlo/middleware/retry.py +90 -90
  42. crawlo/network/__init__.py +7 -7
  43. crawlo/network/request.py +203 -203
  44. crawlo/network/response.py +166 -166
  45. crawlo/pipelines/__init__.py +13 -13
  46. crawlo/pipelines/console_pipeline.py +39 -39
  47. crawlo/pipelines/mongo_pipeline.py +116 -116
  48. crawlo/pipelines/mysql_batch_pipline.py +272 -272
  49. crawlo/pipelines/mysql_pipeline.py +195 -195
  50. crawlo/pipelines/pipeline_manager.py +56 -56
  51. crawlo/project.py +153 -0
  52. crawlo/settings/__init__.py +7 -7
  53. crawlo/settings/default_settings.py +166 -168
  54. crawlo/settings/setting_manager.py +99 -99
  55. crawlo/spider/__init__.py +129 -129
  56. crawlo/stats_collector.py +59 -59
  57. crawlo/subscriber.py +106 -106
  58. crawlo/task_manager.py +27 -27
  59. crawlo/templates/crawlo.cfg.tmpl +10 -10
  60. crawlo/templates/project/__init__.py.tmpl +3 -3
  61. crawlo/templates/project/items.py.tmpl +17 -17
  62. crawlo/templates/project/middlewares.py.tmpl +75 -75
  63. crawlo/templates/project/pipelines.py.tmpl +63 -63
  64. crawlo/templates/project/settings.py.tmpl +54 -54
  65. crawlo/templates/project/spiders/__init__.py.tmpl +5 -5
  66. crawlo/templates/spider/spider.py.tmpl +31 -31
  67. crawlo/utils/__init__.py +7 -7
  68. crawlo/utils/date_tools.py +233 -233
  69. crawlo/utils/db_helper.py +343 -343
  70. crawlo/utils/func_tools.py +82 -82
  71. crawlo/utils/log.py +128 -128
  72. crawlo/utils/pqueue.py +173 -173
  73. crawlo/utils/request.py +267 -267
  74. crawlo/utils/spider_loader.py +62 -62
  75. crawlo/utils/system.py +11 -11
  76. crawlo/utils/tools.py +4 -4
  77. crawlo/utils/url.py +39 -39
  78. crawlo-1.1.1.dist-info/METADATA +220 -0
  79. crawlo-1.1.1.dist-info/RECORD +100 -0
  80. examples/__init__.py +7 -0
  81. examples/baidu_spider/__init__.py +7 -0
  82. examples/baidu_spider/demo.py +94 -0
  83. examples/baidu_spider/items.py +46 -0
  84. examples/baidu_spider/middleware.py +49 -0
  85. examples/baidu_spider/pipeline.py +55 -0
  86. examples/baidu_spider/run.py +27 -0
  87. examples/baidu_spider/settings.py +121 -0
  88. examples/baidu_spider/spiders/__init__.py +7 -0
  89. examples/baidu_spider/spiders/bai_du.py +61 -0
  90. examples/baidu_spider/spiders/miit.py +159 -0
  91. examples/baidu_spider/spiders/sina.py +79 -0
  92. tests/__init__.py +7 -7
  93. tests/test_proxy_health_check.py +32 -32
  94. tests/test_proxy_middleware_integration.py +136 -136
  95. tests/test_proxy_providers.py +56 -56
  96. tests/test_proxy_stats.py +19 -19
  97. tests/test_proxy_strategies.py +59 -59
  98. crawlo/utils/concurrency_manager.py +0 -125
  99. crawlo/utils/project.py +0 -197
  100. crawlo-1.0.9.dist-info/METADATA +0 -49
  101. crawlo-1.0.9.dist-info/RECORD +0 -97
  102. examples/gxb/__init__.py +0 -0
  103. examples/gxb/items.py +0 -36
  104. examples/gxb/run.py +0 -16
  105. examples/gxb/settings.py +0 -72
  106. examples/gxb/spider/__init__.py +0 -0
  107. examples/gxb/spider/miit_spider.py +0 -180
  108. examples/gxb/spider/telecom_device.py +0 -129
  109. {crawlo-1.0.9.dist-info → crawlo-1.1.1.dist-info}/WHEEL +0 -0
  110. {crawlo-1.0.9.dist-info → crawlo-1.1.1.dist-info}/entry_points.txt +0 -0
  111. {crawlo-1.0.9.dist-info → crawlo-1.1.1.dist-info}/top_level.txt +0 -0
@@ -1,150 +1,150 @@
- #!/usr/bin/python
- # -*- coding:UTF-8 -*-
- import aioredis
- from typing import Optional
- from crawlo import Request
- from crawlo.filters import BaseFilter
- from crawlo.utils.log import get_logger
- from crawlo.utils.request import request_fingerprint
-
-
- class AioRedisFilter(BaseFilter):
-     """Async request-deduplication filter backed by a Redis set (supports distributed crawling), with TTL and cleanup control."""
-
-     def __init__(
-             self,
-             redis_key: str,
-             client: aioredis.Redis,
-             stats: dict,
-             debug: bool,
-             log_level: str,
-             cleanup_fp: bool = False,
-             ttl: Optional[int] = None
-     ):
-         """Initialize the filter."""
-         self.logger = get_logger(self.__class__.__name__, log_level)
-         super().__init__(self.logger, stats, debug)
-
-         self.redis_key = redis_key
-         self.redis = client
-         self.cleanup_fp = cleanup_fp
-         self.ttl = ttl
-
-     @classmethod
-     def create_instance(cls, crawler) -> 'BaseFilter':
-         """Create a filter instance from the crawler settings."""
-         redis_url = crawler.settings.get('REDIS_URL', 'redis://localhost:6379')
-         decode_responses = crawler.settings.get_bool('DECODE_RESPONSES', False)
-         ttl_setting = crawler.settings.get_int('REDIS_TTL')
-
-         # Normalize the TTL setting
-         ttl = None
-         if ttl_setting is not None:
-             ttl = max(0, int(ttl_setting)) if ttl_setting > 0 else None
-
-         try:
-             redis_client = aioredis.from_url(
-                 redis_url,
-                 decode_responses=decode_responses,
-                 max_connections=20,
-                 encoding='utf-8'
-             )
-         except Exception as e:
-             raise RuntimeError(f"Redis connection failed: {redis_url} - {str(e)}")
-
-         return cls(
-             redis_key=f"{crawler.settings.get('PROJECT_NAME', 'default')}:{crawler.settings.get('REDIS_KEY', 'request_fingerprints')}",
-             client=redis_client,
-             stats=crawler.stats,
-             cleanup_fp=crawler.settings.get_bool('CLEANUP_FP', False),
-             ttl=ttl,
-             debug=crawler.settings.get_bool('FILTER_DEBUG', False),
-             log_level=crawler.settings.get('LOG_LEVEL', 'INFO')
-         )
-
-     async def requested(self, request: Request) -> bool:
-         """Check whether the request has been seen before."""
-         try:
-             fp = str(request_fingerprint(request))
-
-             # 1. Check whether the fingerprint already exists
-             pipe = self.redis.pipeline()
-             pipe.sismember(self.redis_key, fp)  # queued on the pipeline, not awaited individually
-             exists = (await pipe.execute())[0]  # run the pipeline and read the result
-
-             if exists:  # already seen, report it as a duplicate
-                 return True
-
-             # 2. Otherwise add the fingerprint and set the TTL
-             pipe = self.redis.pipeline()
-             pipe.sadd(self.redis_key, fp)  # queued on the pipeline, not awaited individually
-             if self.ttl and self.ttl > 0:
-                 pipe.expire(self.redis_key, self.ttl)  # queued on the pipeline, not awaited individually
-             await pipe.execute()  # run all queued commands at once
-
-             return False  # this is a new request
-
-         except Exception as e:
-             self.logger.error(f"Request check failed: {getattr(request, 'url', 'unknown URL')}")
-             raise
-
-     async def add_fingerprint(self, fp: str) -> bool:
-         """Add a new fingerprint to the Redis set."""
-         try:
-             fp = str(fp)
-             added = await self.redis.sadd(self.redis_key, fp)
-
-             if self.ttl and self.ttl > 0:
-                 await self.redis.expire(self.redis_key, self.ttl)
-
-             return added == 1
-         except Exception as e:
-             self.logger.error("Failed to add fingerprint")
-             raise
-
-     async def get_stats(self) -> dict:
-         """Return filter statistics."""
-         try:
-             count = await self.redis.scard(self.redis_key)
-             stats = {
-                 'Total fingerprints': count,
-                 'Redis key name': self.redis_key,
-                 'TTL config': f"{self.ttl}s" if self.ttl else "persistent"
-             }
-             stats.update(self.stats)
-             return stats
-         except Exception as e:
-             self.logger.error("Failed to collect filter statistics")
-             return self.stats
-
-     async def clear_all(self) -> int:
-         """Clear all fingerprint data."""
-         try:
-             deleted = await self.redis.delete(self.redis_key)
-             self.logger.info(f"Fingerprints cleared: {deleted}")
-             return deleted
-         except Exception as e:
-             self.logger.error("Failed to clear fingerprints")
-             raise
-
-     async def closed(self, reason: Optional[str] = None) -> None:
-         """Cleanup when the crawler shuts down."""
-         try:
-             if self.cleanup_fp:
-                 deleted = await self.redis.delete(self.redis_key)
-                 self.logger.info(f"Shutdown cleanup: deleted {deleted} fingerprints")
-             else:
-                 count = await self.redis.scard(self.redis_key)
-                 ttl_info = f"{self.ttl}s" if self.ttl else "persistent"
-                 self.logger.info(f"Fingerprints retained: {count} (TTL: {ttl_info})")
-         finally:
-             await self._close_redis()
-
-     async def _close_redis(self) -> None:
-         """Safely close the Redis connection."""
-         try:
-             if hasattr(self.redis, 'close'):
-                 await self.redis.close()
-             self.logger.debug("Redis connection closed")
-         except Exception as e:
-             self.logger.warning(f"Error while closing Redis: {e}")
+ #!/usr/bin/python
+ # -*- coding:UTF-8 -*-
+ import aioredis
+ from typing import Optional
+ from crawlo import Request
+ from crawlo.filters import BaseFilter
+ from crawlo.utils.log import get_logger
+ from crawlo.utils.request import request_fingerprint
+
+
+ class AioRedisFilter(BaseFilter):
+     """Async request-deduplication filter backed by a Redis set (supports distributed crawling), with TTL and cleanup control."""
+
+     def __init__(
+             self,
+             redis_key: str,
+             client: aioredis.Redis,
+             stats: dict,
+             debug: bool,
+             log_level: str,
+             cleanup_fp: bool = False,
+             ttl: Optional[int] = None
+     ):
+         """Initialize the filter."""
+         self.logger = get_logger(self.__class__.__name__, log_level)
+         super().__init__(self.logger, stats, debug)
+
+         self.redis_key = redis_key
+         self.redis = client
+         self.cleanup_fp = cleanup_fp
+         self.ttl = ttl
+
+     @classmethod
+     def create_instance(cls, crawler) -> 'BaseFilter':
+         """Create a filter instance from the crawler settings."""
+         redis_url = crawler.settings.get('REDIS_URL', 'redis://localhost:6379')
+         decode_responses = crawler.settings.get_bool('DECODE_RESPONSES', False)
+         ttl_setting = crawler.settings.get_int('REDIS_TTL')
+
+         # Normalize the TTL setting
+         ttl = None
+         if ttl_setting is not None:
+             ttl = max(0, int(ttl_setting)) if ttl_setting > 0 else None
+
+         try:
+             redis_client = aioredis.from_url(
+                 redis_url,
+                 decode_responses=decode_responses,
+                 max_connections=20,
+                 encoding='utf-8'
+             )
+         except Exception as e:
+             raise RuntimeError(f"Redis connection failed: {redis_url} - {str(e)}")
+
+         return cls(
+             redis_key=f"{crawler.settings.get('PROJECT_NAME', 'default')}:{crawler.settings.get('REDIS_KEY', 'request_fingerprints')}",
+             client=redis_client,
+             stats=crawler.stats,
+             cleanup_fp=crawler.settings.get_bool('CLEANUP_FP', False),
+             ttl=ttl,
+             debug=crawler.settings.get_bool('FILTER_DEBUG', False),
+             log_level=crawler.settings.get('LOG_LEVEL', 'INFO')
+         )
+
+     async def requested(self, request: Request) -> bool:
+         """Check whether the request has been seen before."""
+         try:
+             fp = str(request_fingerprint(request))
+
+             # 1. Check whether the fingerprint already exists
+             pipe = self.redis.pipeline()
+             pipe.sismember(self.redis_key, fp)  # queued on the pipeline, not awaited individually
+             exists = (await pipe.execute())[0]  # run the pipeline and read the result
+
+             if exists:  # already seen, report it as a duplicate
+                 return True
+
+             # 2. Otherwise add the fingerprint and set the TTL
+             pipe = self.redis.pipeline()
+             pipe.sadd(self.redis_key, fp)  # queued on the pipeline, not awaited individually
+             if self.ttl and self.ttl > 0:
+                 pipe.expire(self.redis_key, self.ttl)  # queued on the pipeline, not awaited individually
+             await pipe.execute()  # run all queued commands at once
+
+             return False  # this is a new request
+
+         except Exception as e:
+             self.logger.error(f"Request check failed: {getattr(request, 'url', 'unknown URL')}")
+             raise
+
+     async def add_fingerprint(self, fp: str) -> bool:
+         """Add a new fingerprint to the Redis set."""
+         try:
+             fp = str(fp)
+             added = await self.redis.sadd(self.redis_key, fp)
+
+             if self.ttl and self.ttl > 0:
+                 await self.redis.expire(self.redis_key, self.ttl)
+
+             return added == 1
+         except Exception as e:
+             self.logger.error("Failed to add fingerprint")
+             raise
+
+     async def get_stats(self) -> dict:
+         """Return filter statistics."""
+         try:
+             count = await self.redis.scard(self.redis_key)
+             stats = {
+                 'Total fingerprints': count,
+                 'Redis key name': self.redis_key,
+                 'TTL config': f"{self.ttl}s" if self.ttl else "persistent"
+             }
+             stats.update(self.stats)
+             return stats
+         except Exception as e:
+             self.logger.error("Failed to collect filter statistics")
+             return self.stats
+
+     async def clear_all(self) -> int:
+         """Clear all fingerprint data."""
+         try:
+             deleted = await self.redis.delete(self.redis_key)
+             self.logger.info(f"Fingerprints cleared: {deleted}")
+             return deleted
+         except Exception as e:
+             self.logger.error("Failed to clear fingerprints")
+             raise
+
+     async def closed(self, reason: Optional[str] = None) -> None:
+         """Cleanup when the crawler shuts down."""
+         try:
+             if self.cleanup_fp:
+                 deleted = await self.redis.delete(self.redis_key)
+                 self.logger.info(f"Shutdown cleanup: deleted {deleted} fingerprints")
+             else:
+                 count = await self.redis.scard(self.redis_key)
+                 ttl_info = f"{self.ttl}s" if self.ttl else "persistent"
+                 self.logger.info(f"Fingerprints retained: {count} (TTL: {ttl_info})")
+         finally:
+             await self._close_redis()
+
+     async def _close_redis(self) -> None:
+         """Safely close the Redis connection."""
+         try:
+             if hasattr(self.redis, 'close'):
+                 await self.redis.close()
+             self.logger.debug("Redis connection closed")
+         except Exception as e:
+             self.logger.warning(f"Error while closing Redis: {e}")
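
For orientation, here is a minimal usage sketch of the `AioRedisFilter` class shown in the diff above. It calls only the constructor and methods visible in the file (`add_fingerprint`, `get_stats`, `closed`); the Redis URL, key name, TTL value, and the direct `aioredis.from_url()` wiring are illustrative assumptions rather than crawlo defaults. Inside a real project this wiring is normally done by `create_instance()` from settings such as `REDIS_URL`, `REDIS_KEY`, `REDIS_TTL`, `CLEANUP_FP`, `FILTER_DEBUG`, and `LOG_LEVEL`.

```python
import asyncio

import aioredis  # the filter imports the standalone aioredis package, as the diff shows

from crawlo.filters.aioredis_filter import AioRedisFilter


async def main():
    # Assumed local Redis instance; decode_responses/encoding mirror create_instance() above.
    client = aioredis.from_url('redis://localhost:6379', decode_responses=False, encoding='utf-8')

    flt = AioRedisFilter(
        redis_key='demo_project:request_fingerprints',  # hypothetical "<PROJECT_NAME>:<REDIS_KEY>" value
        client=client,
        stats={},            # a plain dict stands in for crawler.stats in this sketch
        debug=False,
        log_level='INFO',
        cleanup_fp=True,     # delete the fingerprint set again when closed() runs
        ttl=3600,            # let the whole set expire an hour after the last insert
    )

    print(await flt.add_fingerprint('fp-123'))  # True: fingerprint was newly added
    print(await flt.add_fingerprint('fp-123'))  # False: fingerprint already present
    print(await flt.get_stats())                # count, key name and TTL, merged with BaseFilter stats

    await flt.closed(reason='finished')         # with cleanup_fp=True the set is deleted, then Redis is closed


if __name__ == '__main__':
    asyncio.run(main())
```

Because the fingerprint set lives in Redis under a `<PROJECT_NAME>:<REDIS_KEY>` key, multiple crawler processes pointed at the same Redis instance share the same deduplication state, which is what makes this filter usable for distributed crawling.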