crawlo 1.0.3__py3-none-any.whl → 1.0.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.



Files changed (96)
  1. crawlo/__init__.py +25 -9
  2. crawlo/__version__.py +1 -1
  3. crawlo/core/__init__.py +2 -2
  4. crawlo/core/engine.py +158 -158
  5. crawlo/core/processor.py +40 -40
  6. crawlo/core/scheduler.py +57 -57
  7. crawlo/crawler.py +424 -242
  8. crawlo/downloader/__init__.py +78 -78
  9. crawlo/downloader/aiohttp_downloader.py +200 -259
  10. crawlo/downloader/cffi_downloader.py +277 -0
  11. crawlo/downloader/httpx_downloader.py +246 -187
  12. crawlo/event.py +11 -11
  13. crawlo/exceptions.py +73 -64
  14. crawlo/extension/__init__.py +31 -31
  15. crawlo/extension/log_interval.py +49 -49
  16. crawlo/extension/log_stats.py +44 -44
  17. crawlo/extension/logging_extension.py +35 -0
  18. crawlo/filters/__init__.py +37 -37
  19. crawlo/filters/aioredis_filter.py +150 -158
  20. crawlo/filters/memory_filter.py +202 -202
  21. crawlo/items/__init__.py +62 -62
  22. crawlo/items/items.py +115 -119
  23. crawlo/middleware/__init__.py +21 -21
  24. crawlo/middleware/default_header.py +32 -32
  25. crawlo/middleware/download_delay.py +28 -28
  26. crawlo/middleware/middleware_manager.py +135 -140
  27. crawlo/middleware/proxy.py +246 -0
  28. crawlo/middleware/request_ignore.py +30 -30
  29. crawlo/middleware/response_code.py +18 -18
  30. crawlo/middleware/response_filter.py +26 -26
  31. crawlo/middleware/retry.py +90 -90
  32. crawlo/network/__init__.py +7 -7
  33. crawlo/network/request.py +203 -204
  34. crawlo/network/response.py +166 -166
  35. crawlo/pipelines/__init__.py +13 -13
  36. crawlo/pipelines/console_pipeline.py +39 -39
  37. crawlo/pipelines/mongo_pipeline.py +116 -116
  38. crawlo/pipelines/mysql_batch_pipline.py +273 -134
  39. crawlo/pipelines/mysql_pipeline.py +195 -195
  40. crawlo/pipelines/pipeline_manager.py +56 -56
  41. crawlo/settings/__init__.py +7 -7
  42. crawlo/settings/default_settings.py +169 -93
  43. crawlo/settings/setting_manager.py +99 -99
  44. crawlo/spider/__init__.py +41 -36
  45. crawlo/stats_collector.py +59 -59
  46. crawlo/subscriber.py +106 -106
  47. crawlo/task_manager.py +27 -27
  48. crawlo/templates/item_template.tmpl +21 -21
  49. crawlo/templates/project_template/main.py +32 -32
  50. crawlo/templates/project_template/setting.py +189 -189
  51. crawlo/templates/spider_template.tmpl +30 -30
  52. crawlo/utils/__init__.py +7 -7
  53. crawlo/utils/concurrency_manager.py +124 -124
  54. crawlo/utils/date_tools.py +233 -177
  55. crawlo/utils/db_helper.py +344 -0
  56. crawlo/utils/func_tools.py +82 -82
  57. crawlo/utils/log.py +129 -39
  58. crawlo/utils/pqueue.py +173 -173
  59. crawlo/utils/project.py +59 -59
  60. crawlo/utils/request.py +267 -122
  61. crawlo/utils/system.py +11 -11
  62. crawlo/utils/tools.py +5 -303
  63. crawlo/utils/url.py +39 -39
  64. {crawlo-1.0.3.dist-info → crawlo-1.0.5.dist-info}/METADATA +49 -48
  65. crawlo-1.0.5.dist-info/RECORD +84 -0
  66. {crawlo-1.0.3.dist-info → crawlo-1.0.5.dist-info}/top_level.txt +1 -0
  67. examples/__init__.py +0 -0
  68. examples/gxb/__init__.py +0 -0
  69. examples/gxb/items.py +36 -0
  70. examples/gxb/run.py +15 -0
  71. examples/gxb/settings.py +71 -0
  72. examples/gxb/spider/__init__.py +0 -0
  73. examples/gxb/spider/miit_spider.py +180 -0
  74. examples/gxb/spider/telecom_device_licenses.py +129 -0
  75. tests/__init__.py +7 -7
  76. tests/test_proxy_health_check.py +33 -0
  77. tests/test_proxy_middleware_integration.py +137 -0
  78. tests/test_proxy_providers.py +57 -0
  79. tests/test_proxy_stats.py +20 -0
  80. tests/test_proxy_strategies.py +60 -0
  81. crawlo/downloader/playwright_downloader.py +0 -161
  82. crawlo/filters/redis_filter.py +0 -120
  83. crawlo-1.0.3.dist-info/RECORD +0 -80
  84. tests/baidu_spider/__init__.py +0 -7
  85. tests/baidu_spider/demo.py +0 -94
  86. tests/baidu_spider/items.py +0 -25
  87. tests/baidu_spider/middleware.py +0 -49
  88. tests/baidu_spider/pipeline.py +0 -55
  89. tests/baidu_spider/request_fingerprints.txt +0 -9
  90. tests/baidu_spider/run.py +0 -27
  91. tests/baidu_spider/settings.py +0 -78
  92. tests/baidu_spider/spiders/__init__.py +0 -7
  93. tests/baidu_spider/spiders/bai_du.py +0 -61
  94. tests/baidu_spider/spiders/sina.py +0 -79
  95. {crawlo-1.0.3.dist-info → crawlo-1.0.5.dist-info}/WHEEL +0 -0
  96. {crawlo-1.0.3.dist-info → crawlo-1.0.5.dist-info}/entry_points.txt +0 -0
crawlo/filters/aioredis_filter.py
@@ -1,158 +1,150 @@
- #!/usr/bin/python
- # -*- coding:UTF-8 -*-
- from typing import Optional
- import aioredis
- from crawlo import Request
- from crawlo.filters import BaseFilter
- from crawlo.utils.log import get_logger
- from crawlo.utils.request import request_fingerprint
-
-
- class AioRedisFilter(BaseFilter):
-     """Asynchronous request-deduplication filter backed by a Redis set (for distributed crawlers)."""
-
-     def __init__(
-         self,
-         redis_key: str,
-         client: aioredis.Redis,
-         stats: dict,
-         debug: bool,
-         log_level: str,
-         cleanup_fp: bool = False
-     ):
-         """Initialize the filter."""
-         self.logger = get_logger(self.__class__.__name__, log_level)
-         super().__init__(self.logger, stats, debug)
-
-         self.redis_key = redis_key
-         self.redis = client
-         self.cleanup_fp = cleanup_fp
-
-     @classmethod
-     def create_instance(cls, crawler) -> 'BaseFilter':
-         """Create a filter instance from the crawler settings."""
-         redis_url = crawler.settings.get('REDIS_URL', 'redis://localhost:6379')
-         decode_responses = crawler.settings.get_bool('DECODE_RESPONSES', False)  # important: keep this False
-
-         try:
-             redis_client = aioredis.from_url(
-                 redis_url,
-                 decode_responses=decode_responses,
-                 max_connections=20,
-                 encoding='utf-8'
-             )
-         except Exception as e:
-             raise RuntimeError(f"Redis connection failed {redis_url}: {str(e)}")
-
-         return cls(
-             redis_key=f"{crawler.settings.get('PROJECT_NAME', 'default')}:{crawler.settings.get('REDIS_KEY', 'request_fingerprints')}",
-             client=redis_client,
-             stats=crawler.stats,
-             cleanup_fp=crawler.settings.get_bool('CLEANUP_FP', False),
-             debug=crawler.settings.get_bool('FILTER_DEBUG', False),
-             log_level=crawler.settings.get('LOG_LEVEL', 'INFO')
-         )
-
-     async def requested(self, request: Request) -> bool:
-         """
-         Check whether the request is a duplicate.
-         """
-         try:
-             fp = request_fingerprint(request)
-             self.logger.debug(f"Checking fingerprint: {fp}")
-
-             # Make sure fp is a string
-             if not isinstance(fp, str):
-                 fp = str(fp)
-
-             # Check the Redis connection state
-             if not self.redis:
-                 raise RuntimeError("Redis client is not initialized")
-
-             # Check whether the fingerprint already exists
-             is_member = await self.redis.sismember(self.redis_key, fp)
-             self.logger.debug(f"Fingerprint {fp} exists: {is_member}")
-
-             if is_member:
-                 if self.debug:
-                     self.logger.debug(f"Filtered duplicate request: {fp}")
-                 return True
-
-             # Add the new fingerprint
-             result = await self.redis.sadd(self.redis_key, fp)
-
-             if self.debug:
-                 if result == 1:
-                     self.logger.debug(f"Added new fingerprint: {fp}")
-                 else:
-                     self.logger.warning(f"Failed to add fingerprint: {fp}")
-
-             return False
-
-         except Exception as e:
-             self.logger.error(f"Filter check failed for {getattr(request, 'url', 'unknown')}: {str(e)}")
-             # Either re-raise here or return False (i.e. do not filter)
-             raise
-
-     async def add_fingerprint(self, fp: str) -> bool:
-         """Add a new fingerprint to the Redis set."""
-         try:
-             if not isinstance(fp, str):
-                 fp = str(fp)
-
-             result = await self.redis.sadd(self.redis_key, fp)
-             if self.debug:
-                 self.logger.debug(f"Added fingerprint {fp}, result: {result}")
-             return result == 1
-         except Exception as e:
-             self.logger.error(f"Failed to add fingerprint {fp}: {str(e)}")
-             raise
-
-     async def get_stats(self) -> dict:
-         """Get the current filter statistics."""
-         try:
-             count = await self.redis.scard(self.redis_key)
-             return {
-                 'total_fingerprints': count,
-                 'redis_key': self.redis_key,
-                 **self.stats
-             }
-         except Exception as e:
-             self.logger.error(f"Failed to get stats: {str(e)}")
-             return self.stats
-
-     async def clear_all(self) -> int:
-         """Clear all fingerprint data."""
-         try:
-             deleted = await self.redis.delete(self.redis_key)
-             self.logger.info(f"Cleared {deleted} keys")
-             return deleted
-         except Exception as e:
-             self.logger.error(f"Failed to clear fingerprints: {str(e)}")
-             raise
-
-     async def closed(self, reason: Optional[str] = None) -> None:
-         """Handle crawler shutdown."""
-         try:
-             if self.cleanup_fp:
-                 deleted = await self.redis.delete(self.redis_key)
-                 self.logger.info(
-                     f"Cleaned {deleted} fingerprints from {self.redis_key} "
-                     f"(reason: {reason or 'manual'})"
-                 )
-             else:
-                 # Report statistics instead
-                 count = await self.redis.scard(self.redis_key)
-                 self.logger.info(f"Total fingerprints preserved: {count}")
-         except Exception as e:
-             self.logger.warning(f"Close operation failed: {e}")
-         finally:
-             await self._close_redis()
-
-     async def _close_redis(self) -> None:
-         """Safely close the Redis connection."""
-         try:
-             if hasattr(self.redis, 'close'):
-                 await self.redis.close()
-         except Exception as e:
-             self.logger.warning(f"Redis close error: {e}")
+ #!/usr/bin/python
+ # -*- coding:UTF-8 -*-
+ import aioredis
+ from typing import Optional
+ from crawlo import Request
+ from crawlo.filters import BaseFilter
+ from crawlo.utils.log import get_logger
+ from crawlo.utils.request import request_fingerprint
+
+
+ class AioRedisFilter(BaseFilter):
+     """Asynchronous request-deduplication filter backed by a Redis set (distributed-crawler friendly), with TTL and cleanup control."""
+
+     def __init__(
+         self,
+         redis_key: str,
+         client: aioredis.Redis,
+         stats: dict,
+         debug: bool,
+         log_level: str,
+         cleanup_fp: bool = False,
+         ttl: Optional[int] = None
+     ):
+         """Initialize the filter."""
+         self.logger = get_logger(self.__class__.__name__, log_level)
+         super().__init__(self.logger, stats, debug)
+
+         self.redis_key = redis_key
+         self.redis = client
+         self.cleanup_fp = cleanup_fp
+         self.ttl = ttl
+
+     @classmethod
+     def create_instance(cls, crawler) -> 'BaseFilter':
+         """Create a filter instance from the crawler settings."""
+         redis_url = crawler.settings.get('REDIS_URL', 'redis://localhost:6379')
+         decode_responses = crawler.settings.get_bool('DECODE_RESPONSES', False)
+         ttl_setting = crawler.settings.get_int('REDIS_TTL')
+
+         # Normalize the TTL setting
+         ttl = None
+         if ttl_setting is not None:
+             ttl = max(0, int(ttl_setting)) if ttl_setting > 0 else None
+
+         try:
+             redis_client = aioredis.from_url(
+                 redis_url,
+                 decode_responses=decode_responses,
+                 max_connections=20,
+                 encoding='utf-8'
+             )
+         except Exception as e:
+             raise RuntimeError(f"Redis connection failed: {redis_url} - {str(e)}")
+
+         return cls(
+             redis_key=f"{crawler.settings.get('PROJECT_NAME', 'default')}:{crawler.settings.get('REDIS_KEY', 'request_fingerprints')}",
+             client=redis_client,
+             stats=crawler.stats,
+             cleanup_fp=crawler.settings.get_bool('CLEANUP_FP', False),
+             ttl=ttl,
+             debug=crawler.settings.get_bool('FILTER_DEBUG', False),
+             log_level=crawler.settings.get('LOG_LEVEL', 'INFO')
+         )
+
+     async def requested(self, request: Request) -> bool:
+         """Check whether the request has already been seen."""
+         try:
+             fp = str(request_fingerprint(request))
+
+             # 1. Check whether the fingerprint exists
+             pipe = self.redis.pipeline()
+             pipe.sismember(self.redis_key, fp)  # queued on the pipeline, not awaited individually
+             exists = (await pipe.execute())[0]  # execute and read the result
+
+             if exists:  # already present: report a duplicate
+                 return True
+
+             # 2. Otherwise add the fingerprint and set the TTL
+             pipe = self.redis.pipeline()
+             pipe.sadd(self.redis_key, fp)  # queued on the pipeline, not awaited individually
+             if self.ttl and self.ttl > 0:
+                 pipe.expire(self.redis_key, self.ttl)  # queued on the pipeline, not awaited individually
+             await pipe.execute()  # run all queued commands in one round trip
+
+             return False  # new request
+
+         except Exception as e:
+             self.logger.error(f"Request check failed: {getattr(request, 'url', 'unknown URL')}")
+             raise
+
+     async def add_fingerprint(self, fp: str) -> bool:
+         """Add a new fingerprint to the Redis set."""
+         try:
+             fp = str(fp)
+             added = await self.redis.sadd(self.redis_key, fp)
+
+             if self.ttl and self.ttl > 0:
+                 await self.redis.expire(self.redis_key, self.ttl)
+
+             return added == 1
+         except Exception as e:
+             self.logger.error("Failed to add fingerprint")
+             raise
+
+     async def get_stats(self) -> dict:
+         """Get the filter statistics."""
+         try:
+             count = await self.redis.scard(self.redis_key)
+             stats = {
+                 'total_fingerprints': count,
+                 'redis_key': self.redis_key,
+                 'ttl': f"{self.ttl}s" if self.ttl else "persistent"
+             }
+             stats.update(self.stats)
+             return stats
+         except Exception as e:
+             self.logger.error("Failed to get stats")
+             return self.stats
+
+     async def clear_all(self) -> int:
+         """Clear all fingerprint data."""
+         try:
+             deleted = await self.redis.delete(self.redis_key)
+             self.logger.info(f"Cleared fingerprint keys: {deleted}")
+             return deleted
+         except Exception as e:
+             self.logger.error("Failed to clear fingerprints")
+             raise
+
+     async def closed(self, reason: Optional[str] = None) -> None:
+         """Cleanup when the crawler shuts down."""
+         try:
+             if self.cleanup_fp:
+                 deleted = await self.redis.delete(self.redis_key)
+                 self.logger.info(f"Shutdown cleanup: deleted {deleted} fingerprint key(s)")
+             else:
+                 count = await self.redis.scard(self.redis_key)
+                 ttl_info = f"{self.ttl}s" if self.ttl else "persistent"
+                 self.logger.info(f"Fingerprints preserved: {count} (TTL: {ttl_info})")
+         finally:
+             await self._close_redis()
+
+     async def _close_redis(self) -> None:
+         """Safely close the Redis connection."""
+         try:
+             if hasattr(self.redis, 'close'):
+                 await self.redis.close()
+             self.logger.debug("Redis connection closed")
+         except Exception as e:
+             self.logger.warning(f"Error while closing Redis: {e}")