crawlo-1.1.3-py3-none-any.whl → crawlo-1.1.4-py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.

Potentially problematic release.

Files changed (118)
  1. crawlo/__init__.py +34 -34
  2. crawlo/__version__.py +1 -1
  3. crawlo/cli.py +40 -40
  4. crawlo/commands/__init__.py +13 -13
  5. crawlo/commands/check.py +594 -594
  6. crawlo/commands/genspider.py +151 -151
  7. crawlo/commands/list.py +155 -155
  8. crawlo/commands/run.py +285 -285
  9. crawlo/commands/startproject.py +196 -196
  10. crawlo/commands/stats.py +188 -188
  11. crawlo/commands/utils.py +186 -186
  12. crawlo/config.py +279 -279
  13. crawlo/core/__init__.py +2 -2
  14. crawlo/core/engine.py +171 -171
  15. crawlo/core/enhanced_engine.py +189 -189
  16. crawlo/core/processor.py +40 -40
  17. crawlo/core/scheduler.py +165 -165
  18. crawlo/crawler.py +1027 -1027
  19. crawlo/downloader/__init__.py +242 -242
  20. crawlo/downloader/aiohttp_downloader.py +212 -212
  21. crawlo/downloader/cffi_downloader.py +251 -251
  22. crawlo/downloader/httpx_downloader.py +259 -259
  23. crawlo/event.py +11 -11
  24. crawlo/exceptions.py +81 -81
  25. crawlo/extension/__init__.py +38 -31
  26. crawlo/extension/health_check.py +142 -0
  27. crawlo/extension/log_interval.py +58 -49
  28. crawlo/extension/log_stats.py +82 -44
  29. crawlo/extension/logging_extension.py +44 -35
  30. crawlo/extension/memory_monitor.py +89 -0
  31. crawlo/extension/performance_profiler.py +118 -0
  32. crawlo/extension/request_recorder.py +108 -0
  33. crawlo/filters/__init__.py +154 -154
  34. crawlo/filters/aioredis_filter.py +241 -241
  35. crawlo/filters/memory_filter.py +269 -269
  36. crawlo/items/__init__.py +23 -23
  37. crawlo/items/base.py +21 -21
  38. crawlo/items/fields.py +53 -53
  39. crawlo/items/items.py +104 -104
  40. crawlo/middleware/__init__.py +21 -21
  41. crawlo/middleware/default_header.py +32 -32
  42. crawlo/middleware/download_delay.py +28 -28
  43. crawlo/middleware/middleware_manager.py +135 -135
  44. crawlo/middleware/proxy.py +248 -248
  45. crawlo/middleware/request_ignore.py +30 -30
  46. crawlo/middleware/response_code.py +18 -18
  47. crawlo/middleware/response_filter.py +26 -26
  48. crawlo/middleware/retry.py +124 -124
  49. crawlo/mode_manager.py +200 -200
  50. crawlo/network/__init__.py +21 -21
  51. crawlo/network/request.py +311 -311
  52. crawlo/network/response.py +271 -271
  53. crawlo/pipelines/__init__.py +21 -21
  54. crawlo/pipelines/bloom_dedup_pipeline.py +156 -156
  55. crawlo/pipelines/console_pipeline.py +39 -39
  56. crawlo/pipelines/csv_pipeline.py +316 -316
  57. crawlo/pipelines/database_dedup_pipeline.py +224 -224
  58. crawlo/pipelines/json_pipeline.py +218 -218
  59. crawlo/pipelines/memory_dedup_pipeline.py +115 -115
  60. crawlo/pipelines/mongo_pipeline.py +132 -117
  61. crawlo/pipelines/mysql_pipeline.py +317 -195
  62. crawlo/pipelines/pipeline_manager.py +56 -56
  63. crawlo/pipelines/redis_dedup_pipeline.py +162 -162
  64. crawlo/project.py +153 -153
  65. crawlo/queue/pqueue.py +37 -37
  66. crawlo/queue/queue_manager.py +307 -307
  67. crawlo/queue/redis_priority_queue.py +208 -208
  68. crawlo/settings/__init__.py +7 -7
  69. crawlo/settings/default_settings.py +278 -244
  70. crawlo/settings/setting_manager.py +99 -99
  71. crawlo/spider/__init__.py +639 -639
  72. crawlo/stats_collector.py +59 -59
  73. crawlo/subscriber.py +131 -106
  74. crawlo/task_manager.py +30 -30
  75. crawlo/templates/crawlo.cfg.tmpl +10 -10
  76. crawlo/templates/project/__init__.py.tmpl +3 -3
  77. crawlo/templates/project/items.py.tmpl +17 -17
  78. crawlo/templates/project/middlewares.py.tmpl +111 -87
  79. crawlo/templates/project/pipelines.py.tmpl +97 -341
  80. crawlo/templates/project/run.py.tmpl +251 -251
  81. crawlo/templates/project/settings.py.tmpl +279 -250
  82. crawlo/templates/project/spiders/__init__.py.tmpl +5 -5
  83. crawlo/templates/spider/spider.py.tmpl +142 -178
  84. crawlo/utils/__init__.py +7 -7
  85. crawlo/utils/controlled_spider_mixin.py +439 -439
  86. crawlo/utils/date_tools.py +233 -233
  87. crawlo/utils/db_helper.py +343 -343
  88. crawlo/utils/func_tools.py +82 -82
  89. crawlo/utils/large_scale_config.py +286 -286
  90. crawlo/utils/large_scale_helper.py +343 -343
  91. crawlo/utils/log.py +128 -128
  92. crawlo/utils/queue_helper.py +175 -175
  93. crawlo/utils/request.py +267 -267
  94. crawlo/utils/request_serializer.py +219 -219
  95. crawlo/utils/spider_loader.py +62 -62
  96. crawlo/utils/system.py +11 -11
  97. crawlo/utils/tools.py +4 -4
  98. crawlo/utils/url.py +39 -39
  99. crawlo-1.1.4.dist-info/METADATA +403 -0
  100. crawlo-1.1.4.dist-info/RECORD +117 -0
  101. examples/__init__.py +7 -7
  102. examples/controlled_spider_example.py +205 -205
  103. tests/__init__.py +7 -7
  104. tests/test_final_validation.py +153 -153
  105. tests/test_proxy_health_check.py +32 -32
  106. tests/test_proxy_middleware_integration.py +136 -136
  107. tests/test_proxy_providers.py +56 -56
  108. tests/test_proxy_stats.py +19 -19
  109. tests/test_proxy_strategies.py +59 -59
  110. tests/test_redis_config.py +28 -28
  111. tests/test_redis_queue.py +224 -224
  112. tests/test_request_serialization.py +70 -70
  113. tests/test_scheduler.py +241 -241
  114. crawlo-1.1.3.dist-info/METADATA +0 -635
  115. crawlo-1.1.3.dist-info/RECORD +0 -113
  116. {crawlo-1.1.3.dist-info → crawlo-1.1.4.dist-info}/WHEEL +0 -0
  117. {crawlo-1.1.3.dist-info → crawlo-1.1.4.dist-info}/entry_points.txt +0 -0
  118. {crawlo-1.1.3.dist-info → crawlo-1.1.4.dist-info}/top_level.txt +0 -0
crawlo/extension/logging_extension.py
@@ -1,35 +1,44 @@
-from crawlo.exceptions import NotConfigured
-from crawlo.utils.log import get_logger
-from crawlo.utils.log import LoggerManager
-
-
-class CustomLoggerExtension:
-    """
-    Logging-system initialization extension.
-    Follows the same interface convention as ExtensionManager: uses create_instance.
-    """
-
-    def __init__(self, settings):
-        self.settings = settings
-        # Initialize the global logging configuration
-        LoggerManager.configure(settings)
-
-    @classmethod
-    def create_instance(cls, crawler, *args, **kwargs):
-        """
-        Factory method, compatible with how ExtensionManager creates instances.
-        ExtensionManager calls this.
-        """
-        # Enablement can be controlled through settings
-        if not crawler.settings.get('LOG_FILE') and not crawler.settings.get('LOG_ENABLE_CUSTOM'):
-            raise NotConfigured("CustomLoggerExtension: LOG_FILE not set and LOG_ENABLE_CUSTOM=False")
-
-        return cls(crawler.settings)
-
-    def spider_opened(self, spider):
-        logger = get_logger(__name__)
-        logger.info(
-            f"CustomLoggerExtension: Logging initialized. "
-            f"LOG_FILE={self.settings.get('LOG_FILE')}, "
-            f"LOG_LEVEL={self.settings.get('LOG_LEVEL')}"
-        )
+from typing import Any
+from crawlo.exceptions import NotConfigured
+from crawlo.utils.log import get_logger
+from crawlo.utils.log import LoggerManager
+
+
+class CustomLoggerExtension:
+    """
+    Logging-system initialization extension.
+    Follows the same interface convention as ExtensionManager: uses create_instance.
+    """
+
+    def __init__(self, settings: Any):
+        self.settings = settings
+        # Initialize the global logging configuration
+        LoggerManager.configure(settings)
+
+    @classmethod
+    def create_instance(cls, crawler: Any, *args: Any, **kwargs: Any) -> 'CustomLoggerExtension':
+        """
+        Factory method, compatible with how ExtensionManager creates instances.
+        Called by ExtensionManager.
+        """
+        # Enablement can be controlled through settings
+        log_file = crawler.settings.get('LOG_FILE')
+        log_enable_custom = crawler.settings.get('LOG_ENABLE_CUSTOM', False)
+
+        # Disable only when no log file is configured and custom logging is not enabled
+        if not log_file and not log_enable_custom:
+            raise NotConfigured("CustomLoggerExtension: LOG_FILE not set and LOG_ENABLE_CUSTOM=False")
+
+        return cls(crawler.settings)
+
+    def spider_opened(self, spider: Any) -> None:
+        logger = get_logger(__name__)
+        try:
+            logger.info(
+                f"CustomLoggerExtension: Logging initialized. "
+                f"LOG_FILE={self.settings.get('LOG_FILE')}, "
+                f"LOG_LEVEL={self.settings.get('LOG_LEVEL')}"
+            )
+        except Exception as e:
+            # Even if this startup message cannot be logged, the crawl must not be affected
+            pass
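
For orientation, here is a minimal sketch of how the rewritten gate in create_instance is driven from a project's settings. The key names (LOG_FILE, LOG_ENABLE_CUSTOM, LOG_LEVEL) come straight from the diff above; the settings.py layout and the example values are assumptions, not project documentation.

# Hypothetical settings.py snippet; values are illustrative only.
LOG_LEVEL = 'INFO'
LOG_FILE = 'logs/crawlo.log'   # setting a log file keeps the extension enabled...
LOG_ENABLE_CUSTOM = True       # ...as does opting in explicitly
# If LOG_FILE is unset and LOG_ENABLE_CUSTOM is falsy, create_instance()
# raises NotConfigured, and presumably the ExtensionManager then skips
# this extension rather than failing the crawl.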
crawlo/extension/memory_monitor.py (new file)
@@ -0,0 +1,89 @@
+#!/usr/bin/python
+# -*- coding:UTF-8 -*-
+import asyncio
+import psutil
+from typing import Any, Optional
+
+from crawlo.utils.log import get_logger
+from crawlo.event import spider_opened, spider_closed
+
+
+class MemoryMonitorExtension:
+    """
+    Memory-monitoring extension.
+    Periodically monitors the crawler process's memory usage and warns when thresholds are exceeded.
+    """
+
+    def __init__(self, crawler: Any):
+        self.task: Optional[asyncio.Task] = None
+        self.process = psutil.Process()
+        self.settings = crawler.settings
+        self.logger = get_logger(self.__class__.__name__, crawler.settings.get('LOG_LEVEL'))
+
+        # Read configuration parameters
+        self.interval = self.settings.get_int('MEMORY_MONITOR_INTERVAL', 60)  # check every 60 s by default
+        self.warning_threshold = self.settings.get_float('MEMORY_WARNING_THRESHOLD', 80.0)  # default warning threshold: 80%
+        self.critical_threshold = self.settings.get_float('MEMORY_CRITICAL_THRESHOLD', 90.0)  # default critical threshold: 90%
+
+    @classmethod
+    def create_instance(cls, crawler: Any) -> 'MemoryMonitorExtension':
+        # Only create an instance when the feature is enabled in settings
+        if not crawler.settings.get_bool('MEMORY_MONITOR_ENABLED', False):
+            from crawlo.exceptions import NotConfigured
+            raise NotConfigured("MemoryMonitorExtension: MEMORY_MONITOR_ENABLED is False")
+
+        o = cls(crawler)
+        crawler.subscriber.subscribe(o.spider_opened, event=spider_opened)
+        crawler.subscriber.subscribe(o.spider_closed, event=spider_closed)
+        return o
+
+    async def spider_opened(self) -> None:
+        """Start monitoring when the spider opens"""
+        self.task = asyncio.create_task(self._monitor_loop())
+        self.logger.info(
+            f"Memory monitor started. Interval: {self.interval}s, "
+            f"Warning threshold: {self.warning_threshold}%, Critical threshold: {self.critical_threshold}%"
+        )
+
+    async def spider_closed(self) -> None:
+        """Stop monitoring when the spider closes"""
+        if self.task:
+            self.task.cancel()
+            try:
+                await self.task
+            except asyncio.CancelledError:
+                pass
+            self.task = None
+        self.logger.info("Memory monitor stopped.")
+
+    async def _monitor_loop(self) -> None:
+        """Memory-monitoring loop"""
+        while True:
+            try:
+                # Collect memory-usage figures for this process
+                memory_info = self.process.memory_info()
+                memory_percent = self.process.memory_percent()
+
+                # Log current memory usage
+                self.logger.debug(
+                    f"Memory usage: {memory_percent:.2f}% "
+                    f"(RSS: {memory_info.rss / 1024 / 1024:.2f} MB, "
+                    f"VMS: {memory_info.vms / 1024 / 1024:.2f} MB)"
+                )
+
+                # Check the thresholds
+                if memory_percent >= self.critical_threshold:
+                    self.logger.critical(
+                        f"Memory usage critical: {memory_percent:.2f}% "
+                        f"(RSS: {memory_info.rss / 1024 / 1024:.2f} MB)"
+                    )
+                elif memory_percent >= self.warning_threshold:
+                    self.logger.warning(
+                        f"Memory usage high: {memory_percent:.2f}% "
+                        f"(RSS: {memory_info.rss / 1024 / 1024:.2f} MB)"
+                    )
+
+                await asyncio.sleep(self.interval)
+            except Exception as e:
+                self.logger.error(f"Error in memory monitoring: {e}")
+                await asyncio.sleep(self.interval)
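
A sketch of the settings that drive the new memory monitor. The MEMORY_* key names and their defaults are read verbatim from the diff; the example values, and the stand-alone psutil lines mirroring what _monitor_loop() measures, are illustration only.

# Hypothetical settings.py snippet enabling the monitor (off by default).
MEMORY_MONITOR_ENABLED = True
MEMORY_MONITOR_INTERVAL = 30          # seconds between checks (default 60)
MEMORY_WARNING_THRESHOLD = 75.0       # percent of total memory (default 80.0)
MEMORY_CRITICAL_THRESHOLD = 85.0      # percent of total memory (default 90.0)

# What the loop measures, stand-alone:
import psutil
p = psutil.Process()
print(f"{p.memory_percent():.2f}%")                   # share of system memory
print(f"{p.memory_info().rss / 1024 / 1024:.2f} MB")  # resident set size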
crawlo/extension/performance_profiler.py (new file)
@@ -0,0 +1,118 @@
+#!/usr/bin/python
+# -*- coding:UTF-8 -*-
+import io
+import os
+import pstats
+import asyncio
+import cProfile
+from typing import Any, Optional
+
+from crawlo.utils.log import get_logger
+from crawlo.event import spider_opened, spider_closed
+
+
+class PerformanceProfilerExtension:
+    """
+    Performance-profiling extension.
+    Profiles the crawler while it runs to help with performance tuning.
+    """
+
+    def __init__(self, crawler: Any):
+        self.settings = crawler.settings
+        self.logger = get_logger(self.__class__.__name__, crawler.settings.get('LOG_LEVEL'))
+
+        # Read configuration parameters
+        self.enabled = self.settings.get_bool('PERFORMANCE_PROFILER_ENABLED', False)
+        self.output_dir = self.settings.get('PERFORMANCE_PROFILER_OUTPUT_DIR', 'profiling')
+        self.interval = self.settings.get_int('PERFORMANCE_PROFILER_INTERVAL', 300)  # default: 5 minutes
+
+        self.profiler: Optional[cProfile.Profile] = None
+        self.task: Optional[asyncio.Task] = None
+
+        # Create the output directory
+        if self.enabled:
+            os.makedirs(self.output_dir, exist_ok=True)
+
+    @classmethod
+    def create_instance(cls, crawler: Any) -> 'PerformanceProfilerExtension':
+        # Only create an instance when the feature is enabled in settings
+        if not crawler.settings.get_bool('PERFORMANCE_PROFILER_ENABLED', False):
+            from crawlo.exceptions import NotConfigured
+            raise NotConfigured("PerformanceProfilerExtension: PERFORMANCE_PROFILER_ENABLED is False")
+
+        o = cls(crawler)
+        if o.enabled:
+            crawler.subscriber.subscribe(o.spider_opened, event=spider_opened)
+            crawler.subscriber.subscribe(o.spider_closed, event=spider_closed)
+        return o
+
+    async def spider_opened(self) -> None:
+        """Start profiling when the spider opens"""
+        if not self.enabled:
+            return
+
+        self.profiler = cProfile.Profile()
+        self.profiler.enable()
+
+        # Start the task that periodically saves profiling results
+        self.task = asyncio.create_task(self._periodic_save())
+
+        self.logger.info("Performance profiler started.")
+
+    async def spider_closed(self) -> None:
+        """Stop profiling and save the results when the spider closes"""
+        if not self.enabled or not self.profiler:
+            return
+
+        # Stop the periodic-save task
+        if self.task:
+            self.task.cancel()
+            try:
+                await self.task
+            except asyncio.CancelledError:
+                pass
+
+        # Stop the profiler and save the final results
+        self.profiler.disable()
+
+        # Save the profiling results
+        await self._save_profile("final")
+        self.logger.info("Performance profiler stopped and results saved.")
+
+    async def _periodic_save(self) -> None:
+        """Periodically save profiling results"""
+        counter = 1
+        while True:
+            try:
+                await asyncio.sleep(self.interval)
+                if self.profiler:
+                    # Temporarily disable the profiler while saving results
+                    self.profiler.disable()
+                    await self._save_profile(f"periodic_{counter}")
+                    counter += 1
+                    # Re-enable the profiler
+                    self.profiler.enable()
+            except asyncio.CancelledError:
+                break
+            except Exception as e:
+                self.logger.error(f"Error in periodic profiling save: {e}")
+
+    async def _save_profile(self, name: str) -> None:
+        """Save the profiling results to a file"""
+        try:
+            # Build an in-memory string stream
+            s = io.StringIO()
+            ps = pstats.Stats(self.profiler, stream=s)
+
+            # Sort and print the statistics
+            ps.sort_stats('cumulative')
+            ps.print_stats()
+
+            # Save to file
+            filename = os.path.join(self.output_dir, f'profile_{name}.txt')
+            with open(filename, 'w', encoding='utf-8') as f:
+                f.write(s.getvalue())
+
+            self.logger.info(f"Performance profile saved to {filename}")
+        except Exception as e:
+            self.logger.error(f"Error saving performance profile: {e}")
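
The profiler dumps plain-text pstats reports into PERFORMANCE_PROFILER_OUTPUT_DIR. Below is a minimal sketch of the configuration plus a stand-alone reproduction of what _save_profile() writes; the PERFORMANCE_PROFILER_* keys are from the diff, while the example values and the toy workload are assumptions.

# Hypothetical settings.py snippet (profiling is off by default).
PERFORMANCE_PROFILER_ENABLED = True
PERFORMANCE_PROFILER_OUTPUT_DIR = 'profiling'   # profile_<name>.txt files land here
PERFORMANCE_PROFILER_INTERVAL = 300             # seconds between periodic dumps

# Stand-alone equivalent of _save_profile():
import cProfile, io, pstats
prof = cProfile.Profile()
prof.enable()
sum(i * i for i in range(100_000))  # stand-in workload
prof.disable()
s = io.StringIO()
pstats.Stats(prof, stream=s).sort_stats('cumulative').print_stats(10)
print(s.getvalue())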
crawlo/extension/request_recorder.py (new file)
@@ -0,0 +1,108 @@
+#!/usr/bin/python
+# -*- coding:UTF-8 -*-
+import os
+import json
+from typing import Any
+from datetime import datetime
+
+from crawlo import event
+from crawlo.utils.log import get_logger
+
+
+class RequestRecorderExtension:
+    """
+    Request-recording extension.
+    Logs every outgoing request to a file for debugging and analysis.
+    """
+
+    def __init__(self, crawler: Any):
+        self.settings = crawler.settings
+        self.logger = get_logger(self.__class__.__name__, crawler.settings.get('LOG_LEVEL'))
+
+        # Read configuration parameters
+        self.enabled = self.settings.get_bool('REQUEST_RECORDER_ENABLED', False)
+        self.output_dir = self.settings.get('REQUEST_RECORDER_OUTPUT_DIR', 'requests_log')
+        self.max_file_size = self.settings.get_int('REQUEST_RECORDER_MAX_FILE_SIZE', 10 * 1024 * 1024)  # default: 10 MB
+
+        # Create the output directory
+        if self.enabled:
+            os.makedirs(self.output_dir, exist_ok=True)
+
+        self.current_file = None
+        self.current_file_size = 0
+
+    @classmethod
+    def create_instance(cls, crawler: Any) -> 'RequestRecorderExtension':
+        # Only create an instance when the feature is enabled in settings
+        if not crawler.settings.get_bool('REQUEST_RECORDER_ENABLED', False):
+            from crawlo.exceptions import NotConfigured
+            raise NotConfigured("RequestRecorderExtension: REQUEST_RECORDER_ENABLED is False")
+
+        o = cls(crawler)
+        if o.enabled:
+            crawler.subscriber.subscribe(o.request_scheduled, event=event.request_scheduled)
+            crawler.subscriber.subscribe(o.response_received, event=event.response_received)
+            crawler.subscriber.subscribe(o.spider_closed, event=event.spider_closed)
+        return o
+
+    async def request_scheduled(self, request: Any, spider: Any) -> None:
+        """Record a scheduled request"""
+        if not self.enabled:
+            return
+
+        try:
+            request_info = {
+                'timestamp': datetime.now().isoformat(),
+                'type': 'request',
+                'url': request.url,
+                'method': request.method,
+                'headers': dict(request.headers),
+                'meta': getattr(request, 'meta', {}),
+            }
+
+            await self._write_record(request_info)
+        except Exception as e:
+            self.logger.error(f"Error recording request: {e}")
+
+    async def response_received(self, response: Any, spider: Any) -> None:
+        """Record a received response"""
+        if not self.enabled:
+            return
+
+        try:
+            response_info = {
+                'timestamp': datetime.now().isoformat(),
+                'type': 'response',
+                'url': response.url,
+                'status_code': response.status_code,
+                'headers': dict(response.headers),
+            }
+
+            await self._write_record(response_info)
+        except Exception as e:
+            self.logger.error(f"Error recording response: {e}")
+
+    async def spider_closed(self, spider: Any) -> None:
+        """Release resources when the spider closes"""
+        if self.current_file:
+            self.current_file.close()
+            self.current_file = None
+        self.logger.info("Request recorder closed.")
+
+    async def _write_record(self, record: dict) -> None:
+        """Write one record to the file"""
+        # Check whether a new file needs to be opened
+        if not self.current_file or self.current_file_size > self.max_file_size:
+            if self.current_file:
+                self.current_file.close()
+
+            timestamp = datetime.now().strftime('%Y%m%d_%H%M%S')
+            filename = os.path.join(self.output_dir, f'requests_{timestamp}.jsonl')
+            self.current_file = open(filename, 'a', encoding='utf-8')
+            self.current_file_size = 0
+
+        # Write the record
+        line = json.dumps(record, ensure_ascii=False) + '\n'
+        self.current_file.write(line)
+        self.current_file.flush()
+        self.current_file_size += len(line.encode('utf-8'))
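
The recorder writes one JSON object per line (JSONL), rotating files once they pass REQUEST_RECORDER_MAX_FILE_SIZE. A sketch of the configuration plus a small reader for the output follows; the REQUEST_RECORDER_* key names and the requests_<timestamp>.jsonl naming come from the diff, while the example values and the reader itself are assumptions.

# Hypothetical settings.py snippet (recording is off by default).
REQUEST_RECORDER_ENABLED = True
REQUEST_RECORDER_OUTPUT_DIR = 'requests_log'
REQUEST_RECORDER_MAX_FILE_SIZE = 10 * 1024 * 1024   # rotate after ~10 MB

# Replay the JSONL logs, e.g. to list failed responses:
import glob, json
for path in sorted(glob.glob('requests_log/requests_*.jsonl')):
    with open(path, encoding='utf-8') as f:
        for line in f:
            record = json.loads(line)
            if record['type'] == 'response' and record['status_code'] >= 400:
                print(record['timestamp'], record['status_code'], record['url'])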