crawlo-1.1.2-py3-none-any.whl → crawlo-1.1.3-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of crawlo has been flagged as possibly problematic.

Files changed (113)
  1. crawlo/__init__.py +34 -34
  2. crawlo/__version__.py +1 -1
  3. crawlo/cli.py +40 -40
  4. crawlo/commands/__init__.py +13 -13
  5. crawlo/commands/check.py +594 -594
  6. crawlo/commands/genspider.py +151 -151
  7. crawlo/commands/list.py +155 -155
  8. crawlo/commands/run.py +285 -285
  9. crawlo/commands/startproject.py +196 -196
  10. crawlo/commands/stats.py +188 -188
  11. crawlo/commands/utils.py +186 -186
  12. crawlo/config.py +279 -279
  13. crawlo/core/__init__.py +2 -2
  14. crawlo/core/engine.py +171 -171
  15. crawlo/core/enhanced_engine.py +189 -189
  16. crawlo/core/processor.py +40 -40
  17. crawlo/core/scheduler.py +166 -162
  18. crawlo/crawler.py +1027 -1027
  19. crawlo/downloader/__init__.py +242 -242
  20. crawlo/downloader/aiohttp_downloader.py +212 -212
  21. crawlo/downloader/cffi_downloader.py +251 -251
  22. crawlo/downloader/httpx_downloader.py +259 -257
  23. crawlo/event.py +11 -11
  24. crawlo/exceptions.py +82 -78
  25. crawlo/extension/__init__.py +31 -31
  26. crawlo/extension/log_interval.py +49 -49
  27. crawlo/extension/log_stats.py +44 -44
  28. crawlo/extension/logging_extension.py +34 -34
  29. crawlo/filters/__init__.py +154 -154
  30. crawlo/filters/aioredis_filter.py +242 -242
  31. crawlo/filters/memory_filter.py +269 -269
  32. crawlo/items/__init__.py +23 -23
  33. crawlo/items/base.py +21 -21
  34. crawlo/items/fields.py +53 -53
  35. crawlo/items/items.py +104 -104
  36. crawlo/middleware/__init__.py +21 -21
  37. crawlo/middleware/default_header.py +32 -32
  38. crawlo/middleware/download_delay.py +28 -28
  39. crawlo/middleware/middleware_manager.py +135 -135
  40. crawlo/middleware/proxy.py +248 -248
  41. crawlo/middleware/request_ignore.py +30 -30
  42. crawlo/middleware/response_code.py +18 -18
  43. crawlo/middleware/response_filter.py +26 -26
  44. crawlo/middleware/retry.py +125 -125
  45. crawlo/mode_manager.py +200 -200
  46. crawlo/network/__init__.py +21 -21
  47. crawlo/network/request.py +311 -311
  48. crawlo/network/response.py +271 -269
  49. crawlo/pipelines/__init__.py +22 -13
  50. crawlo/pipelines/bloom_dedup_pipeline.py +157 -0
  51. crawlo/pipelines/console_pipeline.py +39 -39
  52. crawlo/pipelines/csv_pipeline.py +316 -316
  53. crawlo/pipelines/database_dedup_pipeline.py +225 -0
  54. crawlo/pipelines/json_pipeline.py +218 -218
  55. crawlo/pipelines/memory_dedup_pipeline.py +116 -0
  56. crawlo/pipelines/mongo_pipeline.py +116 -116
  57. crawlo/pipelines/mysql_pipeline.py +195 -195
  58. crawlo/pipelines/pipeline_manager.py +56 -56
  59. crawlo/pipelines/redis_dedup_pipeline.py +163 -0
  60. crawlo/project.py +153 -153
  61. crawlo/queue/pqueue.py +37 -37
  62. crawlo/queue/queue_manager.py +307 -303
  63. crawlo/queue/redis_priority_queue.py +208 -191
  64. crawlo/settings/__init__.py +7 -7
  65. crawlo/settings/default_settings.py +245 -226
  66. crawlo/settings/setting_manager.py +99 -99
  67. crawlo/spider/__init__.py +639 -639
  68. crawlo/stats_collector.py +59 -59
  69. crawlo/subscriber.py +106 -106
  70. crawlo/task_manager.py +30 -30
  71. crawlo/templates/crawlo.cfg.tmpl +10 -10
  72. crawlo/templates/project/__init__.py.tmpl +3 -3
  73. crawlo/templates/project/items.py.tmpl +17 -17
  74. crawlo/templates/project/middlewares.py.tmpl +86 -86
  75. crawlo/templates/project/pipelines.py.tmpl +341 -335
  76. crawlo/templates/project/run.py.tmpl +251 -238
  77. crawlo/templates/project/settings.py.tmpl +250 -247
  78. crawlo/templates/project/spiders/__init__.py.tmpl +5 -5
  79. crawlo/templates/spider/spider.py.tmpl +177 -177
  80. crawlo/utils/__init__.py +7 -7
  81. crawlo/utils/controlled_spider_mixin.py +439 -335
  82. crawlo/utils/date_tools.py +233 -233
  83. crawlo/utils/db_helper.py +343 -343
  84. crawlo/utils/func_tools.py +82 -82
  85. crawlo/utils/large_scale_config.py +286 -286
  86. crawlo/utils/large_scale_helper.py +343 -343
  87. crawlo/utils/log.py +128 -128
  88. crawlo/utils/queue_helper.py +175 -175
  89. crawlo/utils/request.py +267 -267
  90. crawlo/utils/request_serializer.py +219 -219
  91. crawlo/utils/spider_loader.py +62 -62
  92. crawlo/utils/system.py +11 -11
  93. crawlo/utils/tools.py +4 -4
  94. crawlo/utils/url.py +39 -39
  95. {crawlo-1.1.2.dist-info → crawlo-1.1.3.dist-info}/METADATA +635 -567
  96. crawlo-1.1.3.dist-info/RECORD +113 -0
  97. examples/__init__.py +7 -7
  98. examples/controlled_spider_example.py +205 -0
  99. tests/__init__.py +7 -7
  100. tests/test_final_validation.py +153 -153
  101. tests/test_proxy_health_check.py +32 -32
  102. tests/test_proxy_middleware_integration.py +136 -136
  103. tests/test_proxy_providers.py +56 -56
  104. tests/test_proxy_stats.py +19 -19
  105. tests/test_proxy_strategies.py +59 -59
  106. tests/test_redis_config.py +28 -28
  107. tests/test_redis_queue.py +224 -224
  108. tests/test_request_serialization.py +70 -70
  109. tests/test_scheduler.py +241 -241
  110. crawlo-1.1.2.dist-info/RECORD +0 -108
  111. {crawlo-1.1.2.dist-info → crawlo-1.1.3.dist-info}/WHEEL +0 -0
  112. {crawlo-1.1.2.dist-info → crawlo-1.1.3.dist-info}/entry_points.txt +0 -0
  113. {crawlo-1.1.2.dist-info → crawlo-1.1.3.dist-info}/top_level.txt +0 -0
crawlo/core/enhanced_engine.py CHANGED
@@ -1,190 +1,190 @@
(Every line is marked removed and re-added, but the two sides render identically — the underlying change is not visible in this view, most likely line endings or trailing whitespace. New content shown once below.)
+ #!/usr/bin/env python3
+ # -*- coding: utf-8 -*-
+ """
+ Enhanced engine implementation.
+ Addresses concurrency control and backpressure when generating requests at large scale.
+ """
+ import asyncio
+
+ from crawlo.core.engine import Engine as BaseEngine
+ from crawlo.utils.log import get_logger
+
+
+ class EnhancedEngine(BaseEngine):
+     """
+     Enhanced engine implementation.
+
+     Key improvements:
+     1. Intelligent request-generation control
+     2. Backpressure-aware scheduling
+     3. Dynamic concurrency adjustment
+     """
+
+     def __init__(self, crawler):
+         super().__init__(crawler)
+
+         # Enhanced control parameters
+         self.max_queue_size = self.settings.get_int('SCHEDULER_MAX_QUEUE_SIZE', 200)
+         self.generation_batch_size = 10
+         self.generation_interval = 0.05
+         self.backpressure_ratio = 0.8  # apply backpressure once the queue is 80% full
+
+         # State tracking
+         self._generation_paused = False
+         self._last_generation_time = 0
+         self._generation_stats = {
+             'total_generated': 0,
+             'backpressure_events': 0
+         }
+
+         self.logger = get_logger(self.__class__.__name__)
+
+     async def crawl(self):
+         """
+         Enhanced crawl loop.
+         Supports intelligent request generation and backpressure control.
+         """
+         generation_task = None
+
+         try:
+             # Start the request-generation task
+             if self.start_requests:
+                 generation_task = asyncio.create_task(
+                     self._controlled_request_generation()
+                 )
+
+             # Main crawl loop
+             while self.running:
+                 # Fetch and process the next request
+                 if request := await self._get_next_request():
+                     await self._crawl(request)
+
+                 # Check the exit condition
+                 if await self._should_exit():
+                     break
+
+                 # Sleep briefly to avoid busy-waiting
+                 await asyncio.sleep(0.001)
+
+         finally:
+             # Clean up the generation task
+             if generation_task and not generation_task.done():
+                 generation_task.cancel()
+                 try:
+                     await generation_task
+                 except asyncio.CancelledError:
+                     pass
+
+             await self.close_spider()
+
+     async def _controlled_request_generation(self):
+         """Controlled request generation."""
+         self.logger.info("🎛️ Starting controlled request generation")
+
+         batch = []
+         total_generated = 0
+
+         try:
+             for request in self.start_requests:
+                 batch.append(request)
+
+                 # Flush in batches
+                 if len(batch) >= self.generation_batch_size:
+                     generated = await self._process_generation_batch(batch)
+                     total_generated += generated
+                     batch = []
+
+                     # Backpressure check
+                     if await self._should_pause_generation():
+                         await self._wait_for_capacity()
+
+             # Flush any remaining requests
+             if batch:
+                 generated = await self._process_generation_batch(batch)
+                 total_generated += generated
+
+         except Exception as e:
+             self.logger.error(f"❌ Request generation failed: {e}")
+
+         finally:
+             self.start_requests = None
+             self.logger.info(f"🎉 Request generation finished, total: {total_generated}")
+
+     async def _process_generation_batch(self, batch) -> int:
+         """Process one batch of requests."""
+         generated = 0
+
+         for request in batch:
+             if not self.running:
+                 break
+
+             # Wait until the queue has room
+             while await self._is_queue_full() and self.running:
+                 await asyncio.sleep(0.1)
+
+             if self.running:
+                 await self.enqueue_request(request)
+                 generated += 1
+                 self._generation_stats['total_generated'] += 1
+
+                 # Throttle the generation rate
+                 if self.generation_interval > 0:
+                     await asyncio.sleep(self.generation_interval)
+
+         return generated
+
+     async def _should_pause_generation(self) -> bool:
+         """Decide whether generation should pause."""
+         # Check the queue size
+         if await self._is_queue_full():
+             return True
+
+         # Check the task manager's load
+         if self.task_manager:
+             current_tasks = len(self.task_manager.current_task)
+             if hasattr(self.task_manager, 'semaphore'):
+                 max_concurrency = getattr(self.task_manager.semaphore, '_initial_value', 8)
+                 if current_tasks >= max_concurrency * self.backpressure_ratio:
+                     return True
+
+         return False
+
+     async def _is_queue_full(self) -> bool:
+         """Check whether the queue is effectively full."""
+         if not self.scheduler:
+             return False
+
+         queue_size = len(self.scheduler)
+         return queue_size >= self.max_queue_size * self.backpressure_ratio
+
+     async def _wait_for_capacity(self):
+         """Wait until the system has spare capacity."""
+         self._generation_stats['backpressure_events'] += 1
+         self.logger.debug("⏸️ Backpressure triggered, pausing request generation")
+
+         wait_time = 0.1
+         max_wait = 2.0
+
+         while await self._should_pause_generation() and self.running:
+             await asyncio.sleep(wait_time)
+             wait_time = min(wait_time * 1.1, max_wait)
+
+     async def _should_exit(self) -> bool:
+         """Check whether the engine should exit."""
+         # No start requests left and every component is idle
+         if (self.start_requests is None and
+                 self.scheduler.idle() and
+                 self.downloader.idle() and
+                 self.task_manager.all_done() and
+                 self.processor.idle()):
+             return True
+
+         return False
+
+     def get_generation_stats(self) -> dict:
+         """Return generation statistics."""
+         return {
+             **self._generation_stats,
+             'queue_size': len(self.scheduler) if self.scheduler else 0,
+             'active_tasks': len(self.task_manager.current_task) if self.task_manager else 0
          }
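
The producer-side throttle above is the heart of the design: _wait_for_capacity polls, growing the delay by 10% per check (0.1 s up to a 2.0 s cap) until _should_pause_generation clears. The same pattern is easy to study in isolation. The sketch below reproduces it against a plain asyncio.Queue standing in for the scheduler; the constants mirror the defaults above, and the producer/consumer scaffolding is illustrative only, not crawlo API.

import asyncio

MAX_QUEUE_SIZE = 10          # stand-in for SCHEDULER_MAX_QUEUE_SIZE
BACKPRESSURE_RATIO = 0.8     # pause producers once the queue is 80% full

async def wait_for_capacity(queue: asyncio.Queue) -> None:
    # Multiplicative backoff, as in EnhancedEngine._wait_for_capacity:
    # start at 100 ms, grow 10% per check, cap at 2 s.
    wait_time, max_wait = 0.1, 2.0
    while queue.qsize() >= MAX_QUEUE_SIZE * BACKPRESSURE_RATIO:
        await asyncio.sleep(wait_time)
        wait_time = min(wait_time * 1.1, max_wait)

async def producer(queue: asyncio.Queue) -> None:
    for i in range(50):
        await wait_for_capacity(queue)   # back off instead of overfilling the queue
        await queue.put(i)

async def consumer(queue: asyncio.Queue) -> None:
    for _ in range(50):
        await queue.get()
        await asyncio.sleep(0.01)        # simulate download/processing latency
        queue.task_done()

async def main() -> None:
    queue: asyncio.Queue = asyncio.Queue()
    await asyncio.gather(producer(queue), consumer(queue))

asyncio.run(main())

Compared with a hard block on a bounded queue, the growing poll interval keeps the generator cheap to run while the system is saturated, and the backpressure_events counter makes the stalls observable via get_generation_stats().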
crawlo/core/processor.py CHANGED
@@ -1,40 +1,40 @@
(Every line is marked removed and re-added, but the two sides render identically — the change is not visible in this view. New content shown once below.)
+ #!/usr/bin/python
+ # -*- coding:UTF-8 -*-
+ from asyncio import Queue
+ from typing import Union, Optional
+
+ from crawlo import Request, Item
+ from crawlo.pipelines.pipeline_manager import PipelineManager
+
+
+ class Processor(object):
+
+     def __init__(self, crawler):
+         self.crawler = crawler
+         self.queue: Queue = Queue()
+         self.pipelines: Optional[PipelineManager] = None
+
+     def open(self):
+         self.pipelines = PipelineManager.from_crawler(self.crawler)
+
+     async def process(self):
+         while not self.idle():
+             result = await self.queue.get()
+             if isinstance(result, Request):
+                 await self.crawler.engine.enqueue_request(result)
+             else:
+                 assert isinstance(result, Item)
+                 await self._process_item(result)
+
+     async def _process_item(self, item):
+         await self.pipelines.process_item(item=item)
+
+     async def enqueue(self, output: Union[Request, Item]):
+         await self.queue.put(output)
+         await self.process()
+
+     def idle(self) -> bool:
+         return len(self) == 0
+
+     def __len__(self):
+         return self.queue.qsize()
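
Processor couples enqueue and drain: each call to enqueue puts one output on the internal queue and then loops until the queue is empty, handing Request objects back to the engine for scheduling and Item objects to the pipeline manager. A minimal stand-alone sketch of that routing — the stub engine and pipelines are hypothetical placeholders, not crawlo classes:

import asyncio
from asyncio import Queue

class StubEngine:
    # Placeholder for crawler.engine: just records re-scheduled requests.
    async def enqueue_request(self, request):
        print(f"re-scheduled: {request}")

class StubPipelines:
    # Placeholder for PipelineManager: just records processed items.
    async def process_item(self, item):
        print(f"piped item: {item}")

async def drain(queue: Queue, engine: StubEngine, pipelines: StubPipelines) -> None:
    # Mirrors Processor.process(): run until the queue is empty, routing by type.
    while queue.qsize() > 0:
        result = await queue.get()
        if isinstance(result, dict):          # dict stands in for a crawlo Item
            await pipelines.process_item(result)
        else:                                 # anything else stands in for a Request
            await engine.enqueue_request(result)

async def main() -> None:
    queue: Queue = Queue()
    await queue.put({"title": "Example item"})
    await queue.put("https://example.com/next-page")
    await drain(queue, StubEngine(), StubPipelines())

asyncio.run(main())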