crawlo-1.1.1-py3-none-any.whl → crawlo-1.1.2-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of crawlo might be problematic.

Files changed (68)
  1. crawlo/__init__.py +2 -1
  2. crawlo/__version__.py +1 -1
  3. crawlo/commands/genspider.py +68 -42
  4. crawlo/commands/list.py +102 -93
  5. crawlo/commands/startproject.py +89 -4
  6. crawlo/commands/utils.py +187 -0
  7. crawlo/config.py +280 -0
  8. crawlo/core/engine.py +16 -3
  9. crawlo/core/enhanced_engine.py +190 -0
  10. crawlo/core/scheduler.py +113 -8
  11. crawlo/crawler.py +840 -307
  12. crawlo/downloader/__init__.py +181 -17
  13. crawlo/downloader/aiohttp_downloader.py +15 -2
  14. crawlo/downloader/cffi_downloader.py +11 -1
  15. crawlo/downloader/httpx_downloader.py +14 -3
  16. crawlo/filters/__init__.py +122 -5
  17. crawlo/filters/aioredis_filter.py +128 -36
  18. crawlo/filters/memory_filter.py +99 -32
  19. crawlo/middleware/proxy.py +11 -8
  20. crawlo/middleware/retry.py +40 -5
  21. crawlo/mode_manager.py +201 -0
  22. crawlo/network/__init__.py +17 -3
  23. crawlo/network/request.py +118 -10
  24. crawlo/network/response.py +131 -28
  25. crawlo/pipelines/__init__.py +1 -1
  26. crawlo/pipelines/csv_pipeline.py +317 -0
  27. crawlo/pipelines/json_pipeline.py +219 -0
  28. crawlo/queue/__init__.py +0 -0
  29. crawlo/queue/pqueue.py +37 -0
  30. crawlo/queue/queue_manager.py +304 -0
  31. crawlo/queue/redis_priority_queue.py +192 -0
  32. crawlo/settings/default_settings.py +68 -9
  33. crawlo/spider/__init__.py +576 -66
  34. crawlo/task_manager.py +4 -1
  35. crawlo/templates/project/middlewares.py.tmpl +56 -45
  36. crawlo/templates/project/pipelines.py.tmpl +308 -36
  37. crawlo/templates/project/run.py.tmpl +239 -0
  38. crawlo/templates/project/settings.py.tmpl +211 -17
  39. crawlo/templates/spider/spider.py.tmpl +153 -7
  40. crawlo/utils/controlled_spider_mixin.py +336 -0
  41. crawlo/utils/large_scale_config.py +287 -0
  42. crawlo/utils/large_scale_helper.py +344 -0
  43. crawlo/utils/queue_helper.py +176 -0
  44. crawlo/utils/request_serializer.py +220 -0
  45. crawlo-1.1.2.dist-info/METADATA +567 -0
  46. {crawlo-1.1.1.dist-info → crawlo-1.1.2.dist-info}/RECORD +54 -46
  47. tests/test_final_validation.py +154 -0
  48. tests/test_redis_config.py +29 -0
  49. tests/test_redis_queue.py +225 -0
  50. tests/test_request_serialization.py +71 -0
  51. tests/test_scheduler.py +242 -0
  52. crawlo/pipelines/mysql_batch_pipline.py +0 -273
  53. crawlo/utils/pqueue.py +0 -174
  54. crawlo-1.1.1.dist-info/METADATA +0 -220
  55. examples/baidu_spider/__init__.py +0 -7
  56. examples/baidu_spider/demo.py +0 -94
  57. examples/baidu_spider/items.py +0 -46
  58. examples/baidu_spider/middleware.py +0 -49
  59. examples/baidu_spider/pipeline.py +0 -55
  60. examples/baidu_spider/run.py +0 -27
  61. examples/baidu_spider/settings.py +0 -121
  62. examples/baidu_spider/spiders/__init__.py +0 -7
  63. examples/baidu_spider/spiders/bai_du.py +0 -61
  64. examples/baidu_spider/spiders/miit.py +0 -159
  65. examples/baidu_spider/spiders/sina.py +0 -79
  66. {crawlo-1.1.1.dist-info → crawlo-1.1.2.dist-info}/WHEEL +0 -0
  67. {crawlo-1.1.1.dist-info → crawlo-1.1.2.dist-info}/entry_points.txt +0 -0
  68. {crawlo-1.1.1.dist-info → crawlo-1.1.2.dist-info}/top_level.txt +0 -0
crawlo/core/scheduler.py CHANGED
@@ -4,14 +4,16 @@ from typing import Optional, Callable
 
 from crawlo.utils.log import get_logger
 from crawlo.utils.request import set_request
-from crawlo.utils.pqueue import SpiderPriorityQueue
+from crawlo.utils.request_serializer import RequestSerializer
+from crawlo.queue.queue_manager import QueueManager, QueueConfig
 from crawlo.project import load_class, common_call
 
 
 class Scheduler:
     def __init__(self, crawler, dupe_filter, stats, log_level, priority):
         self.crawler = crawler
-        self.request_queue: Optional[SpiderPriorityQueue] = None
+        self.queue_manager: Optional[QueueManager] = None
+        self.request_serializer = RequestSerializer()  # handles request serialization
 
         self.logger = get_logger(name=self.__class__.__name__, level=log_level)
         self.stats = stats
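
The RequestSerializer wired in above is what lets requests survive a Redis-backed queue: a bound callback such as spider.parse cannot be serialized directly, so the usual approach is to store the callback's name and re-bind it to the live spider on the way out, which is what next_request does in the next hunk via restore_after_deserialization. A minimal sketch of that idea, using a hypothetical helper class whose details may well differ from crawlo's actual RequestSerializer:

# Minimal sketch: round-tripping a request callback through a serialized queue.
# CallbackRoundTrip is a hypothetical stand-in for crawlo's RequestSerializer.
class CallbackRoundTrip:
    def prepare_for_serialization(self, request):
        # A bound spider method cannot be pickled portably; keep only its name.
        cb = getattr(request, 'callback', None)
        if callable(cb):
            request.callback = cb.__name__  # e.g. 'parse_detail'
        return request

    def restore_after_deserialization(self, request, spider):
        # Re-bind the stored name to a method on the live spider instance.
        cb = getattr(request, 'callback', None)
        if isinstance(cb, str) and spider is not None:
            request.callback = getattr(spider, cb, None)
        return request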
@@ -30,28 +32,131 @@ class Scheduler:
         )
         return o
 
-    def open(self):
-        self.request_queue = SpiderPriorityQueue()
+    async def open(self):
+        """Initialize the scheduler and its queue."""
+        # Build the queue configuration
+        queue_config = QueueConfig.from_settings(self.crawler.settings)
+
+        # Create the queue manager
+        self.queue_manager = QueueManager(queue_config)
+
+        # Initialize the queue
+        success = await self.queue_manager.initialize()
+        if not success:
+            raise RuntimeError("Queue initialization failed")
+
+        # Report the queue status
+        status = self.queue_manager.get_status()
+        self.logger.info(f'Queue type: {status["type"]}, health: {status["health"]}')
         self.logger.info(f'requesting filter: {self.dupe_filter}')
 
     async def next_request(self):
-        request = await self.request_queue.get()
+        """Fetch the next request."""
+        if not self.queue_manager:
+            return None
+
+        request = await self.queue_manager.get()
+
+        # Restore the callback (dropped when the request went through the Redis queue)
+        if request:
+            spider = getattr(self.crawler, 'spider', None)
+            request = self.request_serializer.restore_after_deserialization(request, spider)
+
         return request
 
     async def enqueue_request(self, request):
+        """Put a request onto the queue."""
         if not request.dont_filter and await common_call(self.dupe_filter.requested, request):
             self.dupe_filter.log_stats(request)
             return False
+
+        if not self.queue_manager:
+            self.logger.error("Queue manager is not initialized")
+            return False
+
         set_request(request, self.priority)
-        await self.request_queue.put(request)
-        return True
+
+        # Go through the unified queue interface
+        success = await self.queue_manager.put(request, priority=getattr(request, 'priority', 0))
+
+        if success:
+            self.logger.debug(f"✅ Request enqueued: {request.url}")
+
+        return success
 
     def idle(self) -> bool:
+        """Check whether the queue is empty."""
         return len(self) == 0
 
     async def close(self):
+        """Close the scheduler."""
         if isinstance(closed := getattr(self.dupe_filter, 'closed', None), Callable):
             await closed()
+
+        if self.queue_manager:
+            await self.queue_manager.close()
 
     def __len__(self):
-        return self.request_queue.qsize()
+        """Get the queue size."""
+        if not self.queue_manager:
+            return 0
+        # Synchronous approximation; the exact size must be fetched asynchronously
+        return 0 if self.queue_manager.empty() else 1
+
+# #!/usr/bin/python
+# # -*- coding:UTF-8 -*-
+# from typing import Optional, Callable
+#
+# from crawlo.utils.log import get_logger
+# from crawlo.utils.request import set_request
+# from crawlo.utils.pqueue import SpiderPriorityQueue
+# from crawlo.project import load_class, common_call
+#
+#
+# class Scheduler:
+#     def __init__(self, crawler, dupe_filter, stats, log_level, priority):
+#         self.crawler = crawler
+#         self.request_queue: Optional[SpiderPriorityQueue] = None
+#
+#         self.logger = get_logger(name=self.__class__.__name__, level=log_level)
+#         self.stats = stats
+#         self.dupe_filter = dupe_filter
+#         self.priority = priority
+#
+#     @classmethod
+#     def create_instance(cls, crawler):
+#         filter_cls = load_class(crawler.settings.get('FILTER_CLASS'))
+#         o = cls(
+#             crawler=crawler,
+#             dupe_filter=filter_cls.create_instance(crawler),
+#             stats=crawler.stats,
+#             log_level=crawler.settings.get('LOG_LEVEL'),
+#             priority=crawler.settings.get('DEPTH_PRIORITY')
+#         )
+#         return o
+#
+#     def open(self):
+#         self.request_queue = SpiderPriorityQueue()
+#         self.logger.info(f'requesting filter: {self.dupe_filter}')
+#
+#     async def next_request(self):
+#         request = await self.request_queue.get()
+#         return request
+#
+#     async def enqueue_request(self, request):
+#         if not request.dont_filter and await common_call(self.dupe_filter.requested, request):
+#             self.dupe_filter.log_stats(request)
+#             return False
+#         set_request(request, self.priority)
+#         await self.request_queue.put(request)
+#         return True
+#
+#     def idle(self) -> bool:
+#         return len(self) == 0
+#
+#     async def close(self):
+#         if isinstance(closed := getattr(self.dupe_filter, 'closed', None), Callable):
+#             await closed()
+#
+#     def __len__(self):
+#         return self.request_queue.qsize()
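
Two behavioural changes in this file are worth calling out. First, open() is now a coroutine, so any caller that previously invoked it synchronously has to await it instead; this is presumably part of why crawlo/core/engine.py also changed in this release. Second, __len__ no longer reports an exact size: the true size of a remote queue can only be fetched asynchronously, so it degrades to a 0-or-1 emptiness indicator, which is still sufficient for idle(). A minimal sketch of how the reworked lifecycle would be driven, assuming only the method signatures visible in the diff; the drive() function and the seeds argument are illustrative:

# Minimal sketch of the reworked scheduler lifecycle; only the Scheduler
# methods shown in the diff are assumed, everything else is illustrative.
async def drive(scheduler, seeds):
    await scheduler.open()                        # now async: builds and initializes the QueueManager
    for request in seeds:
        await scheduler.enqueue_request(request)  # returns False if filtered or queue unavailable
    while not scheduler.idle():
        request = await scheduler.next_request()  # callback re-bound on the way out
        if request is None:                       # queue manager missing or drained
            break
        # ... hand the request off to the downloader ...
    await scheduler.close()                       # closes the dupe filter, then the queue manager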