crawlo-1.1.9-py3-none-any.whl → crawlo-1.2.1-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of crawlo might be problematic.

Files changed (221)
  1. crawlo/__init__.py +61 -61
  2. crawlo/__version__.py +1 -1
  3. crawlo/cleaners/__init__.py +60 -60
  4. crawlo/cleaners/data_formatter.py +225 -225
  5. crawlo/cleaners/encoding_converter.py +125 -125
  6. crawlo/cleaners/text_cleaner.py +232 -232
  7. crawlo/cli.py +65 -65
  8. crawlo/commands/__init__.py +14 -14
  9. crawlo/commands/check.py +594 -594
  10. crawlo/commands/genspider.py +151 -151
  11. crawlo/commands/help.py +142 -132
  12. crawlo/commands/list.py +155 -155
  13. crawlo/commands/run.py +292 -292
  14. crawlo/commands/startproject.py +418 -418
  15. crawlo/commands/stats.py +188 -188
  16. crawlo/commands/utils.py +186 -186
  17. crawlo/config.py +312 -312
  18. crawlo/config_validator.py +252 -252
  19. crawlo/core/__init__.py +2 -2
  20. crawlo/core/engine.py +354 -345
  21. crawlo/core/processor.py +40 -40
  22. crawlo/core/scheduler.py +143 -136
  23. crawlo/crawler.py +1027 -1027
  24. crawlo/downloader/__init__.py +266 -266
  25. crawlo/downloader/aiohttp_downloader.py +220 -220
  26. crawlo/downloader/cffi_downloader.py +256 -256
  27. crawlo/downloader/httpx_downloader.py +259 -259
  28. crawlo/downloader/hybrid_downloader.py +213 -213
  29. crawlo/downloader/playwright_downloader.py +402 -402
  30. crawlo/downloader/selenium_downloader.py +472 -472
  31. crawlo/event.py +11 -11
  32. crawlo/exceptions.py +81 -81
  33. crawlo/extension/__init__.py +37 -37
  34. crawlo/extension/health_check.py +141 -141
  35. crawlo/extension/log_interval.py +57 -57
  36. crawlo/extension/log_stats.py +81 -81
  37. crawlo/extension/logging_extension.py +43 -43
  38. crawlo/extension/memory_monitor.py +104 -104
  39. crawlo/extension/performance_profiler.py +133 -133
  40. crawlo/extension/request_recorder.py +107 -107
  41. crawlo/filters/__init__.py +154 -154
  42. crawlo/filters/aioredis_filter.py +280 -280
  43. crawlo/filters/memory_filter.py +269 -269
  44. crawlo/items/__init__.py +23 -23
  45. crawlo/items/base.py +21 -21
  46. crawlo/items/fields.py +53 -53
  47. crawlo/items/items.py +104 -104
  48. crawlo/middleware/__init__.py +21 -21
  49. crawlo/middleware/default_header.py +132 -32
  50. crawlo/middleware/download_delay.py +105 -28
  51. crawlo/middleware/middleware_manager.py +135 -135
  52. crawlo/middleware/offsite.py +116 -0
  53. crawlo/middleware/proxy.py +366 -272
  54. crawlo/middleware/request_ignore.py +88 -30
  55. crawlo/middleware/response_code.py +164 -18
  56. crawlo/middleware/response_filter.py +138 -26
  57. crawlo/middleware/retry.py +124 -124
  58. crawlo/mode_manager.py +211 -211
  59. crawlo/network/__init__.py +21 -21
  60. crawlo/network/request.py +338 -338
  61. crawlo/network/response.py +359 -359
  62. crawlo/pipelines/__init__.py +21 -21
  63. crawlo/pipelines/bloom_dedup_pipeline.py +156 -156
  64. crawlo/pipelines/console_pipeline.py +39 -39
  65. crawlo/pipelines/csv_pipeline.py +316 -316
  66. crawlo/pipelines/database_dedup_pipeline.py +224 -224
  67. crawlo/pipelines/json_pipeline.py +218 -218
  68. crawlo/pipelines/memory_dedup_pipeline.py +115 -115
  69. crawlo/pipelines/mongo_pipeline.py +131 -131
  70. crawlo/pipelines/mysql_pipeline.py +316 -316
  71. crawlo/pipelines/pipeline_manager.py +61 -61
  72. crawlo/pipelines/redis_dedup_pipeline.py +167 -167
  73. crawlo/project.py +187 -187
  74. crawlo/queue/pqueue.py +37 -37
  75. crawlo/queue/queue_manager.py +337 -334
  76. crawlo/queue/redis_priority_queue.py +298 -298
  77. crawlo/settings/__init__.py +7 -7
  78. crawlo/settings/default_settings.py +226 -219
  79. crawlo/settings/setting_manager.py +122 -122
  80. crawlo/spider/__init__.py +639 -639
  81. crawlo/stats_collector.py +59 -59
  82. crawlo/subscriber.py +130 -130
  83. crawlo/task_manager.py +30 -30
  84. crawlo/templates/crawlo.cfg.tmpl +10 -10
  85. crawlo/templates/project/__init__.py.tmpl +3 -3
  86. crawlo/templates/project/items.py.tmpl +17 -17
  87. crawlo/templates/project/middlewares.py.tmpl +118 -109
  88. crawlo/templates/project/pipelines.py.tmpl +96 -96
  89. crawlo/templates/project/run.py.tmpl +45 -45
  90. crawlo/templates/project/settings.py.tmpl +327 -326
  91. crawlo/templates/project/settings_distributed.py.tmpl +119 -119
  92. crawlo/templates/project/settings_gentle.py.tmpl +94 -94
  93. crawlo/templates/project/settings_high_performance.py.tmpl +151 -151
  94. crawlo/templates/project/settings_simple.py.tmpl +68 -68
  95. crawlo/templates/project/spiders/__init__.py.tmpl +5 -5
  96. crawlo/templates/spider/spider.py.tmpl +143 -141
  97. crawlo/tools/__init__.py +182 -182
  98. crawlo/tools/anti_crawler.py +268 -268
  99. crawlo/tools/authenticated_proxy.py +240 -240
  100. crawlo/tools/data_validator.py +180 -180
  101. crawlo/tools/date_tools.py +35 -35
  102. crawlo/tools/distributed_coordinator.py +386 -386
  103. crawlo/tools/retry_mechanism.py +220 -220
  104. crawlo/tools/scenario_adapter.py +262 -262
  105. crawlo/utils/__init__.py +35 -35
  106. crawlo/utils/batch_processor.py +260 -260
  107. crawlo/utils/controlled_spider_mixin.py +439 -439
  108. crawlo/utils/date_tools.py +290 -290
  109. crawlo/utils/db_helper.py +343 -343
  110. crawlo/utils/enhanced_error_handler.py +359 -359
  111. crawlo/utils/env_config.py +105 -105
  112. crawlo/utils/error_handler.py +125 -125
  113. crawlo/utils/func_tools.py +82 -82
  114. crawlo/utils/large_scale_config.py +286 -286
  115. crawlo/utils/large_scale_helper.py +343 -343
  116. crawlo/utils/log.py +128 -128
  117. crawlo/utils/performance_monitor.py +284 -284
  118. crawlo/utils/queue_helper.py +175 -175
  119. crawlo/utils/redis_connection_pool.py +334 -334
  120. crawlo/utils/redis_key_validator.py +199 -199
  121. crawlo/utils/request.py +267 -267
  122. crawlo/utils/request_serializer.py +219 -219
  123. crawlo/utils/spider_loader.py +62 -62
  124. crawlo/utils/system.py +11 -11
  125. crawlo/utils/tools.py +4 -4
  126. crawlo/utils/url.py +39 -39
  127. crawlo-1.2.1.dist-info/METADATA +692 -0
  128. crawlo-1.2.1.dist-info/RECORD +220 -0
  129. examples/__init__.py +7 -7
  130. examples/aiohttp_settings.py +42 -0
  131. examples/curl_cffi_settings.py +41 -0
  132. examples/default_header_middleware_example.py +107 -0
  133. examples/default_header_spider_example.py +129 -0
  134. examples/download_delay_middleware_example.py +160 -0
  135. examples/httpx_settings.py +42 -0
  136. examples/multi_downloader_proxy_example.py +81 -0
  137. examples/offsite_middleware_example.py +55 -0
  138. examples/offsite_spider_example.py +107 -0
  139. examples/proxy_spider_example.py +166 -0
  140. examples/request_ignore_middleware_example.py +51 -0
  141. examples/request_ignore_spider_example.py +99 -0
  142. examples/response_code_middleware_example.py +52 -0
  143. examples/response_filter_middleware_example.py +67 -0
  144. examples/tong_hua_shun_settings.py +62 -0
  145. examples/tong_hua_shun_spider.py +170 -0
  146. tests/DOUBLE_CRAWLO_PREFIX_FIX_REPORT.md +81 -81
  147. tests/__init__.py +7 -7
  148. tests/advanced_tools_example.py +275 -275
  149. tests/authenticated_proxy_example.py +236 -236
  150. tests/cleaners_example.py +160 -160
  151. tests/config_validation_demo.py +102 -102
  152. tests/controlled_spider_example.py +205 -205
  153. tests/date_tools_example.py +180 -180
  154. tests/dynamic_loading_example.py +523 -523
  155. tests/dynamic_loading_test.py +104 -104
  156. tests/env_config_example.py +133 -133
  157. tests/error_handling_example.py +171 -171
  158. tests/redis_key_validation_demo.py +130 -130
  159. tests/response_improvements_example.py +144 -144
  160. tests/test_advanced_tools.py +148 -148
  161. tests/test_all_redis_key_configs.py +145 -145
  162. tests/test_authenticated_proxy.py +141 -141
  163. tests/test_cleaners.py +54 -54
  164. tests/test_comprehensive.py +146 -146
  165. tests/test_config_validator.py +193 -193
  166. tests/test_crawlo_proxy_integration.py +173 -0
  167. tests/test_date_tools.py +123 -123
  168. tests/test_default_header_middleware.py +159 -0
  169. tests/test_double_crawlo_fix.py +207 -207
  170. tests/test_double_crawlo_fix_simple.py +124 -124
  171. tests/test_download_delay_middleware.py +222 -0
  172. tests/test_downloader_proxy_compatibility.py +269 -0
  173. tests/test_dynamic_downloaders_proxy.py +124 -124
  174. tests/test_dynamic_proxy.py +92 -92
  175. tests/test_dynamic_proxy_config.py +146 -146
  176. tests/test_dynamic_proxy_real.py +109 -109
  177. tests/test_edge_cases.py +303 -303
  178. tests/test_enhanced_error_handler.py +270 -270
  179. tests/test_env_config.py +121 -121
  180. tests/test_error_handler_compatibility.py +112 -112
  181. tests/test_final_validation.py +153 -153
  182. tests/test_framework_env_usage.py +103 -103
  183. tests/test_integration.py +356 -356
  184. tests/test_item_dedup_redis_key.py +122 -122
  185. tests/test_offsite_middleware.py +222 -0
  186. tests/test_parsel.py +29 -29
  187. tests/test_performance.py +327 -327
  188. tests/test_proxy_api.py +265 -0
  189. tests/test_proxy_health_check.py +32 -32
  190. tests/test_proxy_middleware.py +122 -0
  191. tests/test_proxy_middleware_enhanced.py +217 -0
  192. tests/test_proxy_middleware_integration.py +136 -136
  193. tests/test_proxy_providers.py +56 -56
  194. tests/test_proxy_stats.py +19 -19
  195. tests/test_proxy_strategies.py +59 -59
  196. tests/test_queue_manager_double_crawlo.py +174 -231
  197. tests/test_queue_manager_redis_key.py +176 -176
  198. tests/test_real_scenario_proxy.py +196 -0
  199. tests/test_redis_config.py +28 -28
  200. tests/test_redis_connection_pool.py +294 -294
  201. tests/test_redis_key_naming.py +181 -181
  202. tests/test_redis_key_validator.py +123 -123
  203. tests/test_redis_queue.py +224 -224
  204. tests/test_request_ignore_middleware.py +183 -0
  205. tests/test_request_serialization.py +70 -70
  206. tests/test_response_code_middleware.py +350 -0
  207. tests/test_response_filter_middleware.py +428 -0
  208. tests/test_response_improvements.py +152 -152
  209. tests/test_retry_middleware.py +242 -0
  210. tests/test_scheduler.py +241 -241
  211. tests/test_simple_response.py +61 -61
  212. tests/test_telecom_spider_redis_key.py +205 -205
  213. tests/test_template_content.py +87 -87
  214. tests/test_template_redis_key.py +134 -134
  215. tests/test_tools.py +153 -153
  216. tests/tools_example.py +257 -257
  217. crawlo-1.1.9.dist-info/METADATA +0 -626
  218. crawlo-1.1.9.dist-info/RECORD +0 -190
  219. {crawlo-1.1.9.dist-info → crawlo-1.2.1.dist-info}/WHEEL +0 -0
  220. {crawlo-1.1.9.dist-info → crawlo-1.2.1.dist-info}/entry_points.txt +0 -0
  221. {crawlo-1.1.9.dist-info → crawlo-1.2.1.dist-info}/top_level.txt +0 -0
crawlo/templates/project/middlewares.py.tmpl (lines 1-109 are removed and re-added unchanged in this release; they are shown once as context below, and the nine comment lines new in 1.2.1 are marked +)
@@ -1,110 +1,119 @@
  # -*- coding: UTF-8 -*-
  """
  {{project_name}}.middlewares
  ============================
  Custom middleware for inserting your own logic into request/response/exception handling.

  This is a simple example middleware; add more middleware as needed.
  """

  import random
  from crawlo import Request, Response
  from crawlo.utils.log import get_logger


  class ExampleMiddleware:
      """
      Example middleware demonstrating how to handle requests, responses, and exceptions.

      This middleware:
      1. Adds a random User-Agent to requests
      2. Logs request and response information
      3. Handles exceptions
      """

      def __init__(self):
          self.logger = get_logger(self.__class__.__name__)
          self.user_agents = [
              'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/136.0.0.0 Safari/537.36',
              'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/136.0.0.0 Safari/537.36',
              'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:135.0) Gecko/20100101 Firefox/135.0',
              'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:135.0) Gecko/20100101 Firefox/135.0',
          ]

      def process_request(self, request, spider):
          """
          Called before the request is executed by the downloader.

          Args:
              request: the request object
              spider: the spider instance

          Returns:
              None: continue processing the request
              Response: return a response object (short-circuits further processing)
              Request: return a new request object (replaces the original request)
          """
          # Add a random User-Agent to the request
          if 'User-Agent' not in request.headers:
              ua = random.choice(self.user_agents)
              request.headers['User-Agent'] = ua
              self.logger.debug(f"Set User-Agent for request {request.url}: {ua[:50]}...")

          return None

      def process_response(self, request, response, spider):
          """
          Called before the response is handled by the Spider.

          Args:
              request: the original request object
              response: the response object
              spider: the spider instance

          Returns:
              Response: the processed response object
          """
          # Log response information
          self.logger.info(f"Received response: {request.url} - status code: {response.status_code}")

          # Handle special status codes here if needed
          if response.status_code == 403:
              self.logger.warning(f"Access denied: {request.url}")

          return response

      def process_exception(self, request, exception, spider):
          """
          Called when an exception is raised during download or processing.

          Args:
              request: the request object
              exception: the exception object
              spider: the spider instance

          Returns:
              None: the exception keeps propagating
              Response: return a response object (handles the exception)
              Request: return a new request object (retries the request)
          """
          self.logger.error(f"Request exception: {request.url} - {exception}")
          return None


  # ======================== Usage notes ========================
  #
  # Enable the middleware in settings.py:
  # MIDDLEWARES = [
  #     '{{project_name}}.middlewares.ExampleMiddleware',
  # ]
  #
  # Add more middleware as needed, for example:
  # 1. Request-processing middleware (modify headers, set proxies, etc.)
  # 2. Response-processing middleware (parsing, filtering, etc.)
  # 3. Exception-handling middleware (retrying, logging, etc.)
  #
  # Each middleware can implement the following methods:
  # - process_request: handle requests
  # - process_response: handle responses
  # - process_exception: handle exceptions
+ #
+ # Note: the Crawlo framework ships many built-in middleware components you can use directly:
+ # - DownloadDelayMiddleware: controls the delay between requests
+ # - ResponseCodeMiddleware: handles HTTP status codes and records statistics
+ # - ResponseFilterMiddleware: filters responses with specific status codes
+ # - DefaultHeaderMiddleware: adds default request headers
+ # - ProxyMiddleware: sets proxies
+ # - RetryMiddleware: handles retry logic
+ # - OffsiteMiddleware: filters off-site requests
  # ======================== Usage notes ========================
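The built-in middleware named in the new 1.2.1 comment block corresponds to the modules added or reworked in this release (crawlo/middleware/offsite.py, response_code.py, response_filter.py, default_header.py, download_delay.py, proxy.py, retry.py). Below is a minimal sketch of a project settings.py that enables the template's project middleware next to two of the built-ins; the dotted paths for the built-in classes are assumptions inferred from the module names in this diff, not verified API.

    # settings.py -- illustrative sketch; the crawlo.middleware.* paths are assumed, not verified
    MIDDLEWARES = [
        '{{project_name}}.middlewares.ExampleMiddleware',   # project middleware from the template
        'crawlo.middleware.offsite.OffsiteMiddleware',      # assumed path: filter off-site requests
        'crawlo.middleware.retry.RetryMiddleware',          # assumed path: retry failed requests
    ]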
crawlo/templates/project/pipelines.py.tmpl (every line is removed and re-added unchanged between 1.1.9 and 1.2.1; the body is shown once as context)
@@ -1,97 +1,97 @@
  # -*- coding: UTF-8 -*-
  """
  {{project_name}}.pipelines
  ==========================
  Data pipelines for processing the items returned by a Spider,
  e.g. cleaning, validation, deduplication, saving to a database.

  This is a simple example pipeline; add more pipelines as needed.
  """

  from datetime import datetime
  from crawlo.exceptions import DropItem
  from crawlo.utils.log import get_logger


  class ExamplePipeline:
      """
      Example pipeline demonstrating how to process items.

      This pipeline:
      1. Validates required fields
      2. Cleans the data
      3. Adds a timestamp
      4. Logs processing progress
      """

      def __init__(self):
          self.logger = get_logger(self.__class__.__name__)
          self.item_count = 0

      def process_item(self, item, spider):
          """
          Process an item.

          Args:
              item: the item to process
              spider: the spider instance

          Returns:
              The processed item

          Raises:
              DropItem: raised if the item is invalid
          """
          # Validate required fields
          if not item.get('title') or not item.get('url'):
              raise DropItem("Missing required field: title or url")

          # Clean the data
          item['title'] = str(item['title']).strip()

          # Add a processing timestamp
          item['processed_at'] = datetime.now().isoformat()

          # Counter
          self.item_count += 1

          # Log progress
          self.logger.info(f"Processed item #{self.item_count}: {item['title']}")

          return item

      def open_spider(self, spider):
          """
          Called when the spider starts.

          Args:
              spider: the spider instance
          """
          self.logger.info(f"Pipeline started, ready to process data for spider '{spider.name}'")

      def close_spider(self, spider):
          """
          Called when the spider closes.

          Args:
              spider: the spider instance
          """
          self.logger.info(f"Pipeline closed, {self.item_count} items processed in total")


  # ======================== Usage notes ========================
  #
  # Enable the pipeline in settings.py:
  # PIPELINES = [
  #     '{{project_name}}.pipelines.ExamplePipeline',
  # ]
  #
  # Add more pipelines as needed, for example:
  # 1. Data-validation pipelines
  # 2. Deduplication pipelines
  # 3. Storage pipelines (database, file, etc.)
  # 4. Data-transformation pipelines
  #
  # Every pipeline should implement the process_item method;
  # open_spider and close_spider are optional.
  # ======================== Usage notes ========================
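To make the DropItem behaviour above concrete, here is a small, hypothetical sketch of how ExamplePipeline treats a valid and an invalid item when driven by hand; the item dicts, the direct instantiation, and the project import path are illustrative only (in a real project the framework calls process_item for you).

    # illustrative sketch -- not part of the template
    from crawlo.exceptions import DropItem
    from {{project_name}}.pipelines import ExamplePipeline   # hypothetical project import

    pipeline = ExamplePipeline()

    ok = {'title': '  Example title  ', 'url': 'https://example.com'}
    ok = pipeline.process_item(ok, spider=None)   # title is stripped, processed_at is added
    print(ok['title'], ok['processed_at'])

    bad = {'title': '', 'url': 'https://example.com'}
    try:
        pipeline.process_item(bad, spider=None)   # empty title -> DropItem is raised
    except DropItem as exc:
        print('dropped:', exc)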
crawlo/templates/project/run.py.tmpl (every line is removed and re-added unchanged between 1.1.9 and 1.2.1; the body is shown once as context)
@@ -1,46 +1,46 @@
  #!/usr/bin/env python
  # -*- coding: UTF-8 -*-
  """
  {{project_name}} project run script
  ============================
  A simplified spider launcher based on the Crawlo framework.
  """

  import sys
  import os
  import asyncio

  # Add the project root to the Python path
  project_root = os.path.dirname(os.path.abspath(__file__))
  sys.path.insert(0, project_root)

  # Change to the project root directory
  os.chdir(project_root)

  from crawlo.crawler import CrawlerProcess

  def main():
      """Main function: run a fixed spider."""
      print("🚀 Starting the {{project_name}} spider")

      # Create the crawler process (default settings are loaded automatically)
      try:
          # Make sure the spider modules are imported correctly
          spider_modules = ['{{project_name}}.spiders']
          process = CrawlerProcess(spider_modules=spider_modules)
          print("✅ Crawler process initialized successfully")

          # Run the fixed spider
          # TODO: replace 'your_spider_name' with the actual spider name
          asyncio.run(process.crawl('your_spider_name'))

          print("✅ Spider run finished")

      except Exception as e:
          print(f"❌ Run failed: {e}")
          import traceback
          traceback.print_exc()
          sys.exit(1)

  if __name__ == '__main__':
      main()
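Once a spider exists, the TODO placeholder in the template is replaced with its registered name. A minimal usage sketch follows, assuming a spider registered under the hypothetical name 'news' in {{project_name}}/spiders and using only the CrawlerProcess(spider_modules=...) and process.crawl(name) calls already shown above.

    # illustrative sketch -- 'news' is a hypothetical spider name
    import asyncio
    from crawlo.crawler import CrawlerProcess

    def main():
        process = CrawlerProcess(spider_modules=['{{project_name}}.spiders'])
        asyncio.run(process.crawl('news'))   # same call pattern as the template's TODO line

    if __name__ == '__main__':
        main()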