crawlo-1.2.0-py3-none-any.whl → crawlo-1.2.1-py3-none-any.whl

This diff shows the content of publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.

Potentially problematic release.



Files changed (220)
  1. crawlo/__init__.py +61 -61
  2. crawlo/__version__.py +1 -1
  3. crawlo/cleaners/__init__.py +60 -60
  4. crawlo/cleaners/data_formatter.py +225 -225
  5. crawlo/cleaners/encoding_converter.py +125 -125
  6. crawlo/cleaners/text_cleaner.py +232 -232
  7. crawlo/cli.py +65 -65
  8. crawlo/commands/__init__.py +14 -14
  9. crawlo/commands/check.py +594 -594
  10. crawlo/commands/genspider.py +151 -151
  11. crawlo/commands/help.py +142 -132
  12. crawlo/commands/list.py +155 -155
  13. crawlo/commands/run.py +292 -292
  14. crawlo/commands/startproject.py +418 -418
  15. crawlo/commands/stats.py +188 -188
  16. crawlo/commands/utils.py +186 -186
  17. crawlo/config.py +312 -312
  18. crawlo/config_validator.py +252 -252
  19. crawlo/core/__init__.py +2 -2
  20. crawlo/core/engine.py +354 -354
  21. crawlo/core/processor.py +40 -40
  22. crawlo/core/scheduler.py +143 -143
  23. crawlo/crawler.py +1027 -1027
  24. crawlo/downloader/__init__.py +266 -266
  25. crawlo/downloader/aiohttp_downloader.py +220 -220
  26. crawlo/downloader/cffi_downloader.py +256 -256
  27. crawlo/downloader/httpx_downloader.py +259 -259
  28. crawlo/downloader/hybrid_downloader.py +213 -213
  29. crawlo/downloader/playwright_downloader.py +402 -402
  30. crawlo/downloader/selenium_downloader.py +472 -472
  31. crawlo/event.py +11 -11
  32. crawlo/exceptions.py +81 -81
  33. crawlo/extension/__init__.py +37 -37
  34. crawlo/extension/health_check.py +141 -141
  35. crawlo/extension/log_interval.py +57 -57
  36. crawlo/extension/log_stats.py +81 -81
  37. crawlo/extension/logging_extension.py +43 -43
  38. crawlo/extension/memory_monitor.py +104 -104
  39. crawlo/extension/performance_profiler.py +133 -133
  40. crawlo/extension/request_recorder.py +107 -107
  41. crawlo/filters/__init__.py +154 -154
  42. crawlo/filters/aioredis_filter.py +280 -280
  43. crawlo/filters/memory_filter.py +269 -269
  44. crawlo/items/__init__.py +23 -23
  45. crawlo/items/base.py +21 -21
  46. crawlo/items/fields.py +53 -53
  47. crawlo/items/items.py +104 -104
  48. crawlo/middleware/__init__.py +21 -21
  49. crawlo/middleware/default_header.py +132 -32
  50. crawlo/middleware/download_delay.py +105 -28
  51. crawlo/middleware/middleware_manager.py +135 -135
  52. crawlo/middleware/offsite.py +116 -0
  53. crawlo/middleware/proxy.py +366 -272
  54. crawlo/middleware/request_ignore.py +88 -30
  55. crawlo/middleware/response_code.py +164 -18
  56. crawlo/middleware/response_filter.py +138 -26
  57. crawlo/middleware/retry.py +124 -124
  58. crawlo/mode_manager.py +211 -211
  59. crawlo/network/__init__.py +21 -21
  60. crawlo/network/request.py +338 -338
  61. crawlo/network/response.py +359 -359
  62. crawlo/pipelines/__init__.py +21 -21
  63. crawlo/pipelines/bloom_dedup_pipeline.py +156 -156
  64. crawlo/pipelines/console_pipeline.py +39 -39
  65. crawlo/pipelines/csv_pipeline.py +316 -316
  66. crawlo/pipelines/database_dedup_pipeline.py +224 -224
  67. crawlo/pipelines/json_pipeline.py +218 -218
  68. crawlo/pipelines/memory_dedup_pipeline.py +115 -115
  69. crawlo/pipelines/mongo_pipeline.py +131 -131
  70. crawlo/pipelines/mysql_pipeline.py +316 -316
  71. crawlo/pipelines/pipeline_manager.py +61 -61
  72. crawlo/pipelines/redis_dedup_pipeline.py +167 -167
  73. crawlo/project.py +187 -187
  74. crawlo/queue/pqueue.py +37 -37
  75. crawlo/queue/queue_manager.py +337 -337
  76. crawlo/queue/redis_priority_queue.py +298 -298
  77. crawlo/settings/__init__.py +7 -7
  78. crawlo/settings/default_settings.py +226 -219
  79. crawlo/settings/setting_manager.py +122 -122
  80. crawlo/spider/__init__.py +639 -639
  81. crawlo/stats_collector.py +59 -59
  82. crawlo/subscriber.py +130 -130
  83. crawlo/task_manager.py +30 -30
  84. crawlo/templates/crawlo.cfg.tmpl +10 -10
  85. crawlo/templates/project/__init__.py.tmpl +3 -3
  86. crawlo/templates/project/items.py.tmpl +17 -17
  87. crawlo/templates/project/middlewares.py.tmpl +118 -109
  88. crawlo/templates/project/pipelines.py.tmpl +96 -96
  89. crawlo/templates/project/run.py.tmpl +45 -45
  90. crawlo/templates/project/settings.py.tmpl +327 -326
  91. crawlo/templates/project/settings_distributed.py.tmpl +119 -119
  92. crawlo/templates/project/settings_gentle.py.tmpl +94 -94
  93. crawlo/templates/project/settings_high_performance.py.tmpl +151 -151
  94. crawlo/templates/project/settings_simple.py.tmpl +68 -68
  95. crawlo/templates/project/spiders/__init__.py.tmpl +5 -5
  96. crawlo/templates/spider/spider.py.tmpl +143 -141
  97. crawlo/tools/__init__.py +182 -182
  98. crawlo/tools/anti_crawler.py +268 -268
  99. crawlo/tools/authenticated_proxy.py +240 -240
  100. crawlo/tools/data_validator.py +180 -180
  101. crawlo/tools/date_tools.py +35 -35
  102. crawlo/tools/distributed_coordinator.py +386 -386
  103. crawlo/tools/retry_mechanism.py +220 -220
  104. crawlo/tools/scenario_adapter.py +262 -262
  105. crawlo/utils/__init__.py +35 -35
  106. crawlo/utils/batch_processor.py +260 -260
  107. crawlo/utils/controlled_spider_mixin.py +439 -439
  108. crawlo/utils/date_tools.py +290 -290
  109. crawlo/utils/db_helper.py +343 -343
  110. crawlo/utils/enhanced_error_handler.py +359 -359
  111. crawlo/utils/env_config.py +105 -105
  112. crawlo/utils/error_handler.py +125 -125
  113. crawlo/utils/func_tools.py +82 -82
  114. crawlo/utils/large_scale_config.py +286 -286
  115. crawlo/utils/large_scale_helper.py +343 -343
  116. crawlo/utils/log.py +128 -128
  117. crawlo/utils/performance_monitor.py +284 -284
  118. crawlo/utils/queue_helper.py +175 -175
  119. crawlo/utils/redis_connection_pool.py +334 -334
  120. crawlo/utils/redis_key_validator.py +199 -199
  121. crawlo/utils/request.py +267 -267
  122. crawlo/utils/request_serializer.py +219 -219
  123. crawlo/utils/spider_loader.py +62 -62
  124. crawlo/utils/system.py +11 -11
  125. crawlo/utils/tools.py +4 -4
  126. crawlo/utils/url.py +39 -39
  127. {crawlo-1.2.0.dist-info → crawlo-1.2.1.dist-info}/METADATA +692 -697
  128. crawlo-1.2.1.dist-info/RECORD +220 -0
  129. examples/__init__.py +7 -7
  130. examples/aiohttp_settings.py +42 -0
  131. examples/curl_cffi_settings.py +41 -0
  132. examples/default_header_middleware_example.py +107 -0
  133. examples/default_header_spider_example.py +129 -0
  134. examples/download_delay_middleware_example.py +160 -0
  135. examples/httpx_settings.py +42 -0
  136. examples/multi_downloader_proxy_example.py +81 -0
  137. examples/offsite_middleware_example.py +55 -0
  138. examples/offsite_spider_example.py +107 -0
  139. examples/proxy_spider_example.py +166 -0
  140. examples/request_ignore_middleware_example.py +51 -0
  141. examples/request_ignore_spider_example.py +99 -0
  142. examples/response_code_middleware_example.py +52 -0
  143. examples/response_filter_middleware_example.py +67 -0
  144. examples/tong_hua_shun_settings.py +62 -0
  145. examples/tong_hua_shun_spider.py +170 -0
  146. tests/DOUBLE_CRAWLO_PREFIX_FIX_REPORT.md +81 -81
  147. tests/__init__.py +7 -7
  148. tests/advanced_tools_example.py +275 -275
  149. tests/authenticated_proxy_example.py +236 -236
  150. tests/cleaners_example.py +160 -160
  151. tests/config_validation_demo.py +102 -102
  152. tests/controlled_spider_example.py +205 -205
  153. tests/date_tools_example.py +180 -180
  154. tests/dynamic_loading_example.py +523 -523
  155. tests/dynamic_loading_test.py +104 -104
  156. tests/env_config_example.py +133 -133
  157. tests/error_handling_example.py +171 -171
  158. tests/redis_key_validation_demo.py +130 -130
  159. tests/response_improvements_example.py +144 -144
  160. tests/test_advanced_tools.py +148 -148
  161. tests/test_all_redis_key_configs.py +145 -145
  162. tests/test_authenticated_proxy.py +141 -141
  163. tests/test_cleaners.py +54 -54
  164. tests/test_comprehensive.py +146 -146
  165. tests/test_config_validator.py +193 -193
  166. tests/test_crawlo_proxy_integration.py +173 -0
  167. tests/test_date_tools.py +123 -123
  168. tests/test_default_header_middleware.py +159 -0
  169. tests/test_double_crawlo_fix.py +207 -207
  170. tests/test_double_crawlo_fix_simple.py +124 -124
  171. tests/test_download_delay_middleware.py +222 -0
  172. tests/test_downloader_proxy_compatibility.py +269 -0
  173. tests/test_dynamic_downloaders_proxy.py +124 -124
  174. tests/test_dynamic_proxy.py +92 -92
  175. tests/test_dynamic_proxy_config.py +146 -146
  176. tests/test_dynamic_proxy_real.py +109 -109
  177. tests/test_edge_cases.py +303 -303
  178. tests/test_enhanced_error_handler.py +270 -270
  179. tests/test_env_config.py +121 -121
  180. tests/test_error_handler_compatibility.py +112 -112
  181. tests/test_final_validation.py +153 -153
  182. tests/test_framework_env_usage.py +103 -103
  183. tests/test_integration.py +356 -356
  184. tests/test_item_dedup_redis_key.py +122 -122
  185. tests/test_offsite_middleware.py +222 -0
  186. tests/test_parsel.py +29 -29
  187. tests/test_performance.py +327 -327
  188. tests/test_proxy_api.py +265 -0
  189. tests/test_proxy_health_check.py +32 -32
  190. tests/test_proxy_middleware.py +122 -0
  191. tests/test_proxy_middleware_enhanced.py +217 -0
  192. tests/test_proxy_middleware_integration.py +136 -136
  193. tests/test_proxy_providers.py +56 -56
  194. tests/test_proxy_stats.py +19 -19
  195. tests/test_proxy_strategies.py +59 -59
  196. tests/test_queue_manager_double_crawlo.py +173 -173
  197. tests/test_queue_manager_redis_key.py +176 -176
  198. tests/test_real_scenario_proxy.py +196 -0
  199. tests/test_redis_config.py +28 -28
  200. tests/test_redis_connection_pool.py +294 -294
  201. tests/test_redis_key_naming.py +181 -181
  202. tests/test_redis_key_validator.py +123 -123
  203. tests/test_redis_queue.py +224 -224
  204. tests/test_request_ignore_middleware.py +183 -0
  205. tests/test_request_serialization.py +70 -70
  206. tests/test_response_code_middleware.py +350 -0
  207. tests/test_response_filter_middleware.py +428 -0
  208. tests/test_response_improvements.py +152 -152
  209. tests/test_retry_middleware.py +242 -0
  210. tests/test_scheduler.py +241 -241
  211. tests/test_simple_response.py +61 -61
  212. tests/test_telecom_spider_redis_key.py +205 -205
  213. tests/test_template_content.py +87 -87
  214. tests/test_template_redis_key.py +134 -134
  215. tests/test_tools.py +153 -153
  216. tests/tools_example.py +257 -257
  217. crawlo-1.2.0.dist-info/RECORD +0 -190
  218. {crawlo-1.2.0.dist-info → crawlo-1.2.1.dist-info}/WHEEL +0 -0
  219. {crawlo-1.2.0.dist-info → crawlo-1.2.1.dist-info}/entry_points.txt +0 -0
  220. {crawlo-1.2.0.dist-info → crawlo-1.2.1.dist-info}/top_level.txt +0 -0
crawlo/items/fields.py CHANGED
@@ -1,54 +1,54 @@
(Lines 1-53 are removed and re-added with identical content, an apparent whitespace or line-ending rewrite; the file is shown once below.)

#!/usr/bin/python
# -*- coding: UTF-8 -*-
"""
Field class definition
"""

from typing import Any, Optional, Type


class Field:
    """
    Field definition class, used to declare an Item field's attributes and validation rules
    """
    def __init__(
        self,
        nullable: bool = True,
        *,
        default: Any = None,
        field_type: Optional[Type] = None,
        max_length: Optional[int] = None,
        description: str = ""
    ):
        self.nullable = nullable
        self.default = default
        self.field_type = field_type
        self.max_length = max_length
        self.description = description

    def validate(self, value: Any, field_name: str = "") -> Any:
        """
        Validate that a value satisfies this field's rules
        """
        if value is None or (isinstance(value, str) and value.strip() == ""):
            if self.default is not None:
                return self.default
            elif not self.nullable:
                raise ValueError(
                    f"Field '{field_name}' must not be empty."
                )

        if value is not None and not (isinstance(value, str) and value.strip() == ""):
            if self.field_type and not isinstance(value, self.field_type):
                raise TypeError(
                    f"Field '{field_name}' has the wrong type: expected {self.field_type}, got {type(value)}, value: {value!r}"
                )
            if self.max_length and len(str(value)) > self.max_length:
                raise ValueError(
                    f"Field '{field_name}' exceeds the maximum length: limit {self.max_length}, actual {len(str(value))}, value: {value!r}"
                )

        return value

    def __repr__(self):
        return f"<Field nullable={self.nullable} type={self.field_type} default={self.default}>"
crawlo/items/items.py CHANGED
@@ -1,105 +1,105 @@
(Lines 1-104 are removed and re-added with identical content, an apparent whitespace or line-ending rewrite; the file is shown once below.)

#!/usr/bin/python
# -*- coding: UTF-8 -*-
"""
Item class definition
"""
from copy import deepcopy
from pprint import pformat
from typing import Any, Iterator, Dict
from collections.abc import MutableMapping

from .base import ItemMeta
from crawlo.exceptions import ItemInitError, ItemAttributeError


class Item(MutableMapping, metaclass=ItemMeta):
    """
    Base class for data items, used to define structured data
    """
    FIELDS: Dict[str, Any] = {}

    def __init__(self, *args, **kwargs):
        if args:
            raise ItemInitError(f"{self.__class__.__name__} does not accept positional arguments: {args}; use keyword arguments instead.")

        self._values: Dict[str, Any] = {}

        # Initialize fields, filling in default values
        for field_name, field_obj in self.FIELDS.items():
            if field_obj.default is not None:
                self._values[field_name] = field_obj.default

        # Override defaults or set new values
        for key, value in kwargs.items():
            self[key] = value

    def __getitem__(self, item: str) -> Any:
        return self._values[item]

    def __setitem__(self, key: str, value: Any) -> None:
        if key not in self.FIELDS:
            raise KeyError(f"{self.__class__.__name__} has no field: {key}")

        field = self.FIELDS[key]
        try:
            validated_value = field.validate(value, field_name=key)
            self._values[key] = validated_value
        except Exception as e:
            error_lines = [
                "",
                "[Field validation failed]",
                f"Field name: {key}",
                f"Value type: {type(value)}",
                f"Raw value: {repr(value)}",
                f"Nullable: {field.nullable}",
                f"Reason: {str(e)}",
                ""
            ]
            detailed_error = "\n".join(error_lines)
            raise type(e)(detailed_error) from e

    def __delitem__(self, key: str) -> None:
        del self._values[key]

    def __setattr__(self, key: str, value: Any) -> None:
        if not key.startswith("_"):
            raise AttributeError(
                f"To set a field value, use item[{key!r}] = {value!r}"
            )
        super().__setattr__(key, value)

    def __getattr__(self, item: str) -> Any:
        raise AttributeError(
            f"{self.__class__.__name__} has no field: {item}. "
            f"Declare the field on `{self.__class__.__name__}` first, then access it via item[{item!r}]."
        )

    def __getattribute__(self, item: str) -> Any:
        try:
            field = super().__getattribute__("FIELDS")
            if isinstance(field, dict) and item in field:
                raise ItemAttributeError(
                    f"To read a field value, use item[{item!r}]"
                )
        except AttributeError:
            pass  # If FIELDS is not defined yet, fall through
        return super().__getattribute__(item)

    def __repr__(self) -> str:
        return pformat(dict(self))

    __str__ = __repr__

    def __iter__(self) -> Iterator[str]:
        return iter(self._values)

    def __len__(self) -> int:
        return len(self._values)

    def to_dict(self) -> Dict[str, Any]:
        """Convert to a plain dict"""
        return dict(self)

    def copy(self) -> "Item":
        """Deep-copy this Item"""
        return deepcopy(self)
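
A short sketch of how Item is typically subclassed, assuming ItemMeta (imported from .base above) collects class-level Field declarations into FIELDS; ProductItem and its fields are hypothetical:

from crawlo.items.items import Item
from crawlo.items.fields import Field


class ProductItem(Item):
    name = Field(nullable=False, field_type=str)
    price = Field(default=0.0, field_type=float)


item = ProductItem(name="Widget")  # keyword arguments only; positional args raise ItemInitError
item["price"] = 9.99               # routed through Field.validate before storage
print(item.to_dict())              # {'name': 'Widget', 'price': 9.99}
item["sku"] = "A1"                 # KeyError: ProductItem has no field: sku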
crawlo/middleware/__init__.py CHANGED
@@ -1,21 +1,21 @@
(Lines 1-21 are removed and re-added with identical content, an apparent whitespace or line-ending rewrite; the file is shown once below.)

#!/usr/bin/python
# -*- coding:UTF-8 -*-
from crawlo import Request, Response


class BaseMiddleware(object):
    def process_request(self, request, spider) -> None | Request | Response:
        # Request pre-processing
        pass

    def process_response(self, request, response, spider) -> Request | Response:
        # Response pre-processing
        pass

    def process_exception(self, request, exp, spider) -> None | Request | Response:
        # Exception pre-processing
        pass

    @classmethod
    def create_instance(cls, crawler):
        return cls()
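
A minimal custom middleware built on this interface might look like the following sketch; TracingMiddleware and the X-Trace header are illustrative, not part of the package:

from crawlo.middleware import BaseMiddleware


class TracingMiddleware(BaseMiddleware):
    def process_request(self, request, spider):
        # Returning None lets the request continue through the middleware chain
        request.headers.setdefault("X-Trace", spider.name)
        return None

    def process_response(self, request, response, spider):
        # Must hand back a Request or Response; pass the response through unchanged
        return response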
crawlo/middleware/default_header.py CHANGED
@@ -1,32 +1,132 @@
- #!/usr/bin/python
- # -*- coding:UTF-8 -*-
- from crawlo.event import spider_opened
-
-
- class DefaultHeaderMiddleware(object):
-
-     def __init__(self, user_agent, headers, spider):
-         self.user_agent = user_agent
-         self.headers = headers
-         self.spider = spider
-
-     @classmethod
-     def create_instance(cls, crawler):
-         o = cls(
-             user_agent=crawler.settings.get('USER_AGENT'),
-             headers=crawler.settings.get_dict('DEFAULT_HEADERS'),
-             spider=crawler.spider
-         )
-         crawler.subscriber.subscribe(o.spider_opened, event=spider_opened)
-         return o
-
-     async def spider_opened(self):
-         self.user_agent = getattr(self.spider, 'user_agent', self.user_agent)
-         self.headers = getattr(self.spider, 'headers', self.headers)
-         if self.user_agent:
-             self.headers.setdefault('User-Agent', self.user_agent)
-
-     def process_request(self, request, _spider):
-         if self.headers:
-             for key, value in self.headers.items():
-                 request.headers.setdefault(key, value)
+ #!/usr/bin/python
+ # -*- coding:UTF-8 -*-
+ """
+ DefaultHeaderMiddleware
+ Adds default request headers to every request, with support for features such as random User-Agent rotation
+ """
+
+ import random
+ from crawlo.utils.log import get_logger
+ from crawlo.exceptions import NotConfiguredError
+ # Built-in User-Agent data
+ from crawlo.data.user_agents import get_user_agents
+
+
+ class DefaultHeaderMiddleware(object):
+     """
+     DefaultHeaderMiddleware
+     Adds default request headers (including User-Agent) to every request, with optional randomization
+     """
+
+     def __init__(self, settings, log_level):
+         """
+         Initialize the middleware
+         """
+         self.logger = get_logger(self.__class__.__name__, log_level)
+
+         # Default request headers
+         self.headers = settings.get_dict('DEFAULT_REQUEST_HEADERS', {})
+
+         # User-Agent setting
+         self.user_agent = settings.get('USER_AGENT')
+
+         # List of User-Agents to rotate through
+         self.user_agents = settings.get_list('USER_AGENTS', [])
+
+         # Random header pools
+         self.random_headers = settings.get_dict('RANDOM_HEADERS', {})
+
+         # Randomization switch
+         self.randomness = settings.get_bool("RANDOMNESS", False)
+
+         # Whether random User-Agent rotation is enabled
+         self.random_user_agent_enabled = settings.get_bool("RANDOM_USER_AGENT_ENABLED", False)
+
+         # Device type for the built-in User-Agent list
+         self.user_agent_device_type = settings.get("USER_AGENT_DEVICE_TYPE", "all")
+
+         # If no default headers, User-Agent, or randomization is configured, disable this middleware
+         if not self.headers and not self.user_agent and not self.user_agents and not self.random_headers:
+             raise NotConfiguredError("DEFAULT_REQUEST_HEADERS, USER_AGENT, and random header settings are all unset; DefaultHeaderMiddleware disabled")
+
+         # If a User-Agent is configured, add it to the default headers
+         if self.user_agent:
+             self.headers.setdefault('User-Agent', self.user_agent)
+
+         # If random User-Agent is enabled but no list was provided, use the built-in list
+         if self.random_user_agent_enabled and not self.user_agents:
+             self.user_agents = get_user_agents(self.user_agent_device_type)
+
+         self.logger.info(f"DefaultHeaderMiddleware enabled: "
+                          f"default headers={len(self.headers)}, "
+                          f"User-Agent list={len(self.user_agents)}, "
+                          f"random headers={len(self.random_headers)}, "
+                          f"randomization={'on' if self.randomness else 'off'}")
+
+     @classmethod
+     def create_instance(cls, crawler):
+         """
+         Create the middleware instance
+         """
+         o = cls(
+             settings=crawler.settings,
+             log_level=crawler.settings.get('LOG_LEVEL')
+         )
+         return o
+
+     def _get_random_user_agent(self):
+         """
+         Pick a random User-Agent
+         """
+         if self.user_agents:
+             return random.choice(self.user_agents)
+         return None
+
+     def _apply_random_headers(self, request):
+         """
+         Apply randomized headers
+         """
+         if not self.random_headers:
+             return
+
+         for header_name, header_values in self.random_headers.items():
+             # If the configured value is a list, pick one entry at random
+             if isinstance(header_values, (list, tuple)):
+                 header_value = random.choice(header_values)
+             else:
+                 header_value = header_values
+
+             # Only add the header if the request does not already set it
+             if header_name not in request.headers:
+                 request.headers[header_name] = header_value
+                 self.logger.debug(f"Added random header to {request.url}: {header_name}={header_value[:50]}...")
+
+     def process_request(self, request, _spider):
+         """
+         Process the request, adding default headers
+         """
+         # Add default request headers
+         if self.headers:
+             added_headers = []
+             for key, value in self.headers.items():
+                 # Only add a header if the request does not already set it
+                 if key not in request.headers:
+                     request.headers[key] = value
+                     added_headers.append(key)
+
+             # Log the added headers (debug level only)
+             if added_headers and self.logger.isEnabledFor(10):  # DEBUG level
+                 self.logger.debug(f"Added {len(added_headers)} default headers to {request.url}: {added_headers}")
+
+         # Random User-Agent
+         if self.random_user_agent_enabled and 'User-Agent' not in request.headers:
+             random_ua = self._get_random_user_agent()
+             if random_ua:
+                 request.headers['User-Agent'] = random_ua
+                 self.logger.debug(f"Set random User-Agent for {request.url}: {random_ua[:50]}...")
+
+         # Random headers
+         if self.randomness:
+             self._apply_random_headers(request)
+
+         return None
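
The rewritten middleware is driven entirely by settings. A plausible configuration fragment, with illustrative values for the setting names read above:

# settings.py (illustrative values)
DEFAULT_REQUEST_HEADERS = {
    "Accept": "text/html,application/xhtml+xml",
    "Accept-Language": "en-US,en;q=0.9",
}
USER_AGENT = "crawlo/1.2.1"
RANDOM_USER_AGENT_ENABLED = True    # with no USER_AGENTS list, falls back to crawlo.data.user_agents
USER_AGENT_DEVICE_TYPE = "all"      # passed to get_user_agents()
RANDOMNESS = True
RANDOM_HEADERS = {
    # a list value means one entry is picked at random per request
    "Accept-Encoding": ["gzip, deflate", "gzip, deflate, br"],
}

Note that headers already set on a request always win: every path in process_request only fills in headers the request does not yet carry.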
crawlo/middleware/download_delay.py CHANGED
@@ -1,28 +1,105 @@
- #!/usr/bin/python
- # -*- coding:UTF-8 -*-
- from asyncio import sleep
- from random import uniform
- from crawlo.utils.log import get_logger
- from crawlo.exceptions import NotConfiguredError
-
-
- class DownloadDelayMiddleware(object):
-
-     def __init__(self, settings, log_level):
-         self.delay = settings.get_float("DOWNLOAD_DELAY")
-         if not self.delay:
-             raise NotConfiguredError
-         self.randomness = settings.get_bool("RANDOMNESS")
-         self.floor, self.upper = settings.get_list("RANDOM_RANGE")
-         self.logger = get_logger(self.__class__.__name__, log_level)
-
-     @classmethod
-     def create_instance(cls, crawler):
-         o = cls(settings=crawler.settings, log_level=crawler.settings.get('LOG_LEVEL'))
-         return o
-
-     async def process_request(self, _request, _spider):
-         if self.randomness:
-             await sleep(uniform(self.delay * self.floor, self.delay * self.upper))
-         else:
-             await sleep(self.delay)
+ #!/usr/bin/python
+ # -*- coding:UTF-8 -*-
+ """
+ DownloadDelayMiddleware
+ Controls the delay between requests; supports both fixed and randomized delays
+ """
+
+ from asyncio import sleep
+ from random import uniform
+ from crawlo.utils.log import get_logger
+ from crawlo.exceptions import NotConfiguredError
+
+
+ class DownloadDelayMiddleware(object):
+     """
+     DownloadDelayMiddleware
+     Controls the delay between requests; supports both fixed and randomized delays
+
+     Features:
+     - Fixed delay
+     - Randomized delay
+     - Detailed logging
+     - Delay statistics
+     """
+
+     def __init__(self, settings, log_level, stats=None):
+         """
+         Initialize the middleware
+
+         Args:
+             settings: settings manager
+             log_level: log level
+             stats: statistics collector (optional)
+         """
+         self.delay = settings.get_float("DOWNLOAD_DELAY")
+         if not self.delay:
+             raise NotConfiguredError("DOWNLOAD_DELAY not set or is zero")
+
+         self.randomness = settings.get_bool("RANDOMNESS", False)
+
+         # Read the random range defensively
+         random_range = settings.get_list("RANDOM_RANGE")
+         if len(random_range) >= 2:
+             try:
+                 self.floor = float(random_range[0])
+                 self.upper = float(random_range[1])
+             except (ValueError, TypeError):
+                 # Invalid configuration: fall back to defaults
+                 self.floor, self.upper = 0.5, 1.5
+         else:
+             # Incomplete configuration: fall back to defaults
+             self.floor, self.upper = 0.5, 1.5
+
+         self.logger = get_logger(self.__class__.__name__, log_level)
+         self.stats = stats
+
+     @classmethod
+     def create_instance(cls, crawler):
+         """
+         Create the middleware instance
+
+         Args:
+             crawler: the crawler instance
+
+         Returns:
+             DownloadDelayMiddleware: the middleware instance
+         """
+         o = cls(
+             settings=crawler.settings,
+             log_level=crawler.settings.get('LOG_LEVEL'),
+             stats=getattr(crawler, 'stats', None)
+         )
+         return o
+
+     async def process_request(self, _request, _spider):
+         """
+         Process the request, applying the delay
+
+         Args:
+             _request: the request object
+             _spider: the spider instance
+         """
+         if self.randomness:
+             # Compute a random delay
+             delay_time = uniform(self.delay * self.floor, self.delay * self.upper)
+             await sleep(delay_time)
+
+             # Record statistics
+             if self.stats:
+                 self.stats.inc_value('download_delay/random_count')
+                 self.stats.inc_value('download_delay/random_total_time', delay_time)
+
+             # Log
+             self.logger.debug(f"Applied random delay: {delay_time:.2f}s (range: {self.delay * self.floor:.2f} - {self.delay * self.upper:.2f})")
+         else:
+             # Apply the fixed delay
+             await sleep(self.delay)
+
+             # Record statistics
+             if self.stats:
+                 self.stats.inc_value('download_delay/fixed_count')
+                 self.stats.inc_value('download_delay/fixed_total_time', self.delay)
+
+             # Log
+             self.logger.debug(f"Applied fixed delay: {self.delay:.2f}s")