crawlo-1.2.3-py3-none-any.whl → crawlo-1.2.5-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of crawlo might be problematic.

Files changed (222)
  1. crawlo/__init__.py +61 -61
  2. crawlo/__version__.py +1 -1
  3. crawlo/cleaners/__init__.py +60 -60
  4. crawlo/cleaners/data_formatter.py +225 -225
  5. crawlo/cleaners/encoding_converter.py +125 -125
  6. crawlo/cleaners/text_cleaner.py +232 -232
  7. crawlo/cli.py +88 -81
  8. crawlo/commands/__init__.py +14 -14
  9. crawlo/commands/check.py +594 -594
  10. crawlo/commands/genspider.py +151 -151
  11. crawlo/commands/help.py +144 -142
  12. crawlo/commands/list.py +155 -155
  13. crawlo/commands/run.py +323 -292
  14. crawlo/commands/startproject.py +436 -417
  15. crawlo/commands/stats.py +187 -187
  16. crawlo/commands/utils.py +186 -186
  17. crawlo/config.py +312 -312
  18. crawlo/config_validator.py +251 -251
  19. crawlo/core/__init__.py +2 -2
  20. crawlo/core/engine.py +354 -354
  21. crawlo/core/processor.py +40 -40
  22. crawlo/core/scheduler.py +143 -143
  23. crawlo/crawler.py +1110 -1027
  24. crawlo/data/__init__.py +5 -5
  25. crawlo/data/user_agents.py +107 -107
  26. crawlo/downloader/__init__.py +266 -266
  27. crawlo/downloader/aiohttp_downloader.py +220 -220
  28. crawlo/downloader/cffi_downloader.py +256 -256
  29. crawlo/downloader/httpx_downloader.py +259 -259
  30. crawlo/downloader/hybrid_downloader.py +212 -212
  31. crawlo/downloader/playwright_downloader.py +402 -402
  32. crawlo/downloader/selenium_downloader.py +472 -472
  33. crawlo/event.py +11 -11
  34. crawlo/exceptions.py +81 -81
  35. crawlo/extension/__init__.py +37 -37
  36. crawlo/extension/health_check.py +141 -141
  37. crawlo/extension/log_interval.py +57 -57
  38. crawlo/extension/log_stats.py +81 -81
  39. crawlo/extension/logging_extension.py +43 -43
  40. crawlo/extension/memory_monitor.py +104 -104
  41. crawlo/extension/performance_profiler.py +133 -133
  42. crawlo/extension/request_recorder.py +107 -107
  43. crawlo/filters/__init__.py +154 -154
  44. crawlo/filters/aioredis_filter.py +280 -280
  45. crawlo/filters/memory_filter.py +269 -269
  46. crawlo/items/__init__.py +23 -23
  47. crawlo/items/base.py +21 -21
  48. crawlo/items/fields.py +52 -52
  49. crawlo/items/items.py +104 -104
  50. crawlo/middleware/__init__.py +21 -21
  51. crawlo/middleware/default_header.py +131 -131
  52. crawlo/middleware/download_delay.py +104 -104
  53. crawlo/middleware/middleware_manager.py +135 -135
  54. crawlo/middleware/offsite.py +114 -114
  55. crawlo/middleware/proxy.py +367 -367
  56. crawlo/middleware/request_ignore.py +86 -86
  57. crawlo/middleware/response_code.py +163 -163
  58. crawlo/middleware/response_filter.py +136 -136
  59. crawlo/middleware/retry.py +124 -124
  60. crawlo/mode_manager.py +211 -211
  61. crawlo/network/__init__.py +21 -21
  62. crawlo/network/request.py +338 -338
  63. crawlo/network/response.py +359 -359
  64. crawlo/pipelines/__init__.py +21 -21
  65. crawlo/pipelines/bloom_dedup_pipeline.py +156 -156
  66. crawlo/pipelines/console_pipeline.py +39 -39
  67. crawlo/pipelines/csv_pipeline.py +316 -316
  68. crawlo/pipelines/database_dedup_pipeline.py +222 -222
  69. crawlo/pipelines/json_pipeline.py +218 -218
  70. crawlo/pipelines/memory_dedup_pipeline.py +115 -115
  71. crawlo/pipelines/mongo_pipeline.py +131 -131
  72. crawlo/pipelines/mysql_pipeline.py +317 -317
  73. crawlo/pipelines/pipeline_manager.py +61 -61
  74. crawlo/pipelines/redis_dedup_pipeline.py +165 -165
  75. crawlo/project.py +279 -187
  76. crawlo/queue/pqueue.py +37 -37
  77. crawlo/queue/queue_manager.py +337 -337
  78. crawlo/queue/redis_priority_queue.py +298 -298
  79. crawlo/settings/__init__.py +7 -7
  80. crawlo/settings/default_settings.py +217 -226
  81. crawlo/settings/setting_manager.py +122 -122
  82. crawlo/spider/__init__.py +639 -639
  83. crawlo/stats_collector.py +59 -59
  84. crawlo/subscriber.py +129 -129
  85. crawlo/task_manager.py +30 -30
  86. crawlo/templates/crawlo.cfg.tmpl +10 -10
  87. crawlo/templates/project/__init__.py.tmpl +3 -3
  88. crawlo/templates/project/items.py.tmpl +17 -17
  89. crawlo/templates/project/middlewares.py.tmpl +118 -118
  90. crawlo/templates/project/pipelines.py.tmpl +96 -96
  91. crawlo/templates/project/settings.py.tmpl +324 -325
  92. crawlo/templates/project/settings_distributed.py.tmpl +154 -121
  93. crawlo/templates/project/settings_gentle.py.tmpl +127 -94
  94. crawlo/templates/project/settings_high_performance.py.tmpl +149 -151
  95. crawlo/templates/project/settings_simple.py.tmpl +102 -68
  96. crawlo/templates/project/spiders/__init__.py.tmpl +5 -5
  97. crawlo/templates/{project/run.py.tmpl → run.py.tmpl} +47 -45
  98. crawlo/templates/spider/spider.py.tmpl +143 -143
  99. crawlo/tools/__init__.py +182 -182
  100. crawlo/tools/anti_crawler.py +268 -268
  101. crawlo/tools/authenticated_proxy.py +240 -240
  102. crawlo/tools/data_validator.py +180 -180
  103. crawlo/tools/date_tools.py +35 -35
  104. crawlo/tools/distributed_coordinator.py +386 -386
  105. crawlo/tools/retry_mechanism.py +220 -220
  106. crawlo/tools/scenario_adapter.py +262 -262
  107. crawlo/utils/__init__.py +35 -35
  108. crawlo/utils/batch_processor.py +259 -259
  109. crawlo/utils/controlled_spider_mixin.py +439 -439
  110. crawlo/utils/date_tools.py +290 -290
  111. crawlo/utils/db_helper.py +343 -343
  112. crawlo/utils/enhanced_error_handler.py +356 -356
  113. crawlo/utils/env_config.py +105 -105
  114. crawlo/utils/error_handler.py +123 -123
  115. crawlo/utils/func_tools.py +82 -82
  116. crawlo/utils/large_scale_config.py +286 -286
  117. crawlo/utils/large_scale_helper.py +344 -344
  118. crawlo/utils/log.py +128 -128
  119. crawlo/utils/performance_monitor.py +285 -285
  120. crawlo/utils/queue_helper.py +175 -175
  121. crawlo/utils/redis_connection_pool.py +334 -334
  122. crawlo/utils/redis_key_validator.py +198 -198
  123. crawlo/utils/request.py +267 -267
  124. crawlo/utils/request_serializer.py +218 -218
  125. crawlo/utils/spider_loader.py +61 -61
  126. crawlo/utils/system.py +11 -11
  127. crawlo/utils/tools.py +4 -4
  128. crawlo/utils/url.py +39 -39
  129. {crawlo-1.2.3.dist-info → crawlo-1.2.5.dist-info}/METADATA +764 -692
  130. crawlo-1.2.5.dist-info/RECORD +206 -0
  131. examples/__init__.py +7 -7
  132. tests/DOUBLE_CRAWLO_PREFIX_FIX_REPORT.md +81 -81
  133. tests/__init__.py +7 -7
  134. tests/advanced_tools_example.py +275 -275
  135. tests/authenticated_proxy_example.py +236 -236
  136. tests/cleaners_example.py +160 -160
  137. tests/config_validation_demo.py +102 -102
  138. tests/controlled_spider_example.py +205 -205
  139. tests/date_tools_example.py +180 -180
  140. tests/dynamic_loading_example.py +523 -523
  141. tests/dynamic_loading_test.py +104 -104
  142. tests/env_config_example.py +133 -133
  143. tests/error_handling_example.py +171 -171
  144. tests/redis_key_validation_demo.py +130 -130
  145. tests/response_improvements_example.py +144 -144
  146. tests/test_advanced_tools.py +148 -148
  147. tests/test_all_redis_key_configs.py +145 -145
  148. tests/test_authenticated_proxy.py +141 -141
  149. tests/test_cleaners.py +54 -54
  150. tests/test_comprehensive.py +146 -146
  151. tests/test_config_validator.py +193 -193
  152. tests/test_crawlo_proxy_integration.py +172 -172
  153. tests/test_date_tools.py +123 -123
  154. tests/test_default_header_middleware.py +158 -158
  155. tests/test_double_crawlo_fix.py +207 -207
  156. tests/test_double_crawlo_fix_simple.py +124 -124
  157. tests/test_download_delay_middleware.py +221 -221
  158. tests/test_downloader_proxy_compatibility.py +268 -268
  159. tests/test_dynamic_downloaders_proxy.py +124 -124
  160. tests/test_dynamic_proxy.py +92 -92
  161. tests/test_dynamic_proxy_config.py +146 -146
  162. tests/test_dynamic_proxy_real.py +109 -109
  163. tests/test_edge_cases.py +303 -303
  164. tests/test_enhanced_error_handler.py +270 -270
  165. tests/test_env_config.py +121 -121
  166. tests/test_error_handler_compatibility.py +112 -112
  167. tests/test_final_validation.py +153 -153
  168. tests/test_framework_env_usage.py +103 -103
  169. tests/test_integration.py +356 -356
  170. tests/test_item_dedup_redis_key.py +122 -122
  171. tests/test_offsite_middleware.py +221 -221
  172. tests/test_parsel.py +29 -29
  173. tests/test_performance.py +327 -327
  174. tests/test_proxy_api.py +264 -264
  175. tests/test_proxy_health_check.py +32 -32
  176. tests/test_proxy_middleware.py +121 -121
  177. tests/test_proxy_middleware_enhanced.py +216 -216
  178. tests/test_proxy_middleware_integration.py +136 -136
  179. tests/test_proxy_providers.py +56 -56
  180. tests/test_proxy_stats.py +19 -19
  181. tests/test_proxy_strategies.py +59 -59
  182. tests/test_queue_manager_double_crawlo.py +173 -173
  183. tests/test_queue_manager_redis_key.py +176 -176
  184. tests/test_real_scenario_proxy.py +195 -195
  185. tests/test_redis_config.py +28 -28
  186. tests/test_redis_connection_pool.py +294 -294
  187. tests/test_redis_key_naming.py +181 -181
  188. tests/test_redis_key_validator.py +123 -123
  189. tests/test_redis_queue.py +224 -224
  190. tests/test_request_ignore_middleware.py +182 -182
  191. tests/test_request_serialization.py +70 -70
  192. tests/test_response_code_middleware.py +349 -349
  193. tests/test_response_filter_middleware.py +427 -427
  194. tests/test_response_improvements.py +152 -152
  195. tests/test_retry_middleware.py +241 -241
  196. tests/test_scheduler.py +241 -241
  197. tests/test_simple_response.py +61 -61
  198. tests/test_telecom_spider_redis_key.py +205 -205
  199. tests/test_template_content.py +87 -87
  200. tests/test_template_redis_key.py +134 -134
  201. tests/test_tools.py +153 -153
  202. tests/tools_example.py +257 -257
  203. crawlo-1.2.3.dist-info/RECORD +0 -222
  204. examples/aiohttp_settings.py +0 -42
  205. examples/curl_cffi_settings.py +0 -41
  206. examples/default_header_middleware_example.py +0 -107
  207. examples/default_header_spider_example.py +0 -129
  208. examples/download_delay_middleware_example.py +0 -160
  209. examples/httpx_settings.py +0 -42
  210. examples/multi_downloader_proxy_example.py +0 -81
  211. examples/offsite_middleware_example.py +0 -55
  212. examples/offsite_spider_example.py +0 -107
  213. examples/proxy_spider_example.py +0 -166
  214. examples/request_ignore_middleware_example.py +0 -51
  215. examples/request_ignore_spider_example.py +0 -99
  216. examples/response_code_middleware_example.py +0 -52
  217. examples/response_filter_middleware_example.py +0 -67
  218. examples/tong_hua_shun_settings.py +0 -62
  219. examples/tong_hua_shun_spider.py +0 -170
  220. {crawlo-1.2.3.dist-info → crawlo-1.2.5.dist-info}/WHEEL +0 -0
  221. {crawlo-1.2.3.dist-info → crawlo-1.2.5.dist-info}/entry_points.txt +0 -0
  222. {crawlo-1.2.3.dist-info → crawlo-1.2.5.dist-info}/top_level.txt +0 -0
crawlo/items/fields.py CHANGED
@@ -1,53 +1,53 @@
(Every line in this hunk is deleted and re-added with identical text; the change appears to affect only line endings. The file body is shown once below.)

#!/usr/bin/python
# -*- coding: UTF-8 -*-
"""
Field class definition
"""
from typing import Any, Optional, Type


class Field:
    """
    Field definition class, used to declare an Item field's attributes and validation rules
    """
    def __init__(
        self,
        nullable: bool = True,
        *,
        default: Any = None,
        field_type: Optional[Type] = None,
        max_length: Optional[int] = None,
        description: str = ""
    ):
        self.nullable = nullable
        self.default = default
        self.field_type = field_type
        self.max_length = max_length
        self.description = description

    def validate(self, value: Any, field_name: str = "") -> Any:
        """
        Validate that a value satisfies this field's rules
        """
        if value is None or (isinstance(value, str) and value.strip() == ""):
            if self.default is not None:
                return self.default
            elif not self.nullable:
                raise ValueError(
                    f"Field '{field_name}' must not be empty."
                )

        if value is not None and not (isinstance(value, str) and value.strip() == ""):
            if self.field_type and not isinstance(value, self.field_type):
                raise TypeError(
                    f"Field '{field_name}' has the wrong type: expected {self.field_type}, got {type(value)}, value: {value!r}"
                )
            if self.max_length and len(str(value)) > self.max_length:
                raise ValueError(
                    f"Field '{field_name}' is too long: max length {self.max_length}, actual length {len(str(value))}, value: {value!r}"
                )

        return value

    def __repr__(self):
        return f"<Field nullable={self.nullable} type={self.field_type} default={self.default}>"
crawlo/items/items.py CHANGED
@@ -1,105 +1,105 @@
(Same pattern as above: the removed and re-added lines are identical, so the file body is shown once.)

#!/usr/bin/python
# -*- coding: UTF-8 -*-
"""
Item class definition
"""
from copy import deepcopy
from pprint import pformat
from typing import Any, Iterator, Dict
from collections.abc import MutableMapping

from .base import ItemMeta
from crawlo.exceptions import ItemInitError, ItemAttributeError


class Item(MutableMapping, metaclass=ItemMeta):
    """
    Base class for data items, used to define structured data
    """
    FIELDS: Dict[str, Any] = {}

    def __init__(self, *args, **kwargs):
        if args:
            raise ItemInitError(f"{self.__class__.__name__} does not accept positional arguments: {args}; use keyword arguments instead.")

        self._values: Dict[str, Any] = {}

        # Initialize fields, filling in defaults
        for field_name, field_obj in self.FIELDS.items():
            if field_obj.default is not None:
                self._values[field_name] = field_obj.default

        # Override defaults or set new values
        for key, value in kwargs.items():
            self[key] = value

    def __getitem__(self, item: str) -> Any:
        return self._values[item]

    def __setitem__(self, key: str, value: Any) -> None:
        if key not in self.FIELDS:
            raise KeyError(f"{self.__class__.__name__} has no field: {key}")

        field = self.FIELDS[key]
        try:
            validated_value = field.validate(value, field_name=key)
            self._values[key] = validated_value
        except Exception as e:
            error_lines = [
                "",
                "[Field validation failed]",
                f"Field name: {key}",
                f"Value type: {type(value)}",
                f"Raw value: {repr(value)}",
                f"Nullable: {field.nullable}",
                f"Reason: {str(e)}",
                ""
            ]
            detailed_error = "\n".join(error_lines)
            raise type(e)(detailed_error) from e

    def __delitem__(self, key: str) -> None:
        del self._values[key]

    def __setattr__(self, key: str, value: Any) -> None:
        if not key.startswith("_"):
            raise AttributeError(
                f"To set a field value, use item[{key!r}] = {value!r}"
            )
        super().__setattr__(key, value)

    def __getattr__(self, item: str) -> Any:
        raise AttributeError(
            f"{self.__class__.__name__} has no field: {item}. "
            f"Declare the field on `{self.__class__.__name__}` first, then read it via item[{item!r}]."
        )

    def __getattribute__(self, item: str) -> Any:
        try:
            field = super().__getattribute__("FIELDS")
            if isinstance(field, dict) and item in field:
                raise ItemAttributeError(
                    f"To read a field value, use item[{item!r}]"
                )
        except AttributeError:
            pass  # FIELDS not defined yet; fall through
        return super().__getattribute__(item)

    def __repr__(self) -> str:
        return pformat(dict(self))

    __str__ = __repr__

    def __iter__(self) -> Iterator[str]:
        return iter(self._values)

    def __len__(self) -> int:
        return len(self._values)

    def to_dict(self) -> Dict[str, Any]:
        """Convert to a plain dict"""
        return dict(self)

    def copy(self) -> "Item":
        """Deep-copy this Item"""
        return deepcopy(self)
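
A short usage sketch follows. It assumes that ItemMeta (defined in crawlo/items/base.py, which is not part of this diff) collects Field class attributes into FIELDS; that behaviour is inferred from the code above, not shown here.

# Hypothetical Item subclass; assumes ItemMeta gathers Field attributes into FIELDS.
from crawlo.items.fields import Field
from crawlo.items.items import Item


class ProductItem(Item):
    name = Field(nullable=False, field_type=str, max_length=100)
    price = Field(default=0.0, field_type=float)


item = ProductItem(name="widget")   # keyword-only; positional args raise ItemInitError
print(item["name"], item["price"])  # widget 0.0 (price filled from its default)

item["price"] = 9.99                # routed through Field.validate via __setitem__
print(item.to_dict())               # {'name': 'widget', 'price': 9.99}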
crawlo/middleware/__init__.py CHANGED
@@ -1,21 +1,21 @@
(Same pattern: the removed and re-added lines are identical, so the file body is shown once.)

#!/usr/bin/python
# -*- coding:UTF-8 -*-
from crawlo import Request, Response


class BaseMiddleware(object):
    def process_request(self, request, spider) -> None | Request | Response:
        # Request pre-processing
        pass

    def process_response(self, request, response, spider) -> Request | Response:
        # Response pre-processing
        pass

    def process_exception(self, request, exp, spider) -> None | Request | Response:
        # Exception pre-processing
        pass

    @classmethod
    def create_instance(cls, crawler):
        return cls()
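
A hedged sketch of a custom middleware built on this base class. The import of BaseMiddleware from crawlo.middleware matches the inferred file location; TokenHeaderMiddleware and the API_TOKEN setting are hypothetical.

# Hypothetical middleware: stamp outgoing requests with an API token.
from crawlo import Request, Response
from crawlo.middleware import BaseMiddleware  # assumed import path


class TokenHeaderMiddleware(BaseMiddleware):
    def __init__(self, token):
        self.token = token

    @classmethod
    def create_instance(cls, crawler):
        # create_instance is the factory hook shown on the base class
        return cls(token=crawler.settings.get("API_TOKEN"))  # hypothetical setting

    def process_request(self, request, spider) -> None | Request | Response:
        # request.headers is treated as a dict, as DefaultHeaderMiddleware below does;
        # returning None lets the request continue down the middleware chain.
        if "Authorization" not in request.headers:
            request.headers["Authorization"] = f"Bearer {self.token}"
        return None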
crawlo/middleware/default_header.py CHANGED
@@ -1,132 +1,132 @@
(Same pattern: the removed and re-added lines are identical, so the file body is shown once.)

#!/usr/bin/python
# -*- coding:UTF-8 -*-
"""
DefaultHeaderMiddleware
Adds default headers to every request; supports random User-Agent rotation and more
"""

import random
from crawlo.utils.log import get_logger
from crawlo.exceptions import NotConfiguredError
# User-Agent data
from crawlo.data.user_agents import get_user_agents


class DefaultHeaderMiddleware(object):
    """
    DefaultHeaderMiddleware
    Adds default request headers, including User-Agent, with optional randomization
    """

    def __init__(self, settings, log_level):
        """
        Initialize the middleware
        """
        self.logger = get_logger(self.__class__.__name__, log_level)

        # Default request headers
        self.headers = settings.get_dict('DEFAULT_REQUEST_HEADERS', {})

        # Fixed User-Agent
        self.user_agent = settings.get('USER_AGENT')

        # Pool of User-Agents for random rotation
        self.user_agents = settings.get_list('USER_AGENTS', [])

        # Random header configuration
        self.random_headers = settings.get_dict('RANDOM_HEADERS', {})

        # Whether randomization is enabled
        self.randomness = settings.get_bool("RANDOMNESS", False)

        # Whether random User-Agent rotation is enabled
        self.random_user_agent_enabled = settings.get_bool("RANDOM_USER_AGENT_ENABLED", False)

        # Device type used to select User-Agents
        self.user_agent_device_type = settings.get("USER_AGENT_DEVICE_TYPE", "all")

        # If no default headers, no User-Agent, and no randomization are configured, disable the middleware
        if not self.headers and not self.user_agent and not self.user_agents and not self.random_headers:
            raise NotConfiguredError("DEFAULT_REQUEST_HEADERS, USER_AGENT, and random header settings are all unset; DefaultHeaderMiddleware disabled")

        # If a User-Agent is configured, add it to the default headers
        if self.user_agent:
            self.headers.setdefault('User-Agent', self.user_agent)

        # If random User-Agent is enabled but no list was provided, use the built-in list
        if self.random_user_agent_enabled and not self.user_agents:
            self.user_agents = get_user_agents(self.user_agent_device_type)

        self.logger.info(f"DefaultHeaderMiddleware enabled, configuration: "
                         f"default headers={len(self.headers)}, "
                         f"User-Agent list={len(self.user_agents)}, "
                         f"random headers={len(self.random_headers)}, "
                         f"randomization={'on' if self.randomness else 'off'}")

    @classmethod
    def create_instance(cls, crawler):
        """
        Create the middleware instance
        """
        o = cls(
            settings=crawler.settings,
            log_level=crawler.settings.get('LOG_LEVEL')
        )
        return o

    def _get_random_user_agent(self):
        """
        Pick a random User-Agent
        """
        if self.user_agents:
            return random.choice(self.user_agents)
        return None

    def _apply_random_headers(self, request):
        """
        Apply random request headers
        """
        if not self.random_headers:
            return

        for header_name, header_values in self.random_headers.items():
            # If header_values is a list, pick one value at random
            if isinstance(header_values, (list, tuple)):
                header_value = random.choice(header_values)
            else:
                header_value = header_values

            # Only add the header if the request does not already set it
            if header_name not in request.headers:
                request.headers[header_name] = header_value
                self.logger.debug(f"Added random header to {request.url}: {header_name}={header_value[:50]}...")

    def process_request(self, request, _spider):
        """
        Process a request by adding the default headers
        """
        # Add default request headers
        if self.headers:
            added_headers = []
            for key, value in self.headers.items():
                # Only add a header if the request does not already set it
                if key not in request.headers:
                    request.headers[key] = value
                    added_headers.append(key)

            # Log the added headers (debug level only)
            if added_headers and self.logger.isEnabledFor(10):  # DEBUG level
                self.logger.debug(f"Added {len(added_headers)} default headers to {request.url}: {added_headers}")

        # Random User-Agent
        if self.random_user_agent_enabled and 'User-Agent' not in request.headers:
            random_ua = self._get_random_user_agent()
            if random_ua:
                request.headers['User-Agent'] = random_ua
                self.logger.debug(f"Set random User-Agent for {request.url}: {random_ua[:50]}...")

        # Random request headers
        if self.randomness:
            self._apply_random_headers(request)

        return None
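
The middleware reads its behaviour entirely from settings. A hedged configuration sketch follows; the setting names are taken from the code above, while the values are illustrative only.

# Illustrative settings for DefaultHeaderMiddleware.
DEFAULT_REQUEST_HEADERS = {
    "Accept": "text/html,application/xhtml+xml",
    "Accept-Language": "en-US,en;q=0.9",
}

# Note: a fixed USER_AGENT is folded into the default headers via setdefault,
# which would then suppress the rotation below; leave it unset when rotating.
RANDOM_USER_AGENT_ENABLED = True   # rotate User-Agents per request
USER_AGENT_DEVICE_TYPE = "all"     # which built-in pool get_user_agents() returns
USER_AGENTS = []                   # empty list -> falls back to the built-in pool

RANDOMNESS = True                  # enables _apply_random_headers
RANDOM_HEADERS = {
    # a list/tuple value means "pick one at random"; a plain string is used as-is
    "Accept-Encoding": ["gzip, deflate", "gzip, deflate, br"],
}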