crawlo 1.4.4__py3-none-any.whl → 1.4.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.



Files changed (120)
  1. crawlo/__init__.py +11 -15
  2. crawlo/__version__.py +1 -1
  3. crawlo/commands/startproject.py +24 -0
  4. crawlo/core/engine.py +2 -2
  5. crawlo/core/scheduler.py +4 -4
  6. crawlo/crawler.py +8 -7
  7. crawlo/downloader/__init__.py +5 -2
  8. crawlo/downloader/cffi_downloader.py +3 -1
  9. crawlo/extension/__init__.py +2 -2
  10. crawlo/filters/aioredis_filter.py +8 -1
  11. crawlo/filters/memory_filter.py +8 -1
  12. crawlo/initialization/built_in.py +13 -4
  13. crawlo/initialization/core.py +5 -4
  14. crawlo/interfaces.py +24 -0
  15. crawlo/middleware/__init__.py +7 -4
  16. crawlo/middleware/middleware_manager.py +15 -8
  17. crawlo/middleware/proxy.py +171 -348
  18. crawlo/mode_manager.py +45 -11
  19. crawlo/network/response.py +374 -69
  20. crawlo/pipelines/mysql_pipeline.py +340 -189
  21. crawlo/pipelines/pipeline_manager.py +2 -2
  22. crawlo/project.py +2 -4
  23. crawlo/settings/default_settings.py +42 -30
  24. crawlo/stats_collector.py +10 -1
  25. crawlo/task_manager.py +2 -2
  26. crawlo/templates/project/items.py.tmpl +2 -2
  27. crawlo/templates/project/middlewares.py.tmpl +9 -89
  28. crawlo/templates/project/pipelines.py.tmpl +8 -68
  29. crawlo/templates/project/settings.py.tmpl +10 -55
  30. crawlo/templates/project/settings_distributed.py.tmpl +20 -22
  31. crawlo/templates/project/settings_gentle.py.tmpl +5 -0
  32. crawlo/templates/project/settings_high_performance.py.tmpl +5 -0
  33. crawlo/templates/project/settings_minimal.py.tmpl +25 -1
  34. crawlo/templates/project/settings_simple.py.tmpl +5 -0
  35. crawlo/templates/run.py.tmpl +1 -8
  36. crawlo/templates/spider/spider.py.tmpl +5 -108
  37. crawlo/tools/__init__.py +0 -11
  38. crawlo/utils/__init__.py +17 -1
  39. crawlo/utils/db_helper.py +226 -319
  40. crawlo/utils/error_handler.py +313 -67
  41. crawlo/utils/fingerprint.py +3 -4
  42. crawlo/utils/misc.py +82 -0
  43. crawlo/utils/request.py +55 -66
  44. crawlo/utils/selector_helper.py +138 -0
  45. crawlo/utils/spider_loader.py +185 -45
  46. crawlo/utils/text_helper.py +95 -0
  47. crawlo-1.4.6.dist-info/METADATA +329 -0
  48. {crawlo-1.4.4.dist-info → crawlo-1.4.6.dist-info}/RECORD +110 -69
  49. tests/authenticated_proxy_example.py +10 -6
  50. tests/bug_check_test.py +251 -0
  51. tests/direct_selector_helper_test.py +97 -0
  52. tests/explain_mysql_update_behavior.py +77 -0
  53. tests/ofweek_scrapy/ofweek_scrapy/items.py +12 -0
  54. tests/ofweek_scrapy/ofweek_scrapy/middlewares.py +100 -0
  55. tests/ofweek_scrapy/ofweek_scrapy/pipelines.py +13 -0
  56. tests/ofweek_scrapy/ofweek_scrapy/settings.py +85 -0
  57. tests/ofweek_scrapy/ofweek_scrapy/spiders/__init__.py +4 -0
  58. tests/ofweek_scrapy/ofweek_scrapy/spiders/ofweek_spider.py +162 -0
  59. tests/ofweek_scrapy/scrapy.cfg +11 -0
  60. tests/performance_comparison.py +4 -5
  61. tests/simple_crawlo_test.py +1 -2
  62. tests/simple_follow_test.py +39 -0
  63. tests/simple_response_selector_test.py +95 -0
  64. tests/simple_selector_helper_test.py +155 -0
  65. tests/simple_selector_test.py +208 -0
  66. tests/simple_url_test.py +74 -0
  67. tests/simulate_mysql_update_test.py +140 -0
  68. tests/test_asyncmy_usage.py +57 -0
  69. tests/test_crawler_process_import.py +39 -0
  70. tests/test_crawler_process_spider_modules.py +48 -0
  71. tests/test_crawlo_proxy_integration.py +8 -2
  72. tests/test_downloader_proxy_compatibility.py +24 -20
  73. tests/test_edge_cases.py +7 -5
  74. tests/test_encoding_core.py +57 -0
  75. tests/test_encoding_detection.py +127 -0
  76. tests/test_factory_compatibility.py +197 -0
  77. tests/test_mysql_pipeline_config.py +165 -0
  78. tests/test_mysql_pipeline_error.py +99 -0
  79. tests/test_mysql_pipeline_init_log.py +83 -0
  80. tests/test_mysql_pipeline_integration.py +133 -0
  81. tests/test_mysql_pipeline_refactor.py +144 -0
  82. tests/test_mysql_pipeline_refactor_simple.py +86 -0
  83. tests/test_mysql_pipeline_robustness.py +196 -0
  84. tests/test_mysql_pipeline_types.py +89 -0
  85. tests/test_mysql_update_columns.py +94 -0
  86. tests/test_optimized_selector_naming.py +101 -0
  87. tests/test_priority_behavior.py +18 -18
  88. tests/test_proxy_middleware.py +104 -8
  89. tests/test_proxy_middleware_enhanced.py +1 -5
  90. tests/test_proxy_middleware_integration.py +7 -2
  91. tests/test_proxy_middleware_refactored.py +25 -2
  92. tests/test_proxy_only.py +84 -0
  93. tests/test_proxy_with_downloader.py +153 -0
  94. tests/test_real_scenario_proxy.py +17 -17
  95. tests/test_response_follow.py +105 -0
  96. tests/test_response_selector_methods.py +93 -0
  97. tests/test_response_url_methods.py +71 -0
  98. tests/test_response_urljoin.py +87 -0
  99. tests/test_scrapy_style_encoding.py +113 -0
  100. tests/test_selector_helper.py +101 -0
  101. tests/test_selector_optimizations.py +147 -0
  102. tests/test_spider_loader.py +50 -0
  103. tests/test_spider_loader_comprehensive.py +70 -0
  104. tests/test_spiders/__init__.py +1 -0
  105. tests/test_spiders/test_spider.py +10 -0
  106. tests/verify_mysql_warnings.py +110 -0
  107. crawlo/middleware/simple_proxy.py +0 -65
  108. crawlo/tools/anti_crawler.py +0 -269
  109. crawlo/utils/class_loader.py +0 -26
  110. crawlo/utils/enhanced_error_handler.py +0 -357
  111. crawlo-1.4.4.dist-info/METADATA +0 -190
  112. tests/simple_log_test.py +0 -58
  113. tests/simple_test.py +0 -48
  114. tests/test_framework_logger.py +0 -67
  115. tests/test_framework_startup.py +0 -65
  116. tests/test_mode_change.py +0 -73
  117. {crawlo-1.4.4.dist-info → crawlo-1.4.6.dist-info}/WHEEL +0 -0
  118. {crawlo-1.4.4.dist-info → crawlo-1.4.6.dist-info}/entry_points.txt +0 -0
  119. {crawlo-1.4.4.dist-info → crawlo-1.4.6.dist-info}/top_level.txt +0 -0
  120. /tests/{final_command_test_report.md → ofweek_scrapy/ofweek_scrapy/__init__.py} +0 -0
@@ -0,0 +1,70 @@
+ #!/usr/bin/env python
+ # -*- coding: utf-8 -*-
+
+ """
+ Comprehensive tests for the SpiderLoader functionality.
+ """
+
+ import sys
+ import os
+ sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
+
+ from crawlo.utils.spider_loader import SpiderLoader
+ from crawlo.crawler import CrawlerProcess
+ from crawlo.settings.setting_manager import SettingManager
+
+
+ def test_spider_loader_comprehensive():
+     """Comprehensive test of the SpiderLoader functionality."""
+     print("Running comprehensive SpiderLoader tests...")
+
+     # 1. Basic SpiderLoader functionality
+     print("\n1. Basic SpiderLoader functionality")
+     settings = SettingManager({
+         'SPIDER_MODULES': ['tests.test_spiders'],
+         'SPIDER_LOADER_WARN_ONLY': True
+     })
+
+     loader = SpiderLoader.from_settings(settings)
+     spider_names = loader.list()
+     print(f"  Discovered spiders: {spider_names}")
+
+     if spider_names:
+         spider_name = spider_names[0]
+         spider_class = loader.load(spider_name)
+         print(f"  Successfully loaded spider: {spider_name} -> {spider_class}")
+
+     # 2. CrawlerProcess integration with SPIDER_MODULES
+     print("\n2. CrawlerProcess integration with SPIDER_MODULES")
+     process = CrawlerProcess(settings=settings)
+     process_spider_names = process.get_spider_names()
+     print(f"  Spiders discovered by CrawlerProcess: {process_spider_names}")
+
+     is_registered = process.is_spider_registered('test_spider')
+     print(f"  Is spider 'test_spider' registered: {is_registered}")
+
+     spider_class = process.get_spider_class('test_spider')
+     print(f"  Class of spider 'test_spider': {spider_class}")
+
+     # 3. Interface conformance
+     print("\n3. Interface conformance")
+     # Check whether SpiderLoader implements the methods required by the ISpiderLoader interface
+     from crawlo.interfaces import ISpiderLoader
+     # Since ISpiderLoader is a Protocol, we cannot use isinstance() directly;
+     # instead, check that the required methods are implemented.
+     required_methods = ['load', 'list', 'find_by_request']
+     implements_interface = all(hasattr(loader, method) for method in required_methods)
+     print(f"  SpiderLoader implements the ISpiderLoader interface: {implements_interface}")
+
+     # 4. Method availability
+     print("\n4. Method availability")
+     required_methods = ['load', 'list', 'find_by_request', 'get_all']
+     for method in required_methods:
+         has_method = hasattr(loader, method)
+         print(f"  SpiderLoader has method {method}: {has_method}")
+
+     print("\nComprehensive tests finished!")
+
+
+ if __name__ == '__main__':
+     test_spider_loader_comprehensive()
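
Note on the interface check in the test above: a plain typing.Protocol cannot be used with isinstance(), which is why the test falls back to hasattr() probes. A minimal sketch of the runtime-checkable alternative, assuming an interface with the same three methods (illustrative only; not necessarily how crawlo/interfaces.py declares ISpiderLoader):

    from typing import Protocol, runtime_checkable

    @runtime_checkable
    class ISpiderLoader(Protocol):  # hypothetical declaration, for illustration only
        def load(self, spider_name: str): ...
        def list(self): ...
        def find_by_request(self, request): ...

    # With @runtime_checkable, isinstance() only verifies that the named methods exist,
    # which is equivalent to the hasattr() probes used in the test above:
    # isinstance(loader, ISpiderLoader)
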
@@ -0,0 +1 @@
+ # Test spiders module
@@ -0,0 +1,10 @@
+ # -*- coding: utf-8 -*-
+
+ from crawlo.spider import Spider
+
+
+ class TestSpider(Spider):
+     name = 'test_spider'
+
+     def parse(self, response):
+         pass
@@ -0,0 +1,110 @@
+ # -*- coding: utf-8 -*-
+ """
+ Verify that the MySQL warnings have been resolved
+ by simulating the actual runtime environment.
+ """
+ import asyncio
+ import sys
+ import os
+
+ # Add the project root directory to the Python path
+ sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
+
+ from crawlo.utils.db_helper import SQLBuilder
+ from crawlo.pipelines.mysql_pipeline import BaseMySQLPipeline, AsyncmyMySQLPipeline, AiomysqlMySQLPipeline
+
+
+ def verify_mysql_syntax():
+     """Verify that the generated MySQL syntax is correct and produces no warnings."""
+     print("=== Verifying MySQL syntax ===\n")
+
+     # Data that mimics real-world usage
+     test_data = {
+         'title': '新一代OLED屏下光谱颜色传感技术:解锁显示新密码,重塑视觉新体验',
+         'publish_time': '2025-10-09 09:57',
+         'url': 'https://ee.ofweek.com/2025-10/ART-8460-2806-30671544.html',
+         'source': '',
+         'content': '在全球智能手机市场竞争日趋白热化的当下,消费者对手机屏幕显示效果的要求愈发严苛...'
+     }
+
+     # Configuration mimicking the ofweek_standalone project
+     update_columns = ('title', 'publish_time')
+
+     print("1. Checking the syntax generated by SQLBuilder...")
+     sql = SQLBuilder.make_insert(
+         table="news_items",
+         data=test_data,
+         auto_update=False,
+         update_columns=update_columns,
+         insert_ignore=False
+     )
+
+     print("Generated SQL:")
+     print(sql[:200] + "..." if len(sql) > 200 else sql)
+     print()
+
+     # Check for deprecated VALUES() function usage
+     if "VALUES(`title`)" in sql or "VALUES(`publish_time`)" in sql:
+         print("✗ Found deprecated VALUES() function usage, which will trigger warnings")
+         return False
+     else:
+         print("✓ No deprecated VALUES() function usage found")
+
+     if "AS `excluded`" in sql and "ON DUPLICATE KEY UPDATE" in sql:
+         print("✓ The new MySQL syntax is used correctly")
+     else:
+         print("✗ The new MySQL syntax is not used correctly")
+         return False
+
+     # Check the update clause
+     if "`title`=`excluded`.`title`" in sql and "`publish_time`=`excluded`.`publish_time`" in sql:
+         print("✓ The update clause correctly uses the excluded alias")
+     else:
+         print("✗ The update clause syntax is incorrect")
+         return False
+
+     print("\n2. Checking the batch insert syntax...")
+     batch_result = SQLBuilder.make_batch(
+         table="news_items",
+         datas=[test_data, test_data],
+         auto_update=False,
+         update_columns=update_columns
+     )
+
+     if batch_result:
+         batch_sql, _ = batch_result
+         print("Generated batch SQL:")
+         print(batch_sql[:200] + "..." if len(batch_sql) > 200 else batch_sql)
+         print()
+
+         # Check the batch insert syntax
+         if "VALUES(`title`)" in batch_sql or "VALUES(`publish_time`)" in batch_sql:
+             print("✗ Found deprecated VALUES() function usage in the batch insert, which will trigger warnings")
+             return False
+         else:
+             print("✓ No deprecated VALUES() function usage found in the batch insert")
+
+         if "AS `excluded`" in batch_sql and "ON DUPLICATE KEY UPDATE" in batch_sql:
+             print("✓ The batch insert correctly uses the new MySQL syntax")
+         else:
+             print("✗ The batch insert does not use the new MySQL syntax correctly")
+             return False
+
+         # Check the batch update clause
+         if "`title`=`excluded`.`title`" in batch_sql and "`publish_time`=`excluded`.`publish_time`" in batch_sql:
+             print("✓ The batch insert update clause correctly uses the excluded alias")
+         else:
+             print("✗ The batch insert update clause syntax is incorrect")
+             return False
+
+     print("\n=== Verification finished ===")
+     print("✓ All syntax checks passed; the MySQL VALUES() function deprecation warning should no longer appear")
+     return True
+
+
+ if __name__ == "__main__":
+     success = verify_mysql_syntax()
+     if success:
+         print("\n🎉 The MySQL syntax issue has been resolved!")
+     else:
+         print("\n❌ MySQL syntax issues remain and need to be fixed")
@@ -1,65 +0,0 @@
- #!/usr/bin/python
- # -*- coding: UTF-8 -*-
- """
- Simplified proxy middleware
- Provides basic proxy functionality while avoiding an overly complex implementation.
- """
- import random
- from typing import Optional, List
-
- from crawlo import Request, Response
- from crawlo.exceptions import NotConfiguredError
- from crawlo.utils.log import get_logger
-
-
- class SimpleProxyMiddleware:
-     """Simplified proxy middleware."""
-
-     def __init__(self, settings, log_level):
-         self.logger = get_logger(self.__class__.__name__, log_level)
-
-         # Fetch the proxy list
-         self.proxies: List[str] = settings.get("PROXY_LIST", [])
-         self.enabled = settings.get_bool("PROXY_ENABLED", False)
-
-         if not self.enabled:
-             self.logger.info("SimpleProxyMiddleware disabled")
-             return
-
-         if not self.proxies:
-             raise NotConfiguredError("PROXY_LIST not configured, SimpleProxyMiddleware disabled")
-
-         self.logger.info(f"SimpleProxyMiddleware enabled with {len(self.proxies)} proxies")
-
-     @classmethod
-     def create_instance(cls, crawler):
-         return cls(settings=crawler.settings, log_level=crawler.settings.get("LOG_LEVEL"))
-
-     async def process_request(self, request: Request, spider) -> Optional[Request]:
-         """Assign a proxy to the request."""
-         if not self.enabled:
-             return None
-
-         if request.proxy:
-             # The request already specifies a proxy; do not override it
-             return None
-
-         if self.proxies:
-             # Pick a random proxy
-             proxy = random.choice(self.proxies)
-             request.proxy = proxy
-             self.logger.debug(f"Assigned proxy {proxy} to {request.url}")
-
-         return None
-
-     def process_response(self, request: Request, response: Response, spider) -> Response:
-         """Handle the response."""
-         if request.proxy:
-             self.logger.debug(f"Proxy request successful: {request.proxy} | {request.url}")
-         return response
-
-     def process_exception(self, request: Request, exception: Exception, spider) -> Optional[Request]:
-         """Handle exceptions."""
-         if request.proxy:
-             self.logger.warning(f"Proxy request failed: {request.proxy} | {request.url} | {repr(exception)}")
-         return None
@@ -1,269 +0,0 @@
- #!/usr/bin/python
- # -*- coding: UTF-8 -*-
- """
- # @Time   : 2025-09-10 22:00
- # @Author : crawl-coder
- # @Desc   : Anti-crawling countermeasure utilities
- """
-
- import asyncio
- import random
- import time
- from typing import Dict, Any, Optional, List, Callable
-
-
- class ProxyPoolManager:
-     """Proxy pool manager."""
-
-     def __init__(self, proxies: Optional[List[Dict[str, str]]] = None):
-         """
-         Initialize the proxy pool manager.
-
-         Args:
-             proxies (Optional[List[Dict[str, str]]]): list of proxies
-         """
-         self.proxies = proxies or [
-             {"http": "http://proxy1.example.com:8080", "https": "https://proxy1.example.com:8080"},
-             {"http": "http://proxy2.example.com:8080", "https": "https://proxy2.example.com:8080"},
-             {"http": "http://proxy3.example.com:8080", "https": "https://proxy3.example.com:8080"}
-         ]
-         self.proxy_status = {id(proxy): {"last_used": 0, "success_count": 0, "fail_count": 0}
-                              for proxy in self.proxies}
-
-     def get_random_proxy(self) -> Dict[str, str]:
-         """
-         Get a random proxy.
-
-         Returns:
-             Dict[str, str]: proxy configuration
-         """
-         return random.choice(self.proxies)
-
-     def get_best_proxy(self) -> Dict[str, str]:
-         """
-         Get the best proxy based on its success rate.
-
-         Returns:
-             Dict[str, str]: proxy configuration
-         """
-         if not self.proxy_status:
-             return self.get_random_proxy()
-
-         # Compute the success rate of each proxy
-         proxy_scores = []
-         for proxy in self.proxies:
-             proxy_id = id(proxy)
-             status = self.proxy_status.get(proxy_id, {"success_count": 0, "fail_count": 0})
-             total = status["success_count"] + status["fail_count"]
-
-             if total == 0:
-                 score = 0.5  # default success rate
-             else:
-                 score = status["success_count"] / total
-
-             proxy_scores.append((proxy, score))
-
-         # Sort by success rate and return the proxy with the highest rate
-         proxy_scores.sort(key=lambda x: x[1], reverse=True)
-         return proxy_scores[0][0]
-
-     def report_proxy_result(self, proxy: Dict[str, str], success: bool) -> None:
-         """
-         Report the result of using a proxy.
-
-         Args:
-             proxy (Dict[str, str]): proxy configuration
-             success (bool): whether the request succeeded
-         """
-         proxy_id = id(proxy)
-         if proxy_id not in self.proxy_status:
-             self.proxy_status[proxy_id] = {"last_used": 0, "success_count": 0, "fail_count": 0}
-
-         status = self.proxy_status[proxy_id]
-         status["last_used"] = time.time()
-
-         if success:
-             status["success_count"] += 1
-         else:
-             status["fail_count"] += 1
-
-     def remove_invalid_proxy(self, proxy: Dict[str, str]) -> None:
-         """
-         Remove an invalid proxy.
-
-         Args:
-             proxy (Dict[str, str]): proxy configuration
-         """
-         if proxy in self.proxies:
-             self.proxies.remove(proxy)
-             proxy_id = id(proxy)
-             if proxy_id in self.proxy_status:
-                 del self.proxy_status[proxy_id]
-
-
- class CaptchaHandler:
-     """Captcha handler."""
-
-     def __init__(self, captcha_service: Optional[Callable] = None):
-         """
-         Initialize the captcha handler.
-
-         Args:
-             captcha_service (Optional[Callable]): captcha recognition service
-         """
-         self.captcha_service = captcha_service
-
-     async def recognize_captcha(self, image_data: bytes,
-                                 captcha_type: str = "image") -> Optional[str]:
-         """
-         Recognize a captcha.
-
-         Args:
-             image_data (bytes): captcha image data
-             captcha_type (str): captcha type
-
-         Returns:
-             Optional[str]: recognition result
-         """
-         if self.captcha_service:
-             try:
-                 return await self.captcha_service(image_data, captcha_type)
-             except Exception:
-                 return None
-         else:
-             # If no captcha service is configured, return None
-             return None
-
-     async def handle_manual_captcha(self, prompt: str = "Please enter the captcha: ") -> str:
-         """
-         Handle manual captcha input.
-
-         Args:
-             prompt (str): prompt message
-
-         Returns:
-             str: the captcha entered by the user
-         """
-         # In a real application this would need to interact with a user interface;
-         # for demonstration purposes we simulate user input.
-         print(prompt)
-         return input() if not asyncio.get_event_loop().is_running() else ""
-
-
- class AntiCrawler:
-     """Anti-crawling countermeasure toolkit."""
-
-     def __init__(self, proxies: Optional[List[Dict[str, str]]] = None,
-                  captcha_service: Optional[Callable] = None):
-         """
-         Initialize the anti-crawling toolkit.
-
-         Args:
-             proxies (Optional[List[Dict[str, str]]]): list of proxies
-             captcha_service (Optional[Callable]): captcha recognition service
-         """
-         self.proxy_manager = ProxyPoolManager(proxies)
-         self.captcha_handler = CaptchaHandler(captcha_service)
-
-     def get_random_user_agent(self) -> str:
-         """
-         Get a random User-Agent.
-
-         Returns:
-             str: a random User-Agent
-         """
-         user_agents = [
-             "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36",
-             "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36",
-             "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:89.0) Gecko/20100101 Firefox/89.0",
-             "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/14.1.1 Safari/605.1.15",
-             "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Edge/91.0.864.59",
-             "Mozilla/5.0 (iPhone; CPU iPhone OS 14_6 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/14.0 Mobile/15E148 Safari/604.1",
-             "Mozilla/5.0 (Linux; Android 11; Pixel 5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Mobile Safari/537.36"
-         ]
-         return random.choice(user_agents)
-
-     def rotate_proxy(self) -> Dict[str, str]:
-         """
-         Rotate to another proxy.
-
-         Returns:
-             Dict[str, str]: proxy configuration
-         """
-         return self.proxy_manager.get_best_proxy()
-
-     def handle_captcha(self, response_text: str) -> bool:
-         """
-         Detect whether a captcha has been encountered.
-
-         Args:
-             response_text (str): response text
-
-         Returns:
-             bool: whether a captcha was encountered
-         """
-         captcha_keywords = ["captcha", "verify", "验证", "验证码", "human verification"]
-         return any(keyword in response_text.lower() for keyword in captcha_keywords)
-
-     def detect_rate_limiting(self, status_code: int, response_headers: Dict[str, Any]) -> bool:
-         """
-         Detect whether rate limiting has been encountered.
-
-         Args:
-             status_code (int): HTTP status code
-             response_headers (Dict[str, Any]): response headers
-
-         Returns:
-             bool: whether rate limiting was encountered
-         """
-         # Check the status code
-         if status_code in [429, 503]:
-             return True
-
-         # Check the response headers
-         rate_limit_headers = ["x-ratelimit-remaining", "retry-after", "x-ratelimit-reset"]
-         return any(header.lower() in [k.lower() for k in response_headers.keys()]
-                    for header in rate_limit_headers)
-
-     def random_delay(self, min_delay: float = 1.0, max_delay: float = 3.0) -> None:
-         """
-         Sleep for a random delay to avoid sending requests too frequently.
-
-         Args:
-             min_delay (float): minimum delay in seconds
-             max_delay (float): maximum delay in seconds
-         """
-         delay = random.uniform(min_delay, max_delay)
-         time.sleep(delay)
-
-     async def async_random_delay(self, min_delay: float = 1.0, max_delay: float = 3.0) -> None:
-         """
-         Asynchronously sleep for a random delay to avoid sending requests too frequently.
-
-         Args:
-             min_delay (float): minimum delay in seconds
-             max_delay (float): maximum delay in seconds
-         """
-         delay = random.uniform(min_delay, max_delay)
-         await asyncio.sleep(delay)
-
-
- # Convenience functions
- def get_random_user_agent() -> str:
-     """Get a random User-Agent."""
-     return AntiCrawler().get_random_user_agent()
-
-
- def rotate_proxy(proxies: Optional[List[Dict[str, str]]] = None) -> Dict[str, str]:
-     """Rotate to another proxy."""
-     return AntiCrawler(proxies).rotate_proxy()
-
-
- def handle_captcha(response_text: str) -> bool:
-     """Detect whether a captcha has been encountered."""
-     return AntiCrawler().handle_captcha(response_text)
-
-
- def detect_rate_limiting(status_code: int, response_headers: Dict[str, Any]) -> bool:
-     """Detect whether rate limiting has been encountered."""
-     return AntiCrawler().detect_rate_limiting(status_code, response_headers)
@@ -1,26 +0,0 @@
- # -*- coding: UTF-8 -*-
- """
- Class loader utility module
- ===========================
- Provides dynamic class loading to avoid circular-dependency problems.
- """
- import importlib
- from typing import Any
-
-
- def load_class(path: str) -> Any:
-     """
-     Dynamically load a class.
-
-     Args:
-         path: full dotted path of the class, e.g. 'package.module.ClassName'
-
-     Returns:
-         The loaded class object
-     """
-     try:
-         module_path, class_name = path.rsplit('.', 1)
-         module = importlib.import_module(module_path)
-         return getattr(module, class_name)
-     except (ValueError, ImportError, AttributeError) as e:
-         raise ImportError(f"Failed to load class '{path}': {e}")