crawlo-1.2.8-py3-none-any.whl → crawlo-1.3.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
- crawlo/__init__.py +63 -61
- crawlo/__version__.py +1 -1
- crawlo/cli.py +75 -75
- crawlo/commands/__init__.py +14 -14
- crawlo/commands/check.py +594 -594
- crawlo/commands/genspider.py +151 -151
- crawlo/commands/help.py +138 -138
- crawlo/commands/list.py +155 -155
- crawlo/commands/run.py +314 -323
- crawlo/commands/startproject.py +436 -436
- crawlo/commands/stats.py +187 -187
- crawlo/commands/utils.py +186 -186
- crawlo/config.py +312 -312
- crawlo/config_validator.py +277 -251
- crawlo/core/__init__.py +2 -2
- crawlo/core/engine.py +365 -365
- crawlo/core/processor.py +40 -40
- crawlo/core/scheduler.py +256 -251
- crawlo/crawler.py +1097 -1099
- crawlo/data/__init__.py +5 -5
- crawlo/data/user_agents.py +194 -107
- crawlo/downloader/__init__.py +273 -266
- crawlo/downloader/aiohttp_downloader.py +226 -228
- crawlo/downloader/cffi_downloader.py +245 -256
- crawlo/downloader/httpx_downloader.py +259 -259
- crawlo/downloader/hybrid_downloader.py +212 -212
- crawlo/downloader/playwright_downloader.py +402 -402
- crawlo/downloader/selenium_downloader.py +472 -472
- crawlo/event.py +11 -11
- crawlo/exceptions.py +81 -81
- crawlo/extension/__init__.py +39 -39
- crawlo/extension/health_check.py +141 -141
- crawlo/extension/log_interval.py +57 -57
- crawlo/extension/log_stats.py +81 -81
- crawlo/extension/logging_extension.py +45 -43
- crawlo/extension/memory_monitor.py +104 -104
- crawlo/extension/performance_profiler.py +133 -133
- crawlo/extension/request_recorder.py +107 -107
- crawlo/filters/__init__.py +154 -154
- crawlo/filters/aioredis_filter.py +234 -234
- crawlo/filters/memory_filter.py +269 -269
- crawlo/items/__init__.py +23 -23
- crawlo/items/base.py +21 -21
- crawlo/items/fields.py +52 -52
- crawlo/items/items.py +104 -104
- crawlo/middleware/__init__.py +21 -21
- crawlo/middleware/default_header.py +132 -132
- crawlo/middleware/download_delay.py +104 -104
- crawlo/middleware/middleware_manager.py +136 -136
- crawlo/middleware/offsite.py +114 -114
- crawlo/middleware/proxy.py +386 -368
- crawlo/middleware/request_ignore.py +86 -86
- crawlo/middleware/response_code.py +163 -163
- crawlo/middleware/response_filter.py +136 -136
- crawlo/middleware/retry.py +124 -124
- crawlo/middleware/simple_proxy.py +65 -0
- crawlo/mode_manager.py +212 -211
- crawlo/network/__init__.py +21 -21
- crawlo/network/request.py +379 -338
- crawlo/network/response.py +359 -359
- crawlo/pipelines/__init__.py +21 -21
- crawlo/pipelines/bloom_dedup_pipeline.py +157 -157
- crawlo/pipelines/console_pipeline.py +39 -39
- crawlo/pipelines/csv_pipeline.py +316 -316
- crawlo/pipelines/database_dedup_pipeline.py +223 -223
- crawlo/pipelines/json_pipeline.py +218 -218
- crawlo/pipelines/memory_dedup_pipeline.py +115 -115
- crawlo/pipelines/mongo_pipeline.py +131 -131
- crawlo/pipelines/mysql_pipeline.py +317 -317
- crawlo/pipelines/pipeline_manager.py +74 -62
- crawlo/pipelines/redis_dedup_pipeline.py +167 -167
- crawlo/project.py +284 -315
- crawlo/queue/pqueue.py +37 -37
- crawlo/queue/queue_manager.py +379 -378
- crawlo/queue/redis_priority_queue.py +306 -306
- crawlo/settings/__init__.py +7 -7
- crawlo/settings/default_settings.py +216 -220
- crawlo/settings/setting_manager.py +175 -122
- crawlo/spider/__init__.py +639 -639
- crawlo/stats_collector.py +59 -59
- crawlo/subscriber.py +129 -129
- crawlo/task_manager.py +30 -30
- crawlo/templates/crawlo.cfg.tmpl +10 -10
- crawlo/templates/project/__init__.py.tmpl +3 -3
- crawlo/templates/project/items.py.tmpl +17 -17
- crawlo/templates/project/middlewares.py.tmpl +118 -118
- crawlo/templates/project/pipelines.py.tmpl +96 -96
- crawlo/templates/project/settings.py.tmpl +261 -288
- crawlo/templates/project/settings_distributed.py.tmpl +174 -157
- crawlo/templates/project/settings_gentle.py.tmpl +95 -100
- crawlo/templates/project/settings_high_performance.py.tmpl +125 -134
- crawlo/templates/project/settings_minimal.py.tmpl +30 -0
- crawlo/templates/project/settings_simple.py.tmpl +96 -98
- crawlo/templates/project/spiders/__init__.py.tmpl +5 -5
- crawlo/templates/run.py.tmpl +47 -47
- crawlo/templates/spider/spider.py.tmpl +143 -143
- crawlo/tools/__init__.py +200 -182
- crawlo/tools/anti_crawler.py +268 -268
- crawlo/tools/authenticated_proxy.py +240 -240
- crawlo/{cleaners → tools}/data_formatter.py +225 -225
- crawlo/tools/data_validator.py +180 -180
- crawlo/tools/date_tools.py +290 -36
- crawlo/tools/distributed_coordinator.py +388 -387
- crawlo/{cleaners → tools}/encoding_converter.py +127 -126
- crawlo/tools/request_tools.py +83 -0
- crawlo/tools/retry_mechanism.py +224 -221
- crawlo/tools/scenario_adapter.py +262 -262
- crawlo/{cleaners → tools}/text_cleaner.py +232 -232
- crawlo/utils/__init__.py +35 -35
- crawlo/utils/batch_processor.py +259 -259
- crawlo/utils/controlled_spider_mixin.py +439 -439
- crawlo/utils/db_helper.py +343 -343
- crawlo/utils/enhanced_error_handler.py +356 -356
- crawlo/utils/env_config.py +142 -142
- crawlo/utils/error_handler.py +123 -123
- crawlo/utils/func_tools.py +82 -82
- crawlo/utils/large_scale_config.py +286 -286
- crawlo/utils/large_scale_helper.py +344 -344
- crawlo/utils/log.py +146 -128
- crawlo/utils/performance_monitor.py +285 -285
- crawlo/utils/queue_helper.py +175 -175
- crawlo/utils/redis_connection_pool.py +351 -351
- crawlo/utils/redis_key_validator.py +198 -198
- crawlo/utils/request.py +267 -267
- crawlo/utils/request_serializer.py +218 -218
- crawlo/utils/spider_loader.py +61 -61
- crawlo/utils/system.py +11 -11
- crawlo/utils/tools.py +4 -4
- crawlo/utils/url.py +39 -39
- {crawlo-1.2.8.dist-info → crawlo-1.3.0.dist-info}/METADATA +1011 -764
- crawlo-1.3.0.dist-info/RECORD +219 -0
- examples/__init__.py +7 -7
- tests/DOUBLE_CRAWLO_PREFIX_FIX_REPORT.md +81 -81
- tests/__init__.py +7 -7
- tests/advanced_tools_example.py +275 -275
- tests/authenticated_proxy_example.py +107 -237
- tests/cleaners_example.py +160 -160
- tests/config_validation_demo.py +143 -103
- tests/controlled_spider_example.py +205 -205
- tests/date_tools_example.py +180 -180
- tests/debug_pipelines.py +67 -0
- tests/dynamic_loading_example.py +523 -523
- tests/dynamic_loading_test.py +104 -104
- tests/env_config_example.py +133 -133
- tests/error_handling_example.py +171 -171
- tests/redis_key_validation_demo.py +130 -130
- tests/request_params_example.py +151 -0
- tests/response_improvements_example.py +144 -144
- tests/test_advanced_tools.py +148 -148
- tests/test_all_redis_key_configs.py +145 -145
- tests/test_authenticated_proxy.py +141 -141
- tests/test_cleaners.py +54 -54
- tests/test_comprehensive.py +146 -146
- tests/test_config_consistency.py +80 -80
- tests/test_config_merge.py +153 -0
- tests/test_config_validator.py +182 -193
- tests/test_crawlo_proxy_integration.py +109 -173
- tests/test_date_tools.py +123 -123
- tests/test_default_header_middleware.py +158 -158
- tests/test_distributed.py +65 -0
- tests/test_double_crawlo_fix.py +207 -207
- tests/test_double_crawlo_fix_simple.py +124 -124
- tests/test_download_delay_middleware.py +221 -221
- tests/test_downloader_proxy_compatibility.py +268 -268
- tests/test_dynamic_downloaders_proxy.py +124 -124
- tests/test_dynamic_proxy.py +92 -92
- tests/test_dynamic_proxy_config.py +146 -146
- tests/test_dynamic_proxy_real.py +109 -109
- tests/test_edge_cases.py +303 -303
- tests/test_enhanced_error_handler.py +270 -270
- tests/test_env_config.py +121 -121
- tests/test_error_handler_compatibility.py +112 -112
- tests/test_final_validation.py +153 -153
- tests/test_framework_env_usage.py +103 -103
- tests/test_integration.py +169 -357
- tests/test_item_dedup_redis_key.py +122 -122
- tests/test_mode_consistency.py +51 -51
- tests/test_offsite_middleware.py +221 -221
- tests/test_parsel.py +29 -29
- tests/test_performance.py +327 -327
- tests/test_proxy_api.py +264 -264
- tests/test_proxy_health_check.py +32 -32
- tests/test_proxy_middleware.py +121 -121
- tests/test_proxy_middleware_enhanced.py +216 -216
- tests/test_proxy_middleware_integration.py +136 -136
- tests/test_proxy_middleware_refactored.py +185 -0
- tests/test_proxy_providers.py +56 -56
- tests/test_proxy_stats.py +19 -19
- tests/test_proxy_strategies.py +59 -59
- tests/test_queue_manager_double_crawlo.py +173 -173
- tests/test_queue_manager_redis_key.py +176 -176
- tests/test_random_user_agent.py +73 -0
- tests/test_real_scenario_proxy.py +195 -195
- tests/test_redis_config.py +28 -28
- tests/test_redis_connection_pool.py +294 -294
- tests/test_redis_key_naming.py +181 -181
- tests/test_redis_key_validator.py +123 -123
- tests/test_redis_queue.py +224 -224
- tests/test_request_ignore_middleware.py +182 -182
- tests/test_request_params.py +112 -0
- tests/test_request_serialization.py +70 -70
- tests/test_response_code_middleware.py +349 -349
- tests/test_response_filter_middleware.py +427 -427
- tests/test_response_improvements.py +152 -152
- tests/test_retry_middleware.py +241 -241
- tests/test_scheduler.py +252 -252
- tests/test_scheduler_config_update.py +133 -133
- tests/test_simple_response.py +61 -61
- tests/test_telecom_spider_redis_key.py +205 -205
- tests/test_template_content.py +87 -87
- tests/test_template_redis_key.py +134 -134
- tests/test_tools.py +159 -153
- tests/test_user_agents.py +97 -0
- tests/tools_example.py +260 -257
- tests/verify_distributed.py +117 -0
- crawlo/cleaners/__init__.py +0 -61
- crawlo/utils/date_tools.py +0 -290
- crawlo-1.2.8.dist-info/RECORD +0 -209
- {crawlo-1.2.8.dist-info → crawlo-1.3.0.dist-info}/WHEEL +0 -0
- {crawlo-1.2.8.dist-info → crawlo-1.3.0.dist-info}/entry_points.txt +0 -0
- {crawlo-1.2.8.dist-info → crawlo-1.3.0.dist-info}/top_level.txt +0 -0
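The `crawlo/{cleaners → tools}` entries above record that the old `crawlo.cleaners` package (`text_cleaner`, `data_formatter`, `encoding_converter`) was folded into `crawlo.tools`, with `crawlo/cleaners/__init__.py` deleted outright. A minimal compatibility sketch for downstream code, assuming only the module names visible in the rename entries; the fallback shim is illustrative, not something the package ships:

# Hypothetical compatibility shim: prefer the new crawlo.tools location (1.3.0)
# and fall back to the removed crawlo.cleaners package on older installs.
try:
    from crawlo.tools import text_cleaner, data_formatter, encoding_converter
except ImportError:  # crawlo <= 1.2.8
    from crawlo.cleaners import text_cleaner, data_formatter, encoding_converter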
tests/test_advanced_tools.py
CHANGED
@@ -1,149 +1,149 @@
-#!/usr/bin/python
-# -*- coding: UTF-8 -*-
-"""
-Advanced tools tests
-"""
-import unittest
-from crawlo.tools import (
-    # Data processing tools
-    clean_text,
-    format_currency,
-    validate_email,
-    validate_url,
-    check_data_integrity,
-
-    # Retry mechanism
-    RetryMechanism,
-    should_retry,
-    exponential_backoff,
-
-    # Anti-crawler tools
-    AntiCrawler,
-    rotate_proxy,
-    handle_captcha,
-    detect_rate_limiting,
-
-    # Distributed coordination tools
-    generate_pagination_tasks,
-    distribute_tasks,
-    DistributedCoordinator,
-    TaskDistributor,
-    DeduplicationTool
-)
-
-
-class TestAdvancedTools(unittest.TestCase):
-    """Test class for the advanced tools"""
-
-    def test_data_processing_tools(self):
-        """Test the data processing tools"""
-        # Text cleaning
-        dirty_text = "<p>这是一个 <b>测试</b>&文本</p>"
-        clean_result = clean_text(dirty_text)
-        self.assertEqual(clean_result, "这是一个 测试&文本")
-
-        # Data formatting
-        price = 1234.567
-        formatted_price = format_currency(price, "¥", 2)
-        self.assertEqual(formatted_price, "¥1,234.57")
-
-        # Field validation
-        self.assertTrue(validate_email("test@example.com"))
-        self.assertFalse(validate_email("invalid-email"))
-
-        self.assertTrue(validate_url("https://example.com"))
-        self.assertFalse(validate_url("invalid-url"))
-
-        # Data integrity check
-        data = {
-            "name": "张三",
-            "email": "test@example.com",
-            "phone": "13812345678"
-        }
-        required_fields = ["name", "email", "phone"]
-        integrity_result = check_data_integrity(data, required_fields)
-        self.assertTrue(integrity_result["is_valid"])
-
-    def test_retry_mechanism(self):
-        """Test the retry mechanism"""
-        # Exponential backoff
-        delay = exponential_backoff(0)
-        self.assertGreater(delay, 0)
-
-        # Should-retry decision
-        self.assertTrue(should_retry(status_code=500))
-        self.assertTrue(should_retry(exception=ConnectionError()))
-        self.assertFalse(should_retry(status_code=200))
-
-    def test_anti_crawler_tools(self):
-        """Test the anti-crawler tools"""
-        # Anti-crawler helper
-        anti_crawler = AntiCrawler()
-
-        # Random User-Agent
-        user_agent = anti_crawler.get_random_user_agent()
-        self.assertIsInstance(user_agent, str)
-        self.assertGreater(len(user_agent), 0)
-
-        # Proxy rotation
-        proxy = anti_crawler.rotate_proxy()
-        self.assertIsInstance(proxy, dict)
-
-        # Captcha detection
-        self.assertTrue(anti_crawler.handle_captcha("请输入验证码进行验证"))
-        self.assertFalse(anti_crawler.handle_captcha("正常页面内容"))
-
-        # Rate-limit detection
-        self.assertTrue(anti_crawler.detect_rate_limiting(429, {}))
-        self.assertFalse(anti_crawler.detect_rate_limiting(200, {}))
-
-    def test_distributed_coordinator_tools(self):
-        """Test the distributed coordination tools"""
-        # Task distributor
-        distributor = TaskDistributor()
-
-        # Pagination task generation
-        base_url = "https://example.com/products"
-        pagination_tasks = distributor.generate_pagination_tasks(base_url, 1, 5)
-        self.assertEqual(len(pagination_tasks), 5)
-
-        # Task distribution
-        tasks = list(range(1, 21))  # 20 tasks
-        distributed = distributor.distribute_tasks(tasks, 4)  # across 4 workers
-        self.assertEqual(len(distributed), 4)
-        self.assertEqual(sum(len(worker_tasks) for worker_tasks in distributed), 20)
-
-        # Deduplication tool
-        dedup_tool = DeduplicationTool()
-
-        # Fingerprint generation
-        fingerprint1 = dedup_tool.generate_fingerprint({"name": "test", "value": 123})
-        fingerprint2 = dedup_tool.generate_fingerprint({"name": "test", "value": 123})
-        self.assertEqual(fingerprint1, fingerprint2)
-
-        # Deduplication
-        self.assertFalse(dedup_tool.is_duplicate({"name": "test", "value": 123}))
-        self.assertTrue(dedup_tool.add_to_dedup({"name": "test", "value": 123}))
-        self.assertTrue(dedup_tool.is_duplicate({"name": "test", "value": 123}))
-        self.assertFalse(dedup_tool.add_to_dedup({"name": "test", "value": 123}))
-
-        # Distributed coordinator
-        coordinator = DistributedCoordinator()
-
-        # Task ID generation
-        task_id = coordinator.generate_task_id("https://example.com", "test_spider")
-        self.assertIsInstance(task_id, str)
-        self.assertEqual(len(task_id), 32)  # length of an MD5 hex digest
-
-        # Pagination task generation
-        pagination_tasks = coordinator.generate_pagination_tasks("https://example.com/products", 1, 5)
-        self.assertEqual(len(pagination_tasks), 5)
-
-        # Task distribution
-        tasks = list(range(1, 21))  # 20 tasks
-        distributed = coordinator.distribute_tasks(tasks, 4)  # across 4 workers
-        self.assertEqual(len(distributed), 4)
-
-
-if __name__ == '__main__':
+#!/usr/bin/python
+# -*- coding: UTF-8 -*-
+"""
+Advanced tools tests
+"""
+import unittest
+from crawlo.tools import (
+    # Data processing tools
+    clean_text,
+    format_currency,
+    validate_email,
+    validate_url,
+    check_data_integrity,
+
+    # Retry mechanism
+    RetryMechanism,
+    should_retry,
+    exponential_backoff,
+
+    # Anti-crawler tools
+    AntiCrawler,
+    rotate_proxy,
+    handle_captcha,
+    detect_rate_limiting,
+
+    # Distributed coordination tools
+    generate_pagination_tasks,
+    distribute_tasks,
+    DistributedCoordinator,
+    TaskDistributor,
+    DeduplicationTool
+)
+
+
+class TestAdvancedTools(unittest.TestCase):
+    """Test class for the advanced tools"""
+
+    def test_data_processing_tools(self):
+        """Test the data processing tools"""
+        # Text cleaning
+        dirty_text = "<p>这是一个 <b>测试</b>&文本</p>"
+        clean_result = clean_text(dirty_text)
+        self.assertEqual(clean_result, "这是一个 测试&文本")
+
+        # Data formatting
+        price = 1234.567
+        formatted_price = format_currency(price, "¥", 2)
+        self.assertEqual(formatted_price, "¥1,234.57")
+
+        # Field validation
+        self.assertTrue(validate_email("test@example.com"))
+        self.assertFalse(validate_email("invalid-email"))
+
+        self.assertTrue(validate_url("https://example.com"))
+        self.assertFalse(validate_url("invalid-url"))
+
+        # Data integrity check
+        data = {
+            "name": "张三",
+            "email": "test@example.com",
+            "phone": "13812345678"
+        }
+        required_fields = ["name", "email", "phone"]
+        integrity_result = check_data_integrity(data, required_fields)
+        self.assertTrue(integrity_result["is_valid"])
+
+    def test_retry_mechanism(self):
+        """Test the retry mechanism"""
+        # Exponential backoff
+        delay = exponential_backoff(0)
+        self.assertGreater(delay, 0)
+
+        # Should-retry decision
+        self.assertTrue(should_retry(status_code=500))
+        self.assertTrue(should_retry(exception=ConnectionError()))
+        self.assertFalse(should_retry(status_code=200))
+
+    def test_anti_crawler_tools(self):
+        """Test the anti-crawler tools"""
+        # Anti-crawler helper
+        anti_crawler = AntiCrawler()
+
+        # Random User-Agent
+        user_agent = anti_crawler.get_random_user_agent()
+        self.assertIsInstance(user_agent, str)
+        self.assertGreater(len(user_agent), 0)
+
+        # Proxy rotation
+        proxy = anti_crawler.rotate_proxy()
+        self.assertIsInstance(proxy, dict)
+
+        # Captcha detection
+        self.assertTrue(anti_crawler.handle_captcha("请输入验证码进行验证"))
+        self.assertFalse(anti_crawler.handle_captcha("正常页面内容"))
+
+        # Rate-limit detection
+        self.assertTrue(anti_crawler.detect_rate_limiting(429, {}))
+        self.assertFalse(anti_crawler.detect_rate_limiting(200, {}))
+
+    def test_distributed_coordinator_tools(self):
+        """Test the distributed coordination tools"""
+        # Task distributor
+        distributor = TaskDistributor()
+
+        # Pagination task generation
+        base_url = "https://example.com/products"
+        pagination_tasks = distributor.generate_pagination_tasks(base_url, 1, 5)
+        self.assertEqual(len(pagination_tasks), 5)
+
+        # Task distribution
+        tasks = list(range(1, 21))  # 20 tasks
+        distributed = distributor.distribute_tasks(tasks, 4)  # across 4 workers
+        self.assertEqual(len(distributed), 4)
+        self.assertEqual(sum(len(worker_tasks) for worker_tasks in distributed), 20)
+
+        # Deduplication tool
+        dedup_tool = DeduplicationTool()
+
+        # Fingerprint generation
+        fingerprint1 = dedup_tool.generate_fingerprint({"name": "test", "value": 123})
+        fingerprint2 = dedup_tool.generate_fingerprint({"name": "test", "value": 123})
+        self.assertEqual(fingerprint1, fingerprint2)
+
+        # Deduplication
+        self.assertFalse(dedup_tool.is_duplicate({"name": "test", "value": 123}))
+        self.assertTrue(dedup_tool.add_to_dedup({"name": "test", "value": 123}))
+        self.assertTrue(dedup_tool.is_duplicate({"name": "test", "value": 123}))
+        self.assertFalse(dedup_tool.add_to_dedup({"name": "test", "value": 123}))
+
+        # Distributed coordinator
+        coordinator = DistributedCoordinator()
+
+        # Task ID generation
+        task_id = coordinator.generate_task_id("https://example.com", "test_spider")
+        self.assertIsInstance(task_id, str)
+        self.assertEqual(len(task_id), 32)  # length of an MD5 hex digest
+
+        # Pagination task generation
+        pagination_tasks = coordinator.generate_pagination_tasks("https://example.com/products", 1, 5)
+        self.assertEqual(len(pagination_tasks), 5)
+
+        # Task distribution
+        tasks = list(range(1, 21))  # 20 tasks
+        distributed = coordinator.distribute_tasks(tasks, 4)  # across 4 workers
+        self.assertEqual(len(distributed), 4)
+
+
+if __name__ == '__main__':
     unittest.main()
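The test above pins down two behaviours without showing their implementation: `generate_fingerprint`/`generate_task_id` must return the same 32-character (MD5-length) digest for equal inputs, and `distribute_tasks` must split 20 tasks across 4 workers without losing any. A self-contained sketch of one way to satisfy those assertions; `fingerprint` and `distribute` are illustrative stand-ins, not crawlo's actual internals:

import hashlib
import json


def fingerprint(item):
    """Stable content fingerprint: equal dicts hash identically (illustrative)."""
    # Serialize with sorted keys so key order never changes the digest.
    payload = json.dumps(item, sort_keys=True, ensure_ascii=False)
    return hashlib.md5(payload.encode("utf-8")).hexdigest()  # 32 hex chars


def distribute(tasks, workers):
    """Round-robin split: every task lands in exactly one worker bucket."""
    buckets = [[] for _ in range(workers)]
    for i, task in enumerate(tasks):
        buckets[i % workers].append(task)
    return buckets


# The same properties the unit test asserts:
assert fingerprint({"name": "test", "value": 123}) == fingerprint({"value": 123, "name": "test"})
assert len(fingerprint({"name": "test", "value": 123})) == 32
parts = distribute(list(range(1, 21)), 4)
assert len(parts) == 4 and sum(len(p) for p in parts) == 20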
tests/test_all_redis_key_configs.py
CHANGED
@@ -1,146 +1,146 @@
-#!/usr/bin/env python3
-# -*- coding: utf-8 -*-
-"""
-Test script for all Redis key configurations
-Verifies that every configuration file follows the new Redis key naming convention
-"""
-import sys
-import os
-import re
-
-# Add the project root to the path
-sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
-
-
-def test_all_redis_key_configs():
-    """Test all Redis key configurations"""
-    print("
-
-    try:
-        # Check the example project configuration files
-        example_projects = [
-            "examples/books_distributed/books_distributed/settings.py",
-            "examples/api_data_collection/api_data_collection/settings.py",
-            "examples/telecom_licenses_distributed/telecom_licenses_distributed/settings.py"
-        ]
-
-        for project_config in example_projects:
-            print(f" 检查 {project_config}...")
-            if not os.path.exists(project_config):
-                print(f"
-                return False
-
-            with open(project_config, 'r', encoding='utf-8') as f:
-                content = f.read()
-
-            # Check that the old REDIS_KEY setting has been removed
-            if re.search(r'REDIS_KEY\s*=', content) and 'crawlo:{PROJECT_NAME}:filter:fingerprint' not in content:
-                print(f"
-                return False
-
-            # Check that the new naming-convention comment has been added
-            if 'crawlo:{PROJECT_NAME}:filter:fingerprint' not in content:
-                print(f"
-                return False
-
-            print(f"
-
-        # Check the template file
-        template_file = "crawlo/templates/project/settings.py.tmpl"
-        print(f" 检查 {template_file}...")
-        if not os.path.exists(template_file):
-            print(f"
-            return False
-
-        with open(template_file, 'r', encoding='utf-8') as f:
-            template_content = f.read()
-
-        # Check that the old REDIS_KEY setting has been removed
-        if "REDIS_KEY = f'{{project_name}}:fingerprint'" in template_content:
-            print("
-            return False
-
-        # Check that the new naming-convention comments have been added
-        if '# crawlo:{project_name}:filter:fingerprint (请求去重)' not in template_content:
-            print("
-            return False
-
-        if '# crawlo:{project_name}:item:fingerprint (数据项去重)' not in template_content:
-            print("
-            return False
-
-        print(f"
-
-        # Check mode_manager.py
-        mode_manager_file = "crawlo/mode_manager.py"
-        print(f" 检查 {mode_manager_file}...")
-        if not os.path.exists(mode_manager_file):
-            print(f"
-            return False
-
-        with open(mode_manager_file, 'r', encoding='utf-8') as f:
-            mode_manager_content = f.read()
-
-        # Check that the old REDIS_KEY setting has been removed
-        if "'REDIS_KEY': f'{project_name}:fingerprint'" in mode_manager_content:
-            print("
-            return False
-
-        # Check that the new naming-convention comment has been added
-        if 'crawlo:{project_name}:filter:fingerprint (请求去重)' not in mode_manager_content:
-            print("
-            return False
-
-        print(f"
-
-        # Check the default settings file
-        default_settings_file = "crawlo/settings/default_settings.py"
-        print(f" 检查 {default_settings_file}...")
-        if not os.path.exists(default_settings_file):
-            print(f"
-            return False
-
-        with open(default_settings_file, 'r', encoding='utf-8') as f:
-            default_settings_content = f.read()
-
-        # Check that the old REDIS_KEY setting has been removed
-        if re.search(r'REDIS_KEY\s*=\s*.*fingerprint', default_settings_content):
-            print("
-            return False
-
-        print(f"
-
-        print("
-        return True
-
-    except Exception as e:
-        print(f"
-        return False
-
-
-def main():
-    """Main test entry point"""
-    print("
-    print("=" * 50)
-
-    try:
-        success = test_all_redis_key_configs()
-
-        print("=" * 50)
-        if success:
-            print("
-        else:
-            print("
-            return 1
-
-    except Exception as e:
-        print("=" * 50)
-        print(f"
-        return 1
-
-    return 0
-
-
-if __name__ == "__main__":
-    exit_code = main()
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+"""
+Test script for all Redis key configurations
+Verifies that every configuration file follows the new Redis key naming convention
+"""
+import sys
+import os
+import re
+
+# Add the project root to the path
+sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
+
+
+def test_all_redis_key_configs():
+    """Test all Redis key configurations"""
+    print("测试所有Redis key配置...")
+
+    try:
+        # Check the example project configuration files
+        example_projects = [
+            "examples/books_distributed/books_distributed/settings.py",
+            "examples/api_data_collection/api_data_collection/settings.py",
+            "examples/telecom_licenses_distributed/telecom_licenses_distributed/settings.py"
+        ]
+
+        for project_config in example_projects:
+            print(f" 检查 {project_config}...")
+            if not os.path.exists(project_config):
+                print(f"配置文件不存在: {project_config}")
+                return False
+
+            with open(project_config, 'r', encoding='utf-8') as f:
+                content = f.read()
+
+            # Check that the old REDIS_KEY setting has been removed
+            if re.search(r'REDIS_KEY\s*=', content) and 'crawlo:{PROJECT_NAME}:filter:fingerprint' not in content:
+                print(f"{project_config}中仍然存在旧的REDIS_KEY配置")
+                return False
+
+            # Check that the new naming-convention comment has been added
+            if 'crawlo:{PROJECT_NAME}:filter:fingerprint' not in content:
+                print(f"{project_config}中缺少新的Redis key命名规范注释")
+                return False
+
+            print(f" {project_config}符合新的Redis key命名规范")
+
+        # Check the template file
+        template_file = "crawlo/templates/project/settings.py.tmpl"
+        print(f" 检查 {template_file}...")
+        if not os.path.exists(template_file):
+            print(f"模板文件不存在: {template_file}")
+            return False
+
+        with open(template_file, 'r', encoding='utf-8') as f:
+            template_content = f.read()
+
+        # Check that the old REDIS_KEY setting has been removed
+        if "REDIS_KEY = f'{{project_name}}:fingerprint'" in template_content:
+            print("模板文件中仍然存在旧的REDIS_KEY配置")
+            return False
+
+        # Check that the new naming-convention comments have been added
+        if '# crawlo:{project_name}:filter:fingerprint (请求去重)' not in template_content:
+            print("模板文件中缺少请求去重的Redis key命名规范注释")
+            return False
+
+        if '# crawlo:{project_name}:item:fingerprint (数据项去重)' not in template_content:
+            print("模板文件中缺少数据项去重的Redis key命名规范注释")
+            return False
+
+        print(f" {template_file}符合新的Redis key命名规范")
+
+        # Check mode_manager.py
+        mode_manager_file = "crawlo/mode_manager.py"
+        print(f" 检查 {mode_manager_file}...")
+        if not os.path.exists(mode_manager_file):
+            print(f"文件不存在: {mode_manager_file}")
+            return False
+
+        with open(mode_manager_file, 'r', encoding='utf-8') as f:
+            mode_manager_content = f.read()
+
+        # Check that the old REDIS_KEY setting has been removed
+        if "'REDIS_KEY': f'{project_name}:fingerprint'" in mode_manager_content:
+            print("mode_manager.py中仍然存在旧的REDIS_KEY配置")
+            return False
+
+        # Check that the new naming-convention comment has been added
+        if 'crawlo:{project_name}:filter:fingerprint (请求去重)' not in mode_manager_content:
+            print("mode_manager.py中缺少新的Redis key命名规范注释")
+            return False
+
+        print(f" {mode_manager_file}符合新的Redis key命名规范")
+
+        # Check the default settings file
+        default_settings_file = "crawlo/settings/default_settings.py"
+        print(f" 检查 {default_settings_file}...")
+        if not os.path.exists(default_settings_file):
+            print(f"文件不存在: {default_settings_file}")
+            return False
+
+        with open(default_settings_file, 'r', encoding='utf-8') as f:
+            default_settings_content = f.read()
+
+        # Check that the old REDIS_KEY setting has been removed
+        if re.search(r'REDIS_KEY\s*=\s*.*fingerprint', default_settings_content):
+            print("默认设置文件中仍然存在旧的REDIS_KEY配置")
+            return False
+
+        print(f" {default_settings_file}符合新的Redis key命名规范")
+
+        print("所有Redis key配置测试通过!")
+        return True
+
+    except Exception as e:
+        print(f"测试过程中发生错误: {e}")
+        return False
+
+
+def main():
+    """Main test entry point"""
+    print("开始所有Redis key配置测试...")
+    print("=" * 50)
+
+    try:
+        success = test_all_redis_key_configs()
+
+        print("=" * 50)
+        if success:
+            print("所有测试通过!所有配置文件符合新的Redis key命名规范")
+        else:
+            print("测试失败,请检查配置文件")
+            return 1
+
+    except Exception as e:
+        print("=" * 50)
+        print(f"测试过程中发生异常: {e}")
+        return 1
+
+    return 0
+
+
+if __name__ == "__main__":
+    exit_code = main()
     sys.exit(exit_code)
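The script above enforces the key scheme named in its checks: request fingerprints live at `crawlo:{project_name}:filter:fingerprint` and item fingerprints at `crawlo:{project_name}:item:fingerprint`, replacing the old bare `{project_name}:fingerprint` keys of 1.2.x. A minimal sketch of a validator for that convention; the regex and helper are illustrative, not part of crawlo:

import re

# Accepted namespaces, per the comments the script looks for:
# "filter" = request dedup, "item" = item dedup.
KEY_PATTERN = re.compile(r"^crawlo:(?P<project>[\w-]+):(?P<ns>filter|item):fingerprint$")


def is_valid_dedup_key(key):
    """Return True if a key follows the crawlo 1.3.0 naming convention (illustrative)."""
    return KEY_PATTERN.match(key) is not None


assert is_valid_dedup_key("crawlo:books_distributed:filter:fingerprint")
assert is_valid_dedup_key("crawlo:books_distributed:item:fingerprint")
assert not is_valid_dedup_key("books_distributed:fingerprint")  # old 1.2.x style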