crawlo 1.1.4-py3-none-any.whl → 1.1.5-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of crawlo might be problematic.
- crawlo/__init__.py +61 -34
- crawlo/__version__.py +1 -1
- crawlo/cleaners/__init__.py +61 -0
- crawlo/cleaners/data_formatter.py +226 -0
- crawlo/cleaners/encoding_converter.py +126 -0
- crawlo/cleaners/text_cleaner.py +233 -0
- crawlo/cli.py +40 -40
- crawlo/commands/__init__.py +13 -13
- crawlo/commands/check.py +594 -594
- crawlo/commands/genspider.py +151 -151
- crawlo/commands/list.py +155 -155
- crawlo/commands/run.py +285 -285
- crawlo/commands/startproject.py +300 -196
- crawlo/commands/stats.py +188 -188
- crawlo/commands/utils.py +186 -186
- crawlo/config.py +309 -279
- crawlo/config_validator.py +253 -0
- crawlo/core/__init__.py +2 -2
- crawlo/core/engine.py +346 -172
- crawlo/core/processor.py +40 -40
- crawlo/core/scheduler.py +137 -166
- crawlo/crawler.py +1027 -1027
- crawlo/downloader/__init__.py +266 -242
- crawlo/downloader/aiohttp_downloader.py +220 -212
- crawlo/downloader/cffi_downloader.py +256 -251
- crawlo/downloader/httpx_downloader.py +259 -259
- crawlo/downloader/hybrid_downloader.py +214 -0
- crawlo/downloader/playwright_downloader.py +403 -0
- crawlo/downloader/selenium_downloader.py +473 -0
- crawlo/event.py +11 -11
- crawlo/exceptions.py +81 -81
- crawlo/extension/__init__.py +37 -37
- crawlo/extension/health_check.py +141 -141
- crawlo/extension/log_interval.py +57 -57
- crawlo/extension/log_stats.py +81 -81
- crawlo/extension/logging_extension.py +43 -43
- crawlo/extension/memory_monitor.py +104 -88
- crawlo/extension/performance_profiler.py +133 -117
- crawlo/extension/request_recorder.py +107 -107
- crawlo/filters/__init__.py +154 -154
- crawlo/filters/aioredis_filter.py +280 -242
- crawlo/filters/memory_filter.py +269 -269
- crawlo/items/__init__.py +23 -23
- crawlo/items/base.py +21 -21
- crawlo/items/fields.py +53 -53
- crawlo/items/items.py +104 -104
- crawlo/middleware/__init__.py +21 -21
- crawlo/middleware/default_header.py +32 -32
- crawlo/middleware/download_delay.py +28 -28
- crawlo/middleware/middleware_manager.py +135 -135
- crawlo/middleware/proxy.py +272 -248
- crawlo/middleware/request_ignore.py +30 -30
- crawlo/middleware/response_code.py +18 -18
- crawlo/middleware/response_filter.py +26 -26
- crawlo/middleware/retry.py +124 -124
- crawlo/mode_manager.py +206 -201
- crawlo/network/__init__.py +21 -21
- crawlo/network/request.py +338 -311
- crawlo/network/response.py +360 -271
- crawlo/pipelines/__init__.py +21 -21
- crawlo/pipelines/bloom_dedup_pipeline.py +156 -156
- crawlo/pipelines/console_pipeline.py +39 -39
- crawlo/pipelines/csv_pipeline.py +316 -316
- crawlo/pipelines/database_dedup_pipeline.py +224 -224
- crawlo/pipelines/json_pipeline.py +218 -218
- crawlo/pipelines/memory_dedup_pipeline.py +115 -115
- crawlo/pipelines/mongo_pipeline.py +131 -131
- crawlo/pipelines/mysql_pipeline.py +316 -316
- crawlo/pipelines/pipeline_manager.py +56 -56
- crawlo/pipelines/redis_dedup_pipeline.py +166 -162
- crawlo/project.py +153 -153
- crawlo/queue/pqueue.py +37 -37
- crawlo/queue/queue_manager.py +320 -307
- crawlo/queue/redis_priority_queue.py +277 -209
- crawlo/settings/__init__.py +7 -7
- crawlo/settings/default_settings.py +216 -278
- crawlo/settings/setting_manager.py +99 -99
- crawlo/spider/__init__.py +639 -639
- crawlo/stats_collector.py +59 -59
- crawlo/subscriber.py +130 -130
- crawlo/task_manager.py +30 -30
- crawlo/templates/crawlo.cfg.tmpl +10 -10
- crawlo/templates/project/__init__.py.tmpl +3 -3
- crawlo/templates/project/items.py.tmpl +17 -17
- crawlo/templates/project/middlewares.py.tmpl +110 -110
- crawlo/templates/project/pipelines.py.tmpl +97 -97
- crawlo/templates/project/run.py.tmpl +251 -251
- crawlo/templates/project/settings.py.tmpl +326 -279
- crawlo/templates/project/settings_distributed.py.tmpl +120 -0
- crawlo/templates/project/settings_gentle.py.tmpl +95 -0
- crawlo/templates/project/settings_high_performance.py.tmpl +152 -0
- crawlo/templates/project/settings_simple.py.tmpl +69 -0
- crawlo/templates/project/spiders/__init__.py.tmpl +5 -5
- crawlo/templates/spider/spider.py.tmpl +141 -141
- crawlo/tools/__init__.py +183 -0
- crawlo/tools/anti_crawler.py +269 -0
- crawlo/tools/authenticated_proxy.py +241 -0
- crawlo/tools/data_validator.py +181 -0
- crawlo/tools/date_tools.py +36 -0
- crawlo/tools/distributed_coordinator.py +387 -0
- crawlo/tools/retry_mechanism.py +221 -0
- crawlo/tools/scenario_adapter.py +263 -0
- crawlo/utils/__init__.py +35 -7
- crawlo/utils/batch_processor.py +261 -0
- crawlo/utils/controlled_spider_mixin.py +439 -439
- crawlo/utils/date_tools.py +290 -233
- crawlo/utils/db_helper.py +343 -343
- crawlo/utils/enhanced_error_handler.py +360 -0
- crawlo/utils/env_config.py +106 -0
- crawlo/utils/error_handler.py +126 -0
- crawlo/utils/func_tools.py +82 -82
- crawlo/utils/large_scale_config.py +286 -286
- crawlo/utils/large_scale_helper.py +343 -343
- crawlo/utils/log.py +128 -128
- crawlo/utils/performance_monitor.py +285 -0
- crawlo/utils/queue_helper.py +175 -175
- crawlo/utils/redis_connection_pool.py +335 -0
- crawlo/utils/redis_key_validator.py +200 -0
- crawlo/utils/request.py +267 -267
- crawlo/utils/request_serializer.py +219 -219
- crawlo/utils/spider_loader.py +62 -62
- crawlo/utils/system.py +11 -11
- crawlo/utils/tools.py +4 -4
- crawlo/utils/url.py +39 -39
- {crawlo-1.1.4.dist-info → crawlo-1.1.5.dist-info}/METADATA +401 -403
- crawlo-1.1.5.dist-info/RECORD +185 -0
- examples/__init__.py +7 -7
- tests/__init__.py +7 -7
- tests/advanced_tools_example.py +276 -0
- tests/authenticated_proxy_example.py +237 -0
- tests/cleaners_example.py +161 -0
- tests/config_validation_demo.py +103 -0
- {examples → tests}/controlled_spider_example.py +205 -205
- tests/date_tools_example.py +181 -0
- tests/dynamic_loading_example.py +524 -0
- tests/dynamic_loading_test.py +105 -0
- tests/env_config_example.py +134 -0
- tests/error_handling_example.py +172 -0
- tests/redis_key_validation_demo.py +131 -0
- tests/response_improvements_example.py +145 -0
- tests/test_advanced_tools.py +149 -0
- tests/test_all_redis_key_configs.py +146 -0
- tests/test_authenticated_proxy.py +142 -0
- tests/test_cleaners.py +55 -0
- tests/test_comprehensive.py +147 -0
- tests/test_config_validator.py +194 -0
- tests/test_date_tools.py +124 -0
- tests/test_dynamic_downloaders_proxy.py +125 -0
- tests/test_dynamic_proxy.py +93 -0
- tests/test_dynamic_proxy_config.py +147 -0
- tests/test_dynamic_proxy_real.py +110 -0
- tests/test_edge_cases.py +304 -0
- tests/test_enhanced_error_handler.py +271 -0
- tests/test_env_config.py +122 -0
- tests/test_error_handler_compatibility.py +113 -0
- tests/test_final_validation.py +153 -153
- tests/test_framework_env_usage.py +104 -0
- tests/test_integration.py +357 -0
- tests/test_item_dedup_redis_key.py +123 -0
- tests/test_parsel.py +30 -0
- tests/test_performance.py +328 -0
- tests/test_proxy_health_check.py +32 -32
- tests/test_proxy_middleware_integration.py +136 -136
- tests/test_proxy_providers.py +56 -56
- tests/test_proxy_stats.py +19 -19
- tests/test_proxy_strategies.py +59 -59
- tests/test_queue_manager_redis_key.py +177 -0
- tests/test_redis_config.py +28 -28
- tests/test_redis_connection_pool.py +295 -0
- tests/test_redis_key_naming.py +182 -0
- tests/test_redis_key_validator.py +124 -0
- tests/test_redis_queue.py +224 -224
- tests/test_request_serialization.py +70 -70
- tests/test_response_improvements.py +153 -0
- tests/test_scheduler.py +241 -241
- tests/test_simple_response.py +62 -0
- tests/test_telecom_spider_redis_key.py +206 -0
- tests/test_template_content.py +88 -0
- tests/test_template_redis_key.py +135 -0
- tests/test_tools.py +154 -0
- tests/tools_example.py +258 -0
- crawlo/core/enhanced_engine.py +0 -190
- crawlo-1.1.4.dist-info/RECORD +0 -117
- {crawlo-1.1.4.dist-info → crawlo-1.1.5.dist-info}/WHEEL +0 -0
- {crawlo-1.1.4.dist-info → crawlo-1.1.5.dist-info}/entry_points.txt +0 -0
- {crawlo-1.1.4.dist-info → crawlo-1.1.5.dist-info}/top_level.txt +0 -0
tests/tools_example.py
ADDED
@@ -0,0 +1,258 @@
#!/usr/bin/python
# -*- coding: UTF-8 -*-
"""
Usage examples for the Crawlo framework tools package
"""
from crawlo.tools import (
    # Date tools
    parse_time,
    format_time,
    time_diff,

    # Data cleaning tools
    clean_text,
    format_currency,
    extract_emails,

    # Data validation tools
    validate_email,
    validate_url,
    validate_phone,

    # Request handling tools
    build_url,
    add_query_params,
    merge_headers,

    # Anti-crawling tools
    get_random_user_agent,
    rotate_proxy,

    # Authenticated proxy tools
    AuthenticatedProxy,
    create_proxy_config,
    get_proxy_info,

    # Distributed coordination tools
    generate_task_id,
    get_cluster_info
)


def demo_date_tools():
    """Demonstrate the date tools."""
    print("=== 日期工具演示 ===\n")

    # Parse a time string
    time_str = "2025-09-10 14:30:00"
    parsed_time = parse_time(time_str)
    print(f"解析时间: {time_str} -> {parsed_time}")

    # Format a datetime
    formatted_time = format_time(parsed_time, "%Y年%m月%d日 %H:%M:%S")
    print(f"格式化时间: {parsed_time} -> {formatted_time}")

    # Compute a time difference
    time_str2 = "2025-09-11 16:45:30"
    parsed_time2 = parse_time(time_str2)
    diff = time_diff(parsed_time2, parsed_time)
    print(f"时间差: {time_str2} - {time_str} = {diff} 秒")

    print()


def demo_data_cleaning_tools():
    """Demonstrate the data cleaning tools."""
    print("=== 数据清洗工具演示 ===\n")

    # Clean text
    dirty_text = "<p>这是一个 <b>测试</b>&文本</p>"
    clean_result = clean_text(dirty_text)
    print(f"清洗文本: {dirty_text} -> {clean_result}")

    # Format currency
    price = 1234.567
    formatted_price = format_currency(price, "¥", 2)
    print(f"格式化货币: {price} -> {formatted_price}")

    # Extract email addresses
    text_with_email = "联系邮箱: test@example.com, support@crawler.com"
    emails = extract_emails(text_with_email)
    print(f"提取邮箱: {text_with_email} -> {emails}")

    print()


def demo_data_validation_tools():
    """Demonstrate the data validation tools."""
    print("=== 数据验证工具演示 ===\n")

    # Validate an email address
    email = "test@example.com"
    is_valid_email = validate_email(email)
    print(f"验证邮箱: {email} -> {'有效' if is_valid_email else '无效'}")

    # Validate an invalid email address
    invalid_email = "invalid-email"
    is_valid_invalid = validate_email(invalid_email)
    print(f"验证邮箱: {invalid_email} -> {'有效' if is_valid_invalid else '无效'}")

    # Validate a URL
    url = "https://example.com/path?param=value"
    is_valid_url = validate_url(url)
    print(f"验证URL: {url} -> {'有效' if is_valid_url else '无效'}")

    # Validate a phone number
    phone = "13812345678"
    is_valid_phone = validate_phone(phone)
    print(f"验证电话: {phone} -> {'有效' if is_valid_phone else '无效'}")

    print()


def demo_request_handling_tools():
    """Demonstrate the request handling tools."""
    print("=== 请求处理工具演示 ===\n")

    # Build a URL
    base_url = "https://api.example.com"
    path = "/v1/users"
    query_params = {"page": 1, "limit": 10}
    full_url = build_url(base_url, path, query_params)
    print(f"构建URL: {base_url} + {path} + {query_params} -> {full_url}")

    # Add query parameters
    existing_url = "https://api.example.com/v1/users?page=1"
    new_params = {"sort": "name", "order": "asc"}
    updated_url = add_query_params(existing_url, new_params)
    print(f"添加参数: {existing_url} + {new_params} -> {updated_url}")

    # Merge request headers
    base_headers = {"Content-Type": "application/json", "Accept": "application/json"}
    additional_headers = {"Authorization": "Bearer token123", "User-Agent": "Crawlo/1.0"}
    merged_headers = merge_headers(base_headers, additional_headers)
    print(f"合并请求头:")
    print(f"  基础头: {base_headers}")
    print(f"  额外头: {additional_headers}")
    print(f"  合并后: {merged_headers}")

    print()


def demo_anti_crawler_tools():
    """Demonstrate the anti-crawling tools."""
    print("=== 反爬虫应对工具演示 ===\n")

    # Get a random User-Agent
    user_agent = get_random_user_agent()
    print(f"随机User-Agent: {user_agent[:50]}...")

    # Rotate the proxy
    proxy = rotate_proxy()
    print(f"轮换代理: {proxy}")

    print()


def demo_authenticated_proxy_tools():
    """Demonstrate the authenticated proxy tools."""
    print("=== 带认证代理工具演示 ===\n")

    # Create an authenticated proxy
    proxy_url = "http://username:password@proxy.example.com:8080"
    proxy = AuthenticatedProxy(proxy_url)

    print(f"代理URL: {proxy}")
    print(f"清洁URL: {proxy.clean_url}")
    print(f"用户名: {proxy.username}")
    print(f"密码: {proxy.password}")
    print(f"代理字典: {proxy.proxy_dict}")
    print(f"认证凭据: {proxy.get_auth_credentials()}")
    print(f"认证头: {proxy.get_auth_header()}")
    print(f"是否有效: {proxy.is_valid()}")

    # Create a proxy configuration
    proxy_config = create_proxy_config(proxy_url)
    print(f"\n代理配置: {proxy_config}")

    # Get proxy information
    proxy_info = get_proxy_info(proxy_url)
    print(f"代理信息: {proxy_info}")

    print()


def demo_distributed_coordinator_tools():
    """Demonstrate the distributed coordination tools."""
    print("=== 分布式协调工具演示 ===\n")

    # Generate a task ID
    url = "https://example.com/page/1"
    spider_name = "example_spider"
    task_id = generate_task_id(url, spider_name)
    print(f"生成任务ID: URL={url}, Spider={spider_name} -> {task_id}")

    # Get cluster information
    cluster_info = get_cluster_info()
    print(f"集群信息: {cluster_info}")

    print()


if __name__ == '__main__':
    # Run the demos
    demo_date_tools()
    demo_data_cleaning_tools()
    demo_data_validation_tools()
    demo_request_handling_tools()
    demo_anti_crawler_tools()
    demo_authenticated_proxy_tools()
    demo_distributed_coordinator_tools()

    print("=== 在爬虫中使用工具包 ===\n")
    print("在爬虫项目中,您可以这样使用工具包:")
    print("""
from crawlo import Spider, Request
from crawlo.tools import (
    clean_text,
    validate_email,
    get_random_user_agent,
    build_url,
    AuthenticatedProxy
)

class ExampleSpider(Spider):
    def start_requests(self):
        headers = {"User-Agent": get_random_user_agent()}

        # Use an authenticated proxy
        proxy_url = "http://username:password@proxy.example.com:8080"
        proxy = AuthenticatedProxy(proxy_url)

        request = Request("https://example.com", headers=headers)
        # Set the proxy according to the downloader type
        downloader_type = self.crawler.settings.get("DOWNLOADER_TYPE", "aiohttp")
        if downloader_type == "aiohttp":
            request.proxy = proxy.clean_url
            auth = proxy.get_auth_credentials()
            if auth:
                request.meta["proxy_auth"] = auth
        else:
            request.proxy = proxy.proxy_dict

        yield request

    def parse(self, response):
        # Extract the data
        title = response.css('h1::text').get()
        email = response.css('.email::text').get()

        # Clean and validate the data
        clean_title = clean_text(title) if title else None
        is_valid_email = validate_email(email) if email else False

        # Build the next page URL
        next_page_url = build_url("https://example.com", "/page/2")

        # Process the data...
    """)
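The spider snippet printed at the end of the example above branches on the configured downloader type when attaching an authenticated proxy. The same branching can be factored into a small helper; the sketch below is illustrative only (attach_proxy is not part of crawlo, and it assumes AuthenticatedProxy exposes the clean_url, proxy_dict and get_auth_credentials() members shown above):

from crawlo.tools import AuthenticatedProxy

def attach_proxy(request, proxy_url, downloader_type="aiohttp"):
    # Hypothetical helper mirroring the branching in the printed example above.
    proxy = AuthenticatedProxy(proxy_url)
    if downloader_type == "aiohttp":
        # aiohttp-style downloaders take the bare proxy URL plus separate credentials
        request.proxy = proxy.clean_url
        auth = proxy.get_auth_credentials()
        if auth:
            request.meta["proxy_auth"] = auth
    else:
        # other downloaders accept the proxy mapping directly
        request.proxy = proxy.proxy_dict
    return request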
crawlo/core/enhanced_engine.py
DELETED
@@ -1,190 +0,0 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Enhanced engine implementation
Addresses concurrency control and backpressure when generating requests at large scale
"""
import asyncio

from crawlo.core.engine import Engine as BaseEngine
from crawlo.utils.log import get_logger


class EnhancedEngine(BaseEngine):
    """
    Enhanced engine implementation

    Key improvements:
    1. Smarter request generation control
    2. Backpressure-aware scheduling
    3. Dynamic concurrency adjustment
    """

    def __init__(self, crawler):
        super().__init__(crawler)

        # Enhanced control parameters
        self.max_queue_size = self.settings.get_int('SCHEDULER_MAX_QUEUE_SIZE', 200)
        self.generation_batch_size = 10
        self.generation_interval = 0.05
        self.backpressure_ratio = 0.8  # apply backpressure once the queue reaches 80%

        # State tracking
        self._generation_paused = False
        self._last_generation_time = 0
        self._generation_stats = {
            'total_generated': 0,
            'backpressure_events': 0
        }

        self.logger = get_logger(self.__class__.__name__)

    async def crawl(self):
        """
        Enhanced crawl loop
        Supports controlled request generation and backpressure
        """
        generation_task = None

        try:
            # Start the request generation task
            if self.start_requests:
                generation_task = asyncio.create_task(
                    self._controlled_request_generation()
                )

            # Main crawl loop
            while self.running:
                # Fetch and process the next request
                if request := await self._get_next_request():
                    await self._crawl(request)

                # Check the exit condition
                if await self._should_exit():
                    break

                # Sleep briefly to avoid busy waiting
                await asyncio.sleep(0.001)

        finally:
            # Clean up the generation task
            if generation_task and not generation_task.done():
                generation_task.cancel()
                try:
                    await generation_task
                except asyncio.CancelledError:
                    pass

            await self.close_spider()

    async def _controlled_request_generation(self):
        """Controlled request generation."""
        self.logger.info("🎛️ 启动受控请求生成")

        batch = []
        total_generated = 0

        try:
            for request in self.start_requests:
                batch.append(request)

                # Process in batches
                if len(batch) >= self.generation_batch_size:
                    generated = await self._process_generation_batch(batch)
                    total_generated += generated
                    batch = []

                    # Backpressure check
                    if await self._should_pause_generation():
                        await self._wait_for_capacity()

            # Process the remaining requests
            if batch:
                generated = await self._process_generation_batch(batch)
                total_generated += generated

        except Exception as e:
            self.logger.error(f"❌ 请求生成失败: {e}")

        finally:
            self.start_requests = None
            self.logger.info(f"🎉 请求生成完成,总计: {total_generated}")

    async def _process_generation_batch(self, batch) -> int:
        """Process one batch of requests."""
        generated = 0

        for request in batch:
            if not self.running:
                break

            # Wait until the queue has room
            while await self._is_queue_full() and self.running:
                await asyncio.sleep(0.1)

            if self.running:
                await self.enqueue_request(request)
                generated += 1
                self._generation_stats['total_generated'] += 1

                # Throttle the generation rate
                if self.generation_interval > 0:
                    await asyncio.sleep(self.generation_interval)

        return generated

    async def _should_pause_generation(self) -> bool:
        """Decide whether request generation should pause."""
        # Check the queue size
        if await self._is_queue_full():
            return True

        # Check the task manager load
        if self.task_manager:
            current_tasks = len(self.task_manager.current_task)
            if hasattr(self.task_manager, 'semaphore'):
                max_concurrency = getattr(self.task_manager.semaphore, '_initial_value', 8)
                if current_tasks >= max_concurrency * self.backpressure_ratio:
                    return True

        return False

    async def _is_queue_full(self) -> bool:
        """Check whether the queue is effectively full."""
        if not self.scheduler:
            return False

        queue_size = len(self.scheduler)
        return queue_size >= self.max_queue_size * self.backpressure_ratio

    async def _wait_for_capacity(self):
        """Wait until the system has enough capacity."""
        self._generation_stats['backpressure_events'] += 1
        self.logger.debug("⏸️ 触发背压,暂停请求生成")

        wait_time = 0.1
        max_wait = 2.0

        while await self._should_pause_generation() and self.running:
            await asyncio.sleep(wait_time)
            wait_time = min(wait_time * 1.1, max_wait)

    async def _should_exit(self) -> bool:
        """Check whether the engine should exit."""
        # No start requests left and every component is idle
        if (self.start_requests is None and
                self.scheduler.idle() and
                self.downloader.idle() and
                self.task_manager.all_done() and
                self.processor.idle()):
            return True

        return False

    def get_generation_stats(self) -> dict:
        """Return request generation statistics."""
        return {
            **self._generation_stats,
            'queue_size': len(self.scheduler) if self.scheduler else 0,
            'active_tasks': len(self.task_manager.current_task) if self.task_manager else 0
        }
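The deleted engine's _wait_for_capacity above is a capped geometric backoff around a pause predicate. Isolated as a standalone sketch (the name wait_for_capacity and its parameters are illustrative, not crawlo API; should_pause stands in for _should_pause_generation):

import asyncio

async def wait_for_capacity(should_pause, start=0.1, factor=1.1, max_wait=2.0):
    # Poll an async pause predicate, growing the sleep geometrically up to max_wait seconds.
    wait_time = start
    while await should_pause():
        await asyncio.sleep(wait_time)
        wait_time = min(wait_time * factor, max_wait)

With the defaults shown in the deleted code (SCHEDULER_MAX_QUEUE_SIZE=200, backpressure_ratio=0.8), generation pauses once roughly 160 requests are queued and resumes when the queue drains below that threshold.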
crawlo-1.1.4.dist-info/RECORD
DELETED
@@ -1,117 +0,0 @@
crawlo/__init__.py,sha256=esOolburYDjtF43D5N9Kh6TSQW2yKcz888ilhBSinBc,825
crawlo/__version__.py,sha256=XxXhu8-QnuD9hA8Ah0WX5rgpt_DwOQmAwcK-FtpngyQ,22
crawlo/cli.py,sha256=CtR2Pfa7SyRxEKPaXqt-6E6K5Vq5z3rfdAI95UO4cbU,1166
crawlo/config.py,sha256=i0Amz6wNPgv_aVcdCBRRlcwuZLSa87cH9OEmTQvB97Q,8329
crawlo/crawler.py,sha256=v6i5tjgSOtbMoqiw1qdgKx1cY4kcVcd5l5bUTWtJNNU,36461
crawlo/event.py,sha256=7-y6HNv_EIJSYQNzsj0mVK-Gg4ON3wdQeMdQjfFJPlw,313
crawlo/exceptions.py,sha256=sMay0wnWLfc_FXWslqxm60qz6b66LXs3EdN_w8ygE9k,1166
crawlo/mode_manager.py,sha256=WIxrq9S3EAH0D71LH1AxvcqXomeABqoXgtUN4A--DKY,6702
crawlo/project.py,sha256=xWN2eTAjf_Pza-wWvvV4JjScQRWxe9hXlztX81ccUMc,5182
crawlo/stats_collector.py,sha256=NkO09CB-220qz5rxFcD_dedGfr2VPFrDo4hya0Zh8Qc,1577
crawlo/subscriber.py,sha256=gioTIqRdEwVG-bwIiQonbk1vWWAqTh9hzVkrqZ1AfP0,5006
crawlo/task_manager.py,sha256=19InAxS9oJ9EMj20Aw2urN-v6BeC22dkgPuW-B9-4UI,819
crawlo/commands/__init__.py,sha256=AMYjXG7ulE8dPVmgWVo0uqXsaCYUUZYmmu2-7kFzH1M,342
crawlo/commands/check.py,sha256=172OiAxnX5wwSlszUsyPgMZwAoIbGDTdfhtRz309ilc,22843
crawlo/commands/genspider.py,sha256=-jGJdfXLsefX_H1ydQ2wirdu6p6wmhClzVXY_0L-1aE,5050
crawlo/commands/list.py,sha256=yByqQeZBgvjewOKxpnOobpeJ7Hnbs-CWsoyITqZu2ZY,5781
crawlo/commands/run.py,sha256=8Qngjsl8Q4RBdO39a__wKGsheY2PFuPit2hds_jwEbM,10524
crawlo/commands/startproject.py,sha256=bzNgpkKzUEggY2m7Iw810mSPe8wOPFBqSCO0jZX3z_g,7138
crawlo/commands/stats.py,sha256=6pAgkEi8MBnCer2rWmKpaTYr1jaM6HeMG9owAvEzJyY,6064
crawlo/commands/utils.py,sha256=nohMvUU2zLvX0XzXk6KeCNxP0EvSWj9DiVLxM_7tD5o,5106
crawlo/core/__init__.py,sha256=PnFyJdVNHBoPmV1sW0AHQXijeoSTQ8cMYrbNM1JK8kA,41
crawlo/core/engine.py,sha256=8Dcew1XyxChW5Fz1wFEWpJlPrQb2hKDWKul8e61S-Q0,6662
crawlo/core/enhanced_engine.py,sha256=9I9Uxdy2oAz8zDGTzEiytuKu__VDVmIN8zwZKfrD8bw,6254
crawlo/core/processor.py,sha256=qmCqAeqhwYu-UE86evYesaGt9qpuSIfH-ZIZKcXFCZc,1140
crawlo/core/scheduler.py,sha256=fiU-Q-lzyC3B6ih8NSWqjP1Xw_ryNVb_4dLUARtWRBE,5804
crawlo/downloader/__init__.py,sha256=tl0mE54reR-PuJYSsXsKP2VY5uzvq4lITxZwKKjNzPs,7663
crawlo/downloader/aiohttp_downloader.py,sha256=UKupGYPOWrscAVsjhFgKYElTa9tbEeltqV7nuWqjIeE,8005
crawlo/downloader/cffi_downloader.py,sha256=-GVfSIhi1Ip56suSiGf8jnUE2EBF1P56vw0uxLh_T6I,10440
crawlo/downloader/httpx_downloader.py,sha256=7jfQfvAtfk8yD_mvwUbWLhYOxMM7r1nudiU7m_Jl9wc,12037
crawlo/extension/__init__.py,sha256=Sg588p6UhyrwFNTiD2wqGW-i3xgLX6HlLuQPKT7mayE,1526
crawlo/extension/health_check.py,sha256=IVaaVo_0CcZtf1LoCAYXIBvs3wZ7hdmT6U4-NYWAgP0,5527
crawlo/extension/log_interval.py,sha256=VCIeNqXcWDnxj4m6l77cjqgRzV8LfsPMb22X0Xc1Vwc,2417
crawlo/extension/log_stats.py,sha256=Ssxz6R1YpWIj5WJvQ2cJ9F5oR7FUFdj-ITc9lV92SSU,2908
crawlo/extension/logging_extension.py,sha256=ET6VAu1J2qNMz4NnG1G3zQLRhbsvV7l6xRIuQLE6DaE,1626
crawlo/extension/memory_monitor.py,sha256=gg-GK5RD9XhnrN_zp3KTmPKyWDmKLMv_OTY-HxSxBNI,3664
crawlo/extension/performance_profiler.py,sha256=NvQuuvE83dXJ-1URpN8OF9za9J1l7xhVbV22JynPQpA,4235
crawlo/extension/request_recorder.py,sha256=RC23yzXClnVv9j2ljQvjBkUfWznfnDHsrQejKhE9y5E,4074
crawlo/filters/__init__.py,sha256=XC_Q4ykZtSNYizYlAcehVwBBNO3lZ2zuWwafzXiuWyQ,4241
crawlo/filters/aioredis_filter.py,sha256=WhkFZcVAym9wLSUa8WTVctYfEibjxG42umtmacO1IY0,8370
crawlo/filters/memory_filter.py,sha256=VJO0UFRYGxmV8dj4G1subsQ-FtvPcGLbvd7IVtqXnOs,9260
crawlo/items/__init__.py,sha256=bqekZrRlDhxfWie0UbCs656TptYseoe9QJ67I4E7Elk,386
crawlo/items/base.py,sha256=tAYrPJgblp3ZEihDXvappdYc6pGdim6x2_9QSmMKI2o,577
crawlo/items/fields.py,sha256=wMlakQTsEwyrlLzMt1gI4pScLQZMqd3E1xcfH4dbSqk,1801
crawlo/items/items.py,sha256=e-3nXI9ckD64vcDxxQiAU6ufbtJMs09gbZQcYjxgwHY,3374
crawlo/middleware/__init__.py,sha256=ldaGFNbiJnK9Fx12Vdf9fDNfzXxoETtShp5r-vodtw0,549
crawlo/middleware/default_header.py,sha256=i_Uj07JObyeZFxL7ZAZmvZsHvA1HGtkNab1sA0d-nWI,1067
crawlo/middleware/download_delay.py,sha256=2M-TchDA7MwyTfYy0Hzh_bW9wlHlpiP-oQlys7crTj0,966
crawlo/middleware/middleware_manager.py,sha256=j1hkWRFB5rnC5SnB7oXWE5eUNv8blS9krDIDM5fIDs8,6213
crawlo/middleware/proxy.py,sha256=m2ZZ50En9hUtgrqSqA6hItGT74xMqccHFPhZshutIco,9811
crawlo/middleware/request_ignore.py,sha256=QI2z4fUnJ-4xvPTZAmsL-GqR4RFHS1xq9iDr5KFrMco,997
crawlo/middleware/response_code.py,sha256=tmef2QVl3JCiTMii6VQkASlOY2OyqmOPoOfNxIK1eF8,659
crawlo/middleware/response_filter.py,sha256=ep8ZxDlfIefi9YqK8dPASEp5TTDRo9QEY_jMceC411s,837
crawlo/middleware/retry.py,sha256=-7zpRURugiTTm4QYUSUlbnURD5mcT2Ji0yHvCgY1wGc,4124
crawlo/network/__init__.py,sha256=BLPERYPo22g1BXrW--wUnlolrdFUmOPjgOB8XQQJlck,397
crawlo/network/request.py,sha256=tPAiOVJyF3Kk-midqydTGXgv5M5tsYJRtwUUJTrUsrE,11075
crawlo/network/response.py,sha256=cUvdjsB2cQ-qWEKHNGIkwWGgCg-EnQ81xTrjrUOVno0,9738
crawlo/pipelines/__init__.py,sha256=lrdVDjeHLNkA4_MAwI1auk_I9xfeU1SlBWXiammb6lc,616
crawlo/pipelines/bloom_dedup_pipeline.py,sha256=QQxGFGEoMHN4Vx2kq7G_i1o9pmuXp8clZebilOar3fk,5642
crawlo/pipelines/console_pipeline.py,sha256=KABkR3J-rqO0Awox7lizxKR2XuHfVhWPiVRgIybwwu4,1248
crawlo/pipelines/csv_pipeline.py,sha256=6FBT2AoU6iNU-5NfgWRq7-JpF9dK2nBokjxx-y4jIas,12174
crawlo/pipelines/database_dedup_pipeline.py,sha256=wVBXEGArFR3uxoN7yfJSOarBmtGrJpOqowAqa7OUs98,8000
crawlo/pipelines/json_pipeline.py,sha256=vlu1nqbD2mtqtExt9cL5nibx1CwJM1RNqd4WGjZRHAY,8367
crawlo/pipelines/memory_dedup_pipeline.py,sha256=5jeL2jEq7sioYmXlzfkx-LNSbWyChrXeWx8d15YEZOA,3839
crawlo/pipelines/mongo_pipeline.py,sha256=k7gNqAO-g2MtIfArphC6z5ZzkKVRkBKcv-2ImziPFA0,5706
crawlo/pipelines/mysql_pipeline.py,sha256=cwgJvRORTRea_Eep2coBaMf3G8PQVTQA1qrnIlDZApc,13480
crawlo/pipelines/pipeline_manager.py,sha256=VrbebOYiqrobtKhp5II18w-odCICdWkmRg5WPK0Emz4,2112
crawlo/pipelines/redis_dedup_pipeline.py,sha256=TaokJ4wP5-Cxf-ueFJdh4SX58hchT0QzZ5RBDXHDN64,6003
crawlo/queue/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
crawlo/queue/pqueue.py,sha256=yzF900ko2RReVNZtfk_VV3PzSXEUALI6SHf97geuu6s,1058
crawlo/queue/queue_manager.py,sha256=NMR0Fo8XFBg6_g7htq4D80cS6Ilo0EKt5QtyF-KxNuM,11467
crawlo/queue/redis_priority_queue.py,sha256=boJCKqcKxRw9XCCzaHy5qmrX9DvzPiQBzOkBHI5odfE,8116
crawlo/settings/__init__.py,sha256=xsukVKn_h2Hopm1Nj-bXkhbfyS62QTTvJi7fhZUwR9M,123
crawlo/settings/default_settings.py,sha256=B4_61tNJvqzVvyqt9AtRV7Iq5q8G4pJOExcN0ve7S_A,11559
crawlo/settings/setting_manager.py,sha256=SxKB1aCWh4OySM_bH9cYng9I3PAmrSP-Q8XOZEWEwbI,2899
crawlo/spider/__init__.py,sha256=Z_rK23l5yt-DuwJPg8bcqodM_FIs4-iHLaKOimGumcE,20452
crawlo/templates/crawlo.cfg.tmpl,sha256=9BAmwEibS5Tvy6HIcGXPb0BGeuesmibebmTW0iAEkmo,230
crawlo/templates/project/__init__.py.tmpl,sha256=f3ETIXw_O6K-lkL6lXM5znMPJW1FZYGFrwDs2BnHcnQ,58
crawlo/templates/project/items.py.tmpl,sha256=mt1Mm--H2Ouos3r7JPkYh0r33rgYJf1YOMz0OZy8TYs,297
crawlo/templates/project/middlewares.py.tmpl,sha256=jpmj7b7Zb7d3nVyxcaVNdp4KqSts6l2cPSqn_oJUSrM,3775
crawlo/templates/project/pipelines.py.tmpl,sha256=k_4MJnwZ6GPqVwJSEDURUlTxWybmts4vHrF0de2vgAk,2620
crawlo/templates/project/run.py.tmpl,sha256=ktkYOslcCh9mpklg6yE5VqfATx3Frj_jNT5z5gHjQ4o,8177
crawlo/templates/project/settings.py.tmpl,sha256=O_teIARjzRD3aMvPnuIgjaDHdjwW-3beyzfo1QH-Hr8,9580
crawlo/templates/project/spiders/__init__.py.tmpl,sha256=j_YKsw6HQMJyqlk3WUouP3bsr-XVxshRoSNakHBc00g,106
crawlo/templates/spider/spider.py.tmpl,sha256=a8S9j43z5gE4auMhf_OnnuVHSZN3JbMDu8Bczu8zIZY,4944
crawlo/utils/__init__.py,sha256=BDORpyjMN7VGPKImnCDKSkprS-petgD7ezc9rMlBvb0,123
crawlo/utils/controlled_spider_mixin.py,sha256=VjT30pNW_YIgmTD0nb7DDl2D3HvpnAYFzgSgV3fxFN0,16475
crawlo/utils/date_tools.py,sha256=0yG0tzGb1VFgWDJJ_cow2LJfz3kj_w2MqSjmfKKESl8,6961
crawlo/utils/db_helper.py,sha256=3ib5-agrlwf2t5S_QtLRYH75wvJDlYbRqRmDEbpH5Bo,10559
crawlo/utils/func_tools.py,sha256=WUZEGpWMuDDX7g-QySM7iaiC74erW2SSkZoUvDw1NjM,2369
crawlo/utils/large_scale_config.py,sha256=j7wQ5ty7pQlBRygw2vhRJ7OI19RYBZKPfYMP3WeF2WI,8154
crawlo/utils/large_scale_helper.py,sha256=JJqcGSI6VaVe3MSL6IWjmCp8XQIu6T4U-BvBLSttr_s,12157
crawlo/utils/log.py,sha256=A3lPyhD8kD88cV23KOL-_eT8g69xGQ5L1toDB2AO0mc,4005
crawlo/utils/queue_helper.py,sha256=xpUUTOqlU1xz2Pb9NKAVGo3AfAO-7Xvx8Lm1q65Dgck,4743
crawlo/utils/request.py,sha256=yoLB2rY8d78vgPjIWpdhY5SalIKjyLIvTG_UH6EMdVI,8798
crawlo/utils/request_serializer.py,sha256=bPoSQqE2ksiMyP3WiPB3w3UqZs4f_LgkAw4Pj0qyBDo,8565
crawlo/utils/spider_loader.py,sha256=pEDUsYOTGjszA6KgjiMlYN4GS5fP4uakkhcp3JTFFQY,2187
crawlo/utils/system.py,sha256=HvWV1acxou0Rn0L7pNq4CnV_GWFeU0Tmjy3_nLD8M64,237
crawlo/utils/tools.py,sha256=5Uv25Wy4m_ndZY0-n-eX-t3PxvaZ6wR3-Wvx-o7_Vrs,271
crawlo/utils/url.py,sha256=rlgX2VlJv6JvLmCDTsbxzMSXE6R5ZL_0dLEqprsA-JU,1482
examples/__init__.py,sha256=6i631BPnS_TR_BWUjtjB5CBO-zv9kRkwQTQvSya2wHE,123
examples/controlled_spider_example.py,sha256=SP_k4mdKPvD1JCPs9UCm68jcy2Frg84vvXv9-14RC6I,7776
tests/__init__.py,sha256=scL1IPVT1iucth7v8ffrjRdeW7QrC_Y7AMmFVMdTY1Y,129
tests/test_final_validation.py,sha256=fBxf_6YcAEa_HyV_oGAXmmVHY4i6FdA4J6klCmc36hQ,4925
tests/test_proxy_health_check.py,sha256=xo3QMP1YNw9hu7JDHZOYCUZmFFKLJpHSh4SbxXhCRPQ,1091
tests/test_proxy_middleware_integration.py,sha256=zcl7fR9Toc-I-stSUTzKZPwcfh3kgrpjI5SbkZ6AVmE,4305
tests/test_proxy_providers.py,sha256=XwWZCywTYguSsUxSm6fsbaoH1p9dKjqSIx9-sqKZehA,1693
tests/test_proxy_stats.py,sha256=Til_yksrRz2yBVw-yJi5-36LhNW3vTwpXTm4BdR9PUM,507
tests/test_proxy_strategies.py,sha256=ZkziozkvZd3KWOQnpHQ8Upd3WpyoX7gN0qFGluNm348,1809
tests/test_redis_config.py,sha256=TqzFRojc6esGXjGhUCvSLYQDUTAgEJsty9vRVuNraMU,893
tests/test_redis_queue.py,sha256=o6xViXxJcdx-1eMcG3vhAQEIm8h346HnZb7JXs7ZjwM,6622
tests/test_request_serialization.py,sha256=8sVdppAsohJ5u-m1WvablCndwL-M_36YPLdGKwgeznM,2289
tests/test_scheduler.py,sha256=-FOkTWzaMdr6yfO1Msu74hI_GgSfD7iRxO-cFA-9Iyk,7442
crawlo-1.1.4.dist-info/METADATA,sha256=2I2NA0BR-MWoPZmRUkWrUQYMjuPiUi9mrogIYPWpASU,19781
crawlo-1.1.4.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
crawlo-1.1.4.dist-info/entry_points.txt,sha256=5HoVoTSPxI8SCa5B7pQYxLSrkOdiunyO9tqNsLMv52g,43
crawlo-1.1.4.dist-info/top_level.txt,sha256=keG_67pbZ_wZL2dmDRA9RMaNHTaV_x_oxZ9DKNgwvR0,22
crawlo-1.1.4.dist-info/RECORD,,
{crawlo-1.1.4.dist-info → crawlo-1.1.5.dist-info}/WHEEL
File without changes
{crawlo-1.1.4.dist-info → crawlo-1.1.5.dist-info}/entry_points.txt
File without changes
{crawlo-1.1.4.dist-info → crawlo-1.1.5.dist-info}/top_level.txt
File without changes