crawlo-1.4.6-py3-none-any.whl → crawlo-1.4.8-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of crawlo might be problematic.
- crawlo/__init__.py +2 -1
- crawlo/__version__.py +1 -1
- crawlo/cli.py +2 -2
- crawlo/commands/check.py +1 -1
- crawlo/commands/help.py +5 -3
- crawlo/commands/list.py +1 -1
- crawlo/commands/run.py +49 -11
- crawlo/commands/stats.py +1 -1
- crawlo/config.py +12 -4
- crawlo/config_validator.py +1 -1
- crawlo/core/engine.py +20 -7
- crawlo/core/processor.py +1 -1
- crawlo/core/scheduler.py +4 -5
- crawlo/crawler.py +51 -10
- crawlo/downloader/__init__.py +7 -3
- crawlo/downloader/aiohttp_downloader.py +18 -18
- crawlo/downloader/cffi_downloader.py +5 -2
- crawlo/downloader/httpx_downloader.py +9 -3
- crawlo/downloader/hybrid_downloader.py +2 -2
- crawlo/downloader/playwright_downloader.py +38 -15
- crawlo/downloader/selenium_downloader.py +16 -2
- crawlo/event.py +42 -8
- crawlo/exceptions.py +157 -24
- crawlo/extension/__init__.py +10 -9
- crawlo/extension/health_check.py +7 -7
- crawlo/extension/log_interval.py +6 -6
- crawlo/extension/log_stats.py +2 -2
- crawlo/extension/logging_extension.py +4 -12
- crawlo/extension/memory_monitor.py +5 -5
- crawlo/extension/performance_profiler.py +5 -5
- crawlo/extension/request_recorder.py +6 -6
- crawlo/factories/base.py +1 -1
- crawlo/factories/crawler.py +61 -60
- crawlo/factories/utils.py +135 -0
- crawlo/filters/__init__.py +19 -2
- crawlo/filters/aioredis_filter.py +133 -49
- crawlo/filters/memory_filter.py +6 -21
- crawlo/framework.py +22 -8
- crawlo/initialization/built_in.py +24 -67
- crawlo/initialization/core.py +65 -19
- crawlo/initialization/phases.py +83 -2
- crawlo/initialization/registry.py +5 -7
- crawlo/initialization/utils.py +49 -0
- crawlo/logging/__init__.py +6 -10
- crawlo/logging/config.py +106 -22
- crawlo/logging/factory.py +12 -8
- crawlo/logging/manager.py +19 -27
- crawlo/middleware/__init__.py +72 -9
- crawlo/middleware/default_header.py +2 -2
- crawlo/middleware/download_delay.py +2 -2
- crawlo/middleware/middleware_manager.py +6 -6
- crawlo/middleware/offsite.py +2 -2
- crawlo/middleware/proxy.py +2 -2
- crawlo/middleware/request_ignore.py +4 -4
- crawlo/middleware/response_code.py +2 -2
- crawlo/middleware/response_filter.py +2 -2
- crawlo/middleware/retry.py +1 -1
- crawlo/mode_manager.py +38 -4
- crawlo/network/request.py +54 -26
- crawlo/network/response.py +69 -135
- crawlo/pipelines/__init__.py +40 -9
- crawlo/pipelines/base_pipeline.py +452 -0
- crawlo/pipelines/bloom_dedup_pipeline.py +4 -5
- crawlo/pipelines/console_pipeline.py +2 -2
- crawlo/pipelines/csv_pipeline.py +4 -4
- crawlo/pipelines/database_dedup_pipeline.py +4 -5
- crawlo/pipelines/json_pipeline.py +4 -4
- crawlo/pipelines/memory_dedup_pipeline.py +4 -5
- crawlo/pipelines/mongo_pipeline.py +23 -14
- crawlo/pipelines/mysql_pipeline.py +31 -39
- crawlo/pipelines/pipeline_manager.py +8 -8
- crawlo/pipelines/redis_dedup_pipeline.py +13 -14
- crawlo/project.py +1 -1
- crawlo/queue/__init__.py +10 -0
- crawlo/queue/queue_manager.py +79 -13
- crawlo/queue/redis_priority_queue.py +196 -47
- crawlo/settings/default_settings.py +16 -6
- crawlo/spider/__init__.py +6 -5
- crawlo/stats_collector.py +2 -2
- crawlo/task_manager.py +1 -1
- crawlo/templates/crawlo.cfg.tmpl +3 -3
- crawlo/templates/project/__init__.py.tmpl +1 -3
- crawlo/templates/project/items.py.tmpl +2 -6
- crawlo/templates/project/middlewares.py.tmpl +1 -1
- crawlo/templates/project/pipelines.py.tmpl +1 -2
- crawlo/templates/project/settings.py.tmpl +12 -10
- crawlo/templates/project/settings_distributed.py.tmpl +14 -13
- crawlo/templates/project/settings_gentle.py.tmpl +21 -23
- crawlo/templates/project/settings_high_performance.py.tmpl +21 -23
- crawlo/templates/project/settings_minimal.py.tmpl +10 -8
- crawlo/templates/project/settings_simple.py.tmpl +21 -23
- crawlo/templates/run.py.tmpl +1 -1
- crawlo/templates/spider/spider.py.tmpl +4 -12
- crawlo/templates/spiders_init.py.tmpl +3 -8
- crawlo/tools/__init__.py +0 -103
- crawlo/tools/scenario_adapter.py +1 -1
- crawlo/utils/__init__.py +25 -1
- crawlo/utils/batch_processor.py +23 -6
- crawlo/utils/config_manager.py +442 -0
- crawlo/utils/controlled_spider_mixin.py +1 -1
- crawlo/utils/db_helper.py +1 -1
- crawlo/utils/encoding_helper.py +190 -0
- crawlo/utils/error_handler.py +2 -2
- crawlo/utils/large_scale_helper.py +1 -1
- crawlo/utils/leak_detector.py +335 -0
- crawlo/utils/mongo_connection_pool.py +157 -0
- crawlo/utils/mysql_connection_pool.py +197 -0
- crawlo/utils/performance_monitor.py +1 -1
- crawlo/utils/redis_checker.py +91 -0
- crawlo/utils/redis_connection_pool.py +260 -70
- crawlo/utils/redis_key_validator.py +1 -1
- crawlo/utils/request.py +24 -2
- crawlo/utils/request_serializer.py +1 -1
- crawlo/utils/resource_manager.py +337 -0
- crawlo/utils/response_helper.py +113 -0
- crawlo/utils/selector_helper.py +3 -2
- crawlo/utils/singleton.py +70 -0
- crawlo/utils/spider_loader.py +1 -1
- crawlo/utils/text_helper.py +1 -1
- crawlo-1.4.8.dist-info/METADATA +831 -0
- {crawlo-1.4.6.dist-info → crawlo-1.4.8.dist-info}/RECORD +131 -145
- tests/advanced_tools_example.py +10 -68
- tests/distributed_dedup_test.py +467 -0
- tests/monitor_redis_dedup.sh +72 -0
- tests/ofweek_scrapy/ofweek_scrapy/spiders/__init__.py +4 -4
- tests/simple_cli_test.py +55 -0
- tests/test_cli_arguments.py +119 -0
- tests/test_dedup_fix.py +10 -10
- crawlo/logging/async_handler.py +0 -181
- crawlo/logging/monitor.py +0 -153
- crawlo/logging/sampler.py +0 -167
- crawlo/tools/authenticated_proxy.py +0 -241
- crawlo/tools/data_formatter.py +0 -226
- crawlo/tools/data_validator.py +0 -181
- crawlo/tools/encoding_converter.py +0 -127
- crawlo/tools/network_diagnostic.py +0 -365
- crawlo/tools/request_tools.py +0 -83
- crawlo/tools/retry_mechanism.py +0 -224
- crawlo/utils/env_config.py +0 -143
- crawlo/utils/large_scale_config.py +0 -287
- crawlo/utils/log.py +0 -80
- crawlo/utils/system.py +0 -11
- crawlo/utils/tools.py +0 -5
- crawlo/utils/url.py +0 -40
- crawlo-1.4.6.dist-info/METADATA +0 -329
- tests/env_config_example.py +0 -134
- tests/ofweek_scrapy/ofweek_scrapy/spiders/ofweek_spider.py +0 -162
- tests/test_authenticated_proxy.py +0 -142
- tests/test_comprehensive.py +0 -147
- tests/test_dynamic_downloaders_proxy.py +0 -125
- tests/test_dynamic_proxy.py +0 -93
- tests/test_dynamic_proxy_config.py +0 -147
- tests/test_dynamic_proxy_real.py +0 -110
- tests/test_env_config.py +0 -122
- tests/test_framework_env_usage.py +0 -104
- tests/test_large_scale_config.py +0 -113
- tests/test_proxy_api.py +0 -265
- tests/test_real_scenario_proxy.py +0 -196
- tests/tools_example.py +0 -261
- {crawlo-1.4.6.dist-info → crawlo-1.4.8.dist-info}/WHEEL +0 -0
- {crawlo-1.4.6.dist-info → crawlo-1.4.8.dist-info}/entry_points.txt +0 -0
- {crawlo-1.4.6.dist-info → crawlo-1.4.8.dist-info}/top_level.txt +0 -0
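For reference, a per-file summary like the list above can be approximated locally by treating both wheels as the zip archives they are and diffing their members. The sketch below is only an illustration: it assumes the two wheel files have already been downloaded next to the script (for example with `pip download crawlo==1.4.6 --no-deps`), the OLD/NEW paths and the read_members helper are placeholders, and renamed paths such as the dist-info directory appear as separate additions and deletions instead of the registry's {old → new} grouping.

# Minimal sketch: approximate the per-file +added/-removed counts from two local wheels.
import difflib
import zipfile

OLD = "crawlo-1.4.6-py3-none-any.whl"  # assumed local path
NEW = "crawlo-1.4.8-py3-none-any.whl"  # assumed local path

def read_members(path):
    """Return {archive member name: decoded text} for a wheel (a zip file)."""
    with zipfile.ZipFile(path) as zf:
        return {name: zf.read(name).decode("utf-8", errors="replace")
                for name in zf.namelist() if not name.endswith("/")}

old, new = read_members(OLD), read_members(NEW)
for name in sorted(old.keys() | new.keys()):
    a = old.get(name, "").splitlines()
    b = new.get(name, "").splitlines()
    if a == b:
        continue
    added = removed = 0
    for line in difflib.unified_diff(a, b, lineterm=""):
        if line.startswith("+") and not line.startswith("+++"):
            added += 1
        elif line.startswith("-") and not line.startswith("---"):
            removed += 1
    print(f"{name} +{added} -{removed}")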
tests/tools_example.py
DELETED
@@ -1,261 +0,0 @@
#!/usr/bin/python
# -*- coding: UTF-8 -*-
"""
Usage examples for the Crawlo framework toolkit.
"""
import asyncio
from crawlo.tools import (
    # Date tools
    parse_time,
    format_time,
    time_diff,

    # Data cleaning tools
    clean_text,
    format_currency,
    extract_emails,

    # Data validation tools
    validate_email,
    validate_url,
    validate_phone,

    # Request handling tools
    build_url,
    add_query_params,
    merge_headers,

    # Anti-crawling tools
    get_random_user_agent,
    rotate_proxy,

    # Authenticated proxy tools
    AuthenticatedProxy,
    create_proxy_config,
    get_proxy_info,

    # Distributed coordination tools
    generate_task_id,
    get_cluster_info
)


def demo_date_tools():
    """Demonstrate the date tools."""
    print("=== Date Tools Demo ===\n")

    # Parse a time string
    time_str = "2025-09-10 14:30:00"
    parsed_time = parse_time(time_str)
    print(f"Parsed time: {time_str} -> {parsed_time}")

    # Format a time
    formatted_time = format_time(parsed_time, "%Y年%m月%d日 %H:%M:%S")
    print(f"Formatted time: {parsed_time} -> {formatted_time}")

    # Compute a time difference
    time_str2 = "2025-09-11 16:45:30"
    parsed_time2 = parse_time(time_str2)
    diff = time_diff(parsed_time2, parsed_time)
    print(f"Time difference: {time_str2} - {time_str} = {diff} seconds")

    print()


def demo_data_cleaning_tools():
    """Demonstrate the data cleaning tools."""
    print("=== Data Cleaning Tools Demo ===\n")

    # Clean text
    dirty_text = "<p>这是一个 <b>测试</b>&文本</p>"
    clean_result = clean_text(dirty_text)
    print(f"Cleaned text: {dirty_text} -> {clean_result}")

    # Format currency
    price = 1234.567
    formatted_price = format_currency(price, "¥", 2)
    print(f"Formatted currency: {price} -> {formatted_price}")

    # Extract email addresses
    text_with_email = "Contact emails: test@example.com, support@crawler.com"
    emails = extract_emails(text_with_email)
    print(f"Extracted emails: {text_with_email} -> {emails}")

    print()


def demo_data_validation_tools():
    """Demonstrate the data validation tools."""
    print("=== Data Validation Tools Demo ===\n")

    # Validate an email address
    email = "test@example.com"
    is_valid_email = validate_email(email)
    print(f"Validate email: {email} -> {'valid' if is_valid_email else 'invalid'}")

    # Validate an invalid email address
    invalid_email = "invalid-email"
    is_valid_invalid = validate_email(invalid_email)
    print(f"Validate email: {invalid_email} -> {'valid' if is_valid_invalid else 'invalid'}")

    # Validate a URL
    url = "https://example.com/path?param=value"
    is_valid_url = validate_url(url)
    print(f"Validate URL: {url} -> {'valid' if is_valid_url else 'invalid'}")

    # Validate a phone number
    phone = "13812345678"
    is_valid_phone = validate_phone(phone)
    print(f"Validate phone: {phone} -> {'valid' if is_valid_phone else 'invalid'}")

    print()


def demo_request_handling_tools():
    """Demonstrate the request handling tools."""
    print("=== Request Handling Tools Demo ===\n")

    # Build a URL
    base_url = "https://api.example.com"
    path = "/v1/users"
    query_params = {"page": 1, "limit": 10}
    full_url = build_url(base_url, path, query_params)
    print(f"Build URL: {base_url} + {path} + {query_params} -> {full_url}")

    # Add query parameters
    existing_url = "https://api.example.com/v1/users?page=1"
    new_params = {"sort": "name", "order": "asc"}
    updated_url = add_query_params(existing_url, new_params)
    print(f"Add params: {existing_url} + {new_params} -> {updated_url}")

    # Merge request headers
    base_headers = {"Content-Type": "application/json", "Accept": "application/json"}
    additional_headers = {"Authorization": "Bearer token123", "User-Agent": "Crawlo/1.0"}
    merged_headers = merge_headers(base_headers, additional_headers)
    print("Merge headers:")
    print(f"  base: {base_headers}")
    print(f"  additional: {additional_headers}")
    print(f"  merged: {merged_headers}")

    print()


def demo_anti_crawler_tools():
    """Demonstrate the anti-crawling tools."""
    print("=== Anti-Crawling Tools Demo ===\n")

    # Get a random User-Agent
    user_agent = get_random_user_agent()
    print(f"Random User-Agent: {user_agent[:50]}...")

    # Rotate proxies
    proxy = rotate_proxy()
    print(f"Rotated proxy: {proxy}")

    print()


def demo_authenticated_proxy_tools():
    """Demonstrate the authenticated proxy tools."""
    print("=== Authenticated Proxy Tools Demo ===\n")

    # Create an authenticated proxy
    proxy_url = "http://username:password@proxy.example.com:8080"
    proxy = AuthenticatedProxy(proxy_url)

    print(f"Proxy URL: {proxy}")
    print(f"Clean URL: {proxy.clean_url}")
    print(f"Username: {proxy.username}")
    print(f"Password: {proxy.password}")
    print(f"Proxy dict: {proxy.proxy_dict}")
    print(f"Auth credentials: {proxy.get_auth_credentials()}")
    print(f"Auth header: {proxy.get_auth_header()}")
    print(f"Is valid: {proxy.is_valid()}")

    # Create a proxy configuration
    proxy_config = create_proxy_config(proxy_url)
    print(f"\nProxy config: {proxy_config}")

    # Get proxy information
    proxy_info = get_proxy_info(proxy_url)
    print(f"Proxy info: {proxy_info}")

    print()


async def demo_distributed_coordinator_tools():
    """Demonstrate the distributed coordination tools."""
    print("=== Distributed Coordination Tools Demo ===\n")

    # Generate a task ID
    url = "https://example.com/page/1"
    spider_name = "example_spider"
    task_id = generate_task_id(url, spider_name)
    print(f"Generated task ID: URL={url}, Spider={spider_name} -> {task_id}")

    # Get cluster information
    cluster_info = await get_cluster_info()
    print(f"Cluster info: {cluster_info}")

    print()


if __name__ == '__main__':
    # Run the synchronous demos
    demo_date_tools()
    demo_data_cleaning_tools()
    demo_data_validation_tools()
    demo_request_handling_tools()
    demo_anti_crawler_tools()
    demo_authenticated_proxy_tools()

    # Run the asynchronous demo
    asyncio.run(demo_distributed_coordinator_tools())

    print("=== Using the Toolkit in a Spider ===\n")
    print("In a spider project, you can use the toolkit like this:")
    print("""
from crawlo import Spider, Request
from crawlo.tools import (
    clean_text,
    validate_email,
    get_random_user_agent,
    build_url,
    AuthenticatedProxy
)

class ExampleSpider(Spider):
    def start_requests(self):
        headers = {"User-Agent": get_random_user_agent()}

        # Use an authenticated proxy
        proxy_url = "http://username:password@proxy.example.com:8080"
        proxy = AuthenticatedProxy(proxy_url)

        request = Request("https://example.com", headers=headers)
        # Configure the proxy according to the downloader type
        downloader_type = self.crawler.settings.get("DOWNLOADER_TYPE", "aiohttp")
        if downloader_type == "aiohttp":
            request.proxy = proxy.clean_url
            auth = proxy.get_auth_credentials()
            if auth:
                request.meta["proxy_auth"] = auth
        else:
            request.proxy = proxy.proxy_dict

        yield request

    def parse(self, response):
        # Extract data
        title = response.css('h1::text').get()
        email = response.css('.email::text').get()

        # Clean and validate the data
        clean_title = clean_text(title) if title else None
        is_valid_email = validate_email(email) if email else False

        # Build the next-page URL
        next_page_url = build_url("https://example.com", "/page/2")

        # Process the data...
""")
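The deleted example above imports everything from crawlo.tools, and this release also deletes several of those modules (authenticated_proxy.py, request_tools.py, data_validator.py, among others) while stripping 103 lines out of crawlo/tools/__init__.py. Code written against the 1.4.6 toolkit may therefore fail at import time on 1.4.8. A minimal defensive sketch, assuming nothing about which names 1.4.8 still re-exports:

# Hypothetical guard for code written against the 1.4.6 toolkit: probe the
# legacy import instead of assuming crawlo.tools still exposes it in 1.4.8.
try:
    from crawlo.tools import AuthenticatedProxy  # deleted in this release per the diff above
    HAS_LEGACY_TOOLS = True
except ImportError:
    AuthenticatedProxy = None
    HAS_LEGACY_TOOLS = False

if not HAS_LEGACY_TOOLS:
    # Degrade gracefully (e.g. skip the proxy-auth demo) rather than crash at import time.
    print("legacy crawlo.tools helpers are unavailable in this crawlo version")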
Files without changes: the three dist-info files listed above with +0 -0 (WHEEL, entry_points.txt, top_level.txt).