crawlo 1.1.9__py3-none-any.whl → 1.2.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of crawlo might be problematic.
- crawlo/__init__.py +61 -61
- crawlo/__version__.py +1 -1
- crawlo/cleaners/__init__.py +60 -60
- crawlo/cleaners/data_formatter.py +225 -225
- crawlo/cleaners/encoding_converter.py +125 -125
- crawlo/cleaners/text_cleaner.py +232 -232
- crawlo/cli.py +65 -65
- crawlo/commands/__init__.py +14 -14
- crawlo/commands/check.py +594 -594
- crawlo/commands/genspider.py +151 -151
- crawlo/commands/help.py +142 -132
- crawlo/commands/list.py +155 -155
- crawlo/commands/run.py +292 -292
- crawlo/commands/startproject.py +418 -418
- crawlo/commands/stats.py +188 -188
- crawlo/commands/utils.py +186 -186
- crawlo/config.py +312 -312
- crawlo/config_validator.py +252 -252
- crawlo/core/__init__.py +2 -2
- crawlo/core/engine.py +354 -345
- crawlo/core/processor.py +40 -40
- crawlo/core/scheduler.py +143 -136
- crawlo/crawler.py +1027 -1027
- crawlo/downloader/__init__.py +266 -266
- crawlo/downloader/aiohttp_downloader.py +220 -220
- crawlo/downloader/cffi_downloader.py +256 -256
- crawlo/downloader/httpx_downloader.py +259 -259
- crawlo/downloader/hybrid_downloader.py +213 -213
- crawlo/downloader/playwright_downloader.py +402 -402
- crawlo/downloader/selenium_downloader.py +472 -472
- crawlo/event.py +11 -11
- crawlo/exceptions.py +81 -81
- crawlo/extension/__init__.py +37 -37
- crawlo/extension/health_check.py +141 -141
- crawlo/extension/log_interval.py +57 -57
- crawlo/extension/log_stats.py +81 -81
- crawlo/extension/logging_extension.py +43 -43
- crawlo/extension/memory_monitor.py +104 -104
- crawlo/extension/performance_profiler.py +133 -133
- crawlo/extension/request_recorder.py +107 -107
- crawlo/filters/__init__.py +154 -154
- crawlo/filters/aioredis_filter.py +280 -280
- crawlo/filters/memory_filter.py +269 -269
- crawlo/items/__init__.py +23 -23
- crawlo/items/base.py +21 -21
- crawlo/items/fields.py +53 -53
- crawlo/items/items.py +104 -104
- crawlo/middleware/__init__.py +21 -21
- crawlo/middleware/default_header.py +132 -32
- crawlo/middleware/download_delay.py +105 -28
- crawlo/middleware/middleware_manager.py +135 -135
- crawlo/middleware/offsite.py +116 -0
- crawlo/middleware/proxy.py +366 -272
- crawlo/middleware/request_ignore.py +88 -30
- crawlo/middleware/response_code.py +164 -18
- crawlo/middleware/response_filter.py +138 -26
- crawlo/middleware/retry.py +124 -124
- crawlo/mode_manager.py +211 -211
- crawlo/network/__init__.py +21 -21
- crawlo/network/request.py +338 -338
- crawlo/network/response.py +359 -359
- crawlo/pipelines/__init__.py +21 -21
- crawlo/pipelines/bloom_dedup_pipeline.py +156 -156
- crawlo/pipelines/console_pipeline.py +39 -39
- crawlo/pipelines/csv_pipeline.py +316 -316
- crawlo/pipelines/database_dedup_pipeline.py +224 -224
- crawlo/pipelines/json_pipeline.py +218 -218
- crawlo/pipelines/memory_dedup_pipeline.py +115 -115
- crawlo/pipelines/mongo_pipeline.py +131 -131
- crawlo/pipelines/mysql_pipeline.py +316 -316
- crawlo/pipelines/pipeline_manager.py +61 -61
- crawlo/pipelines/redis_dedup_pipeline.py +167 -167
- crawlo/project.py +187 -187
- crawlo/queue/pqueue.py +37 -37
- crawlo/queue/queue_manager.py +337 -334
- crawlo/queue/redis_priority_queue.py +298 -298
- crawlo/settings/__init__.py +7 -7
- crawlo/settings/default_settings.py +226 -219
- crawlo/settings/setting_manager.py +122 -122
- crawlo/spider/__init__.py +639 -639
- crawlo/stats_collector.py +59 -59
- crawlo/subscriber.py +130 -130
- crawlo/task_manager.py +30 -30
- crawlo/templates/crawlo.cfg.tmpl +10 -10
- crawlo/templates/project/__init__.py.tmpl +3 -3
- crawlo/templates/project/items.py.tmpl +17 -17
- crawlo/templates/project/middlewares.py.tmpl +118 -109
- crawlo/templates/project/pipelines.py.tmpl +96 -96
- crawlo/templates/project/run.py.tmpl +45 -45
- crawlo/templates/project/settings.py.tmpl +327 -326
- crawlo/templates/project/settings_distributed.py.tmpl +119 -119
- crawlo/templates/project/settings_gentle.py.tmpl +94 -94
- crawlo/templates/project/settings_high_performance.py.tmpl +151 -151
- crawlo/templates/project/settings_simple.py.tmpl +68 -68
- crawlo/templates/project/spiders/__init__.py.tmpl +5 -5
- crawlo/templates/spider/spider.py.tmpl +143 -141
- crawlo/tools/__init__.py +182 -182
- crawlo/tools/anti_crawler.py +268 -268
- crawlo/tools/authenticated_proxy.py +240 -240
- crawlo/tools/data_validator.py +180 -180
- crawlo/tools/date_tools.py +35 -35
- crawlo/tools/distributed_coordinator.py +386 -386
- crawlo/tools/retry_mechanism.py +220 -220
- crawlo/tools/scenario_adapter.py +262 -262
- crawlo/utils/__init__.py +35 -35
- crawlo/utils/batch_processor.py +260 -260
- crawlo/utils/controlled_spider_mixin.py +439 -439
- crawlo/utils/date_tools.py +290 -290
- crawlo/utils/db_helper.py +343 -343
- crawlo/utils/enhanced_error_handler.py +359 -359
- crawlo/utils/env_config.py +105 -105
- crawlo/utils/error_handler.py +125 -125
- crawlo/utils/func_tools.py +82 -82
- crawlo/utils/large_scale_config.py +286 -286
- crawlo/utils/large_scale_helper.py +343 -343
- crawlo/utils/log.py +128 -128
- crawlo/utils/performance_monitor.py +284 -284
- crawlo/utils/queue_helper.py +175 -175
- crawlo/utils/redis_connection_pool.py +334 -334
- crawlo/utils/redis_key_validator.py +199 -199
- crawlo/utils/request.py +267 -267
- crawlo/utils/request_serializer.py +219 -219
- crawlo/utils/spider_loader.py +62 -62
- crawlo/utils/system.py +11 -11
- crawlo/utils/tools.py +4 -4
- crawlo/utils/url.py +39 -39
- crawlo-1.2.1.dist-info/METADATA +692 -0
- crawlo-1.2.1.dist-info/RECORD +220 -0
- examples/__init__.py +7 -7
- examples/aiohttp_settings.py +42 -0
- examples/curl_cffi_settings.py +41 -0
- examples/default_header_middleware_example.py +107 -0
- examples/default_header_spider_example.py +129 -0
- examples/download_delay_middleware_example.py +160 -0
- examples/httpx_settings.py +42 -0
- examples/multi_downloader_proxy_example.py +81 -0
- examples/offsite_middleware_example.py +55 -0
- examples/offsite_spider_example.py +107 -0
- examples/proxy_spider_example.py +166 -0
- examples/request_ignore_middleware_example.py +51 -0
- examples/request_ignore_spider_example.py +99 -0
- examples/response_code_middleware_example.py +52 -0
- examples/response_filter_middleware_example.py +67 -0
- examples/tong_hua_shun_settings.py +62 -0
- examples/tong_hua_shun_spider.py +170 -0
- tests/DOUBLE_CRAWLO_PREFIX_FIX_REPORT.md +81 -81
- tests/__init__.py +7 -7
- tests/advanced_tools_example.py +275 -275
- tests/authenticated_proxy_example.py +236 -236
- tests/cleaners_example.py +160 -160
- tests/config_validation_demo.py +102 -102
- tests/controlled_spider_example.py +205 -205
- tests/date_tools_example.py +180 -180
- tests/dynamic_loading_example.py +523 -523
- tests/dynamic_loading_test.py +104 -104
- tests/env_config_example.py +133 -133
- tests/error_handling_example.py +171 -171
- tests/redis_key_validation_demo.py +130 -130
- tests/response_improvements_example.py +144 -144
- tests/test_advanced_tools.py +148 -148
- tests/test_all_redis_key_configs.py +145 -145
- tests/test_authenticated_proxy.py +141 -141
- tests/test_cleaners.py +54 -54
- tests/test_comprehensive.py +146 -146
- tests/test_config_validator.py +193 -193
- tests/test_crawlo_proxy_integration.py +173 -0
- tests/test_date_tools.py +123 -123
- tests/test_default_header_middleware.py +159 -0
- tests/test_double_crawlo_fix.py +207 -207
- tests/test_double_crawlo_fix_simple.py +124 -124
- tests/test_download_delay_middleware.py +222 -0
- tests/test_downloader_proxy_compatibility.py +269 -0
- tests/test_dynamic_downloaders_proxy.py +124 -124
- tests/test_dynamic_proxy.py +92 -92
- tests/test_dynamic_proxy_config.py +146 -146
- tests/test_dynamic_proxy_real.py +109 -109
- tests/test_edge_cases.py +303 -303
- tests/test_enhanced_error_handler.py +270 -270
- tests/test_env_config.py +121 -121
- tests/test_error_handler_compatibility.py +112 -112
- tests/test_final_validation.py +153 -153
- tests/test_framework_env_usage.py +103 -103
- tests/test_integration.py +356 -356
- tests/test_item_dedup_redis_key.py +122 -122
- tests/test_offsite_middleware.py +222 -0
- tests/test_parsel.py +29 -29
- tests/test_performance.py +327 -327
- tests/test_proxy_api.py +265 -0
- tests/test_proxy_health_check.py +32 -32
- tests/test_proxy_middleware.py +122 -0
- tests/test_proxy_middleware_enhanced.py +217 -0
- tests/test_proxy_middleware_integration.py +136 -136
- tests/test_proxy_providers.py +56 -56
- tests/test_proxy_stats.py +19 -19
- tests/test_proxy_strategies.py +59 -59
- tests/test_queue_manager_double_crawlo.py +174 -231
- tests/test_queue_manager_redis_key.py +176 -176
- tests/test_real_scenario_proxy.py +196 -0
- tests/test_redis_config.py +28 -28
- tests/test_redis_connection_pool.py +294 -294
- tests/test_redis_key_naming.py +181 -181
- tests/test_redis_key_validator.py +123 -123
- tests/test_redis_queue.py +224 -224
- tests/test_request_ignore_middleware.py +183 -0
- tests/test_request_serialization.py +70 -70
- tests/test_response_code_middleware.py +350 -0
- tests/test_response_filter_middleware.py +428 -0
- tests/test_response_improvements.py +152 -152
- tests/test_retry_middleware.py +242 -0
- tests/test_scheduler.py +241 -241
- tests/test_simple_response.py +61 -61
- tests/test_telecom_spider_redis_key.py +205 -205
- tests/test_template_content.py +87 -87
- tests/test_template_redis_key.py +134 -134
- tests/test_tools.py +153 -153
- tests/tools_example.py +257 -257
- crawlo-1.1.9.dist-info/METADATA +0 -626
- crawlo-1.1.9.dist-info/RECORD +0 -190
- {crawlo-1.1.9.dist-info → crawlo-1.2.1.dist-info}/WHEEL +0 -0
- {crawlo-1.1.9.dist-info → crawlo-1.2.1.dist-info}/entry_points.txt +0 -0
- {crawlo-1.1.9.dist-info → crawlo-1.2.1.dist-info}/top_level.txt +0 -0
tests/test_dynamic_proxy_real.py
CHANGED
@@ -1,110 +1,110 @@

All 109 body lines of the old file are removed and re-added with identical content (the deleted and added sides of the hunk match line for line, so the change is most likely a whitespace or line-ending rewrite); the final line, asyncio.run(main()), is unchanged. The file, with its Chinese comments translated to English, reads:

#!/usr/bin/python
# -*- coding: UTF-8 -*-
"""
Live test of the dynamic downloader (Playwright) accessing websites through a proxy
"""

import asyncio
from crawlo.spider import Spider
from crawlo.network.request import Request
from crawlo.tools import AuthenticatedProxy


class ProxyTestSpider(Spider):
    """Proxy test spider"""
    name = "proxy_test_spider"  # add the name attribute

    # Custom settings
    custom_settings = {
        "DOWNLOADER_TYPE": "playwright",
        "PLAYWRIGHT_BROWSER_TYPE": "chromium",
        "PLAYWRIGHT_HEADLESS": True,
        # Configure an authenticated proxy
        "PLAYWRIGHT_PROXY": {
            "server": "http://182.201.243.186:58111",
            "username": "dwe20241014",
            "password": "Dwe0101014"
        }
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        print("Proxy test spider initialized")
        print(f"Proxy configuration: {self.custom_settings.get('PLAYWRIGHT_PROXY')}")

    def start_requests(self):
        """Start requests"""
        urls = [
            "https://httpbin.org/ip",       # check the IP address
            "https://httpbin.org/headers",  # check the request headers
        ]

        for url in urls:
            request = Request(url, callback=self.parse)
            yield request

    def parse(self, response):
        """Parse the response"""
        print("\n=== Response info ===")
        print(f"URL: {response.url}")
        print(f"Status code: {response.status_code}")
        print(f"Response body: {response.text[:500]}")

        # Save the response body
        filename = response.url.split("/")[-1].replace("?", "_").replace("&", "_")
        with open(f"proxy_test_{filename}.html", "w", encoding="utf-8") as f:
            f.write(response.text)
        print(f"Response saved to proxy_test_{filename}.html")

        # Return a simple item
        yield {"url": response.url, "status": response.status_code}


# Async runner function
async def run_spider():
    """Run the spider"""
    print("Starting the test of the dynamic downloader accessing websites through a proxy...")

    # Create the spider instance
    spider = ProxyTestSpider()

    # Create a simple mock crawler
    class MockCrawler:
        def __init__(self):
            from crawlo.settings.setting_manager import SettingManager
            self.settings = SettingManager()
            # Apply the spider's custom settings
            if hasattr(spider, 'custom_settings'):
                for key, value in spider.custom_settings.items():
                    self.settings.set(key, value)

    crawler = MockCrawler()

    # Create the spider instance and bind the crawler
    spider_instance = spider.create_instance(crawler)

    # Generate the initial requests
    requests = list(spider_instance.start_requests())
    print(f"Generated {len(requests)} requests")

    # Process a request with the Playwright downloader
    try:
        from crawlo.downloader import PlaywrightDownloader
        downloader = PlaywrightDownloader(crawler)
        await downloader.download(requests[0])  # test the first request
        print("Playwright downloader test succeeded!")
    except Exception as e:
        print(f"Playwright downloader test failed: {e}")
        import traceback
        traceback.print_exc()

    print("\nTest finished!")


async def main():
    """Main function"""
    await run_spider()


if __name__ == "__main__":
    asyncio.run(main())
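For context, the PLAYWRIGHT_PROXY dict in this test mirrors the shape of Playwright's own proxy launch option (server, username, password). Below is a minimal standalone sketch of that mapping, assuming only the playwright package rather than crawlo's PlaywrightDownloader; the proxy endpoint and credentials are placeholders, not values from the diff:

# Minimal sketch using Playwright directly (not crawlo's PlaywrightDownloader).
# The proxy endpoint and credentials below are placeholders.
import asyncio
from playwright.async_api import async_playwright

PROXY = {
    "server": "http://proxy.example.com:8080",  # placeholder endpoint
    "username": "user",                          # placeholder credential
    "password": "pass",                          # placeholder credential
}

async def main():
    async with async_playwright() as p:
        # Playwright accepts the same server/username/password keys at launch time
        browser = await p.chromium.launch(headless=True, proxy=PROXY)
        page = await browser.new_page()
        await page.goto("https://httpbin.org/ip")  # httpbin echoes the egress IP
        print(await page.text_content("body"))     # should report the proxy's IP
        await browser.close()

if __name__ == "__main__":
    asyncio.run(main())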