crawlo-1.4.4-py3-none-any.whl → crawlo-1.4.5-py3-none-any.whl
This diff compares the contents of two publicly released versions of the package as published to a supported public registry. It is provided for informational purposes only.
This release has been flagged as potentially problematic.
- crawlo/__init__.py +11 -15
- crawlo/__version__.py +1 -1
- crawlo/commands/startproject.py +24 -0
- crawlo/core/engine.py +2 -2
- crawlo/core/scheduler.py +4 -4
- crawlo/crawler.py +8 -7
- crawlo/downloader/__init__.py +5 -2
- crawlo/extension/__init__.py +2 -2
- crawlo/filters/aioredis_filter.py +8 -1
- crawlo/filters/memory_filter.py +8 -1
- crawlo/initialization/built_in.py +13 -4
- crawlo/initialization/core.py +5 -4
- crawlo/interfaces.py +24 -0
- crawlo/middleware/__init__.py +7 -4
- crawlo/middleware/middleware_manager.py +15 -8
- crawlo/mode_manager.py +45 -11
- crawlo/network/response.py +374 -69
- crawlo/pipelines/mysql_pipeline.py +6 -6
- crawlo/pipelines/pipeline_manager.py +2 -2
- crawlo/project.py +2 -4
- crawlo/settings/default_settings.py +4 -0
- crawlo/task_manager.py +2 -2
- crawlo/templates/project/items.py.tmpl +2 -2
- crawlo/templates/project/middlewares.py.tmpl +9 -89
- crawlo/templates/project/pipelines.py.tmpl +8 -68
- crawlo/tools/__init__.py +0 -11
- crawlo/utils/__init__.py +17 -1
- crawlo/utils/db_helper.py +220 -319
- crawlo/utils/error_handler.py +313 -67
- crawlo/utils/fingerprint.py +3 -4
- crawlo/utils/misc.py +82 -0
- crawlo/utils/request.py +55 -66
- crawlo/utils/selector_helper.py +138 -0
- crawlo/utils/spider_loader.py +185 -45
- crawlo/utils/text_helper.py +95 -0
- crawlo-1.4.5.dist-info/METADATA +329 -0
- {crawlo-1.4.4.dist-info → crawlo-1.4.5.dist-info}/RECORD +76 -49
- tests/bug_check_test.py +251 -0
- tests/direct_selector_helper_test.py +97 -0
- tests/ofweek_scrapy/ofweek_scrapy/items.py +12 -0
- tests/ofweek_scrapy/ofweek_scrapy/middlewares.py +100 -0
- tests/ofweek_scrapy/ofweek_scrapy/pipelines.py +13 -0
- tests/ofweek_scrapy/ofweek_scrapy/settings.py +85 -0
- tests/ofweek_scrapy/ofweek_scrapy/spiders/__init__.py +4 -0
- tests/ofweek_scrapy/ofweek_scrapy/spiders/ofweek_spider.py +162 -0
- tests/ofweek_scrapy/scrapy.cfg +11 -0
- tests/performance_comparison.py +4 -5
- tests/simple_crawlo_test.py +1 -2
- tests/simple_follow_test.py +39 -0
- tests/simple_response_selector_test.py +95 -0
- tests/simple_selector_helper_test.py +155 -0
- tests/simple_selector_test.py +208 -0
- tests/simple_url_test.py +74 -0
- tests/test_crawler_process_import.py +39 -0
- tests/test_crawler_process_spider_modules.py +48 -0
- tests/test_edge_cases.py +7 -5
- tests/test_encoding_core.py +57 -0
- tests/test_encoding_detection.py +127 -0
- tests/test_factory_compatibility.py +197 -0
- tests/test_optimized_selector_naming.py +101 -0
- tests/test_priority_behavior.py +18 -18
- tests/test_response_follow.py +105 -0
- tests/test_response_selector_methods.py +93 -0
- tests/test_response_url_methods.py +71 -0
- tests/test_response_urljoin.py +87 -0
- tests/test_scrapy_style_encoding.py +113 -0
- tests/test_selector_helper.py +101 -0
- tests/test_selector_optimizations.py +147 -0
- tests/test_spider_loader.py +50 -0
- tests/test_spider_loader_comprehensive.py +70 -0
- tests/test_spiders/__init__.py +1 -0
- tests/test_spiders/test_spider.py +10 -0
- crawlo/tools/anti_crawler.py +0 -269
- crawlo/utils/class_loader.py +0 -26
- crawlo/utils/enhanced_error_handler.py +0 -357
- crawlo-1.4.4.dist-info/METADATA +0 -190
- tests/simple_log_test.py +0 -58
- tests/simple_test.py +0 -48
- tests/test_framework_logger.py +0 -67
- tests/test_framework_startup.py +0 -65
- tests/test_mode_change.py +0 -73
- {crawlo-1.4.4.dist-info → crawlo-1.4.5.dist-info}/WHEEL +0 -0
- {crawlo-1.4.4.dist-info → crawlo-1.4.5.dist-info}/entry_points.txt +0 -0
- {crawlo-1.4.4.dist-info → crawlo-1.4.5.dist-info}/top_level.txt +0 -0
- /tests/{final_command_test_report.md → ofweek_scrapy/ofweek_scrapy/__init__.py} +0 -0
crawlo/task_manager.py
CHANGED
@@ -1,10 +1,10 @@
 #!/usr/bin/python
 # -*- coding:UTF-8 -*-
+import time
 import asyncio
-from asyncio import Task, Future, Semaphore
 from typing import Set, Final
 from collections import deque
-import time
+from asyncio import Task, Future, Semaphore
 from crawlo.utils.log import get_logger
crawlo/templates/project/middlewares.py.tmpl
CHANGED

@@ -2,118 +2,38 @@
 """
 {{project_name}}.middlewares
 ============================
-
-
-这是一个简单的示例中间件,您可以根据需要添加更多中间件。
+自定义中间件示例
 """
 
-import random
-from crawlo import Request, Response
+from crawlo.network import Request, Response
 from crawlo.utils.log import get_logger
 
 
-class ExampleMiddleware:
+class {{project_name|title}}Middleware:
     """
-
-
-    此中间件会:
-    1. 为请求添加随机 User-Agent
-    2. 记录请求和响应信息
-    3. 处理异常情况
+    {{project_name}} 项目的中间件
     """
 
     def __init__(self):
         self.logger = get_logger(self.__class__.__name__)
-        self.user_agents = [
-            'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/136.0.0.0 Safari/537.36',
-            'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/136.0.0.0 Safari/537.36',
-            'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:135.0) Gecko/20100101 Firefox/135.0',
-            'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:135.0) Gecko/20100101 Firefox/135.0',
-        ]
 
     def process_request(self, request, spider):
         """
-
-
-        Args:
-            request: 请求对象
-            spider: 爬虫实例
-
-        Returns:
-            None: 继续处理请求
-            Response: 返回响应对象(短路处理)
-            Request: 返回新请求对象(替换原请求)
+        在请求被下载器执行前调用
         """
-
-        if 'User-Agent' not in request.headers:
-            ua = random.choice(self.user_agents)
-            request.headers['User-Agent'] = ua
-            self.logger.debug(f"为请求 {request.url} 设置 User-Agent: {ua[:50]}...")
-
+        self.logger.info(f"处理请求: {request.url}")
         return None
 
     def process_response(self, request, response, spider):
         """
-        在响应被 Spider 处理前调用
-
-        Args:
-            request: 原始请求对象
-            response: 响应对象
-            spider: 爬虫实例
-
-        Returns:
-            Response: 处理后的响应对象
+        在响应被 Spider 处理前调用
         """
-        # 记录响应信息
         self.logger.info(f"收到响应: {request.url} - 状态码: {response.status_code}")
-
-        # 可以在这里处理特殊状态码
-        if response.status_code == 403:
-            self.logger.warning(f"访问被拒绝: {request.url}")
-
         return response
 
     def process_exception(self, request, exception, spider):
         """
-
-
-        Args:
-            request: 请求对象
-            exception: 异常对象
-            spider: 爬虫实例
-
-        Returns:
-            None: 异常将继续传播
-            Response: 返回响应对象(处理异常)
-            Request: 返回新请求对象(重试请求)
+        在下载或处理过程中发生异常时调用
         """
         self.logger.error(f"请求异常: {request.url} - {exception}")
-        return None
-
-
-# ======================== 使用说明 ========================
-#
-# 在 settings.py 中启用中间件:
-# MIDDLEWARES = [
-#     '{{project_name}}.middlewares.ExampleMiddleware',
-# ]
-#
-# 您可以根据需要添加更多中间件,例如:
-# 1. 请求处理中间件(修改请求头、设置代理等)
-# 2. 响应处理中间件(解析、过滤等)
-# 3. 异常处理中间件(重试、记录等)
-#
-# 每个中间件可以实现以下方法:
-# - process_request: 处理请求
-# - process_response: 处理响应
-# - process_exception: 处理异常
-#
-# 注意:Crawlo框架提供了许多内置中间件,您可以直接使用:
-# - DownloadDelayMiddleware: 控制请求延迟
-# - ResponseCodeMiddleware: 处理HTTP状态码并记录统计信息
-# - ResponseFilterMiddleware: 过滤特定状态码的响应
-# - DefaultHeaderMiddleware: 添加默认请求头
-# - ProxyMiddleware: 设置代理
-# - RetryMiddleware: 处理重试逻辑
-# - OffsiteMiddleware: 过滤站外请求
-# ======================== 使用说明 ========================
+        return None
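The usage notes removed from this template showed the middleware being enabled via the MIDDLEWARES setting. Under the new {{project_name|title}} naming, registration in a generated project's settings.py would plausibly look like the sketch below; the project name "demo" is hypothetical, chosen only to show how the template renders:

# settings.py of a generated project named "demo" (hypothetical name);
# the template class {{project_name|title}}Middleware renders as DemoMiddleware.
MIDDLEWARES = [
    'demo.middlewares.DemoMiddleware',
]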
crawlo/templates/project/pipelines.py.tmpl
CHANGED

@@ -2,96 +2,36 @@
 """
 {{project_name}}.pipelines
 ==========================
-
-例如:清理、验证、去重、保存到数据库等。
-
-这是一个简单的示例管道,您可以根据需要添加更多管道。
+数据管道示例
 """
 
-from datetime import datetime
 from crawlo.exceptions import DropItem
 from crawlo.utils.log import get_logger
 
 
-class ExamplePipeline:
+class {{project_name|title}}Pipeline:
     """
-
-
-    此管道会:
-    1. 验证必要字段
-    2. 清理数据
-    3. 添加时间戳
-    4. 记录处理日志
+    {{project_name}} 项目的数据管道
     """
 
     def __init__(self):
         self.logger = get_logger(self.__class__.__name__)
-        self.item_count = 0
 
     def process_item(self, item, spider):
         """
-
-
-        Args:
-            item: 要处理的数据项
-            spider: 爬虫实例
-
-        Returns:
-            处理后的数据项
-
-        Raises:
-            DropItem: 如果数据项无效则抛出此异常
+        处理数据项
         """
-
-        if not item.get('title') or not item.get('url'):
-            raise DropItem("缺少必要字段: title 或 url")
-
-        # 数据清理
-        item['title'] = str(item['title']).strip()
-
-        # 添加处理时间戳
-        item['processed_at'] = datetime.now().isoformat()
-
-        # 计数器
-        self.item_count += 1
-
-        # 记录日志
-        self.logger.info(f"处理第 {self.item_count} 个数据项: {item['title']}")
-
+        self.logger.info(f"处理数据项: {item}")
         return item
 
     def open_spider(self, spider):
         """
-
-
-        Args:
-            spider: 爬虫实例
+        爬虫启动时调用
         """
         self.logger.info(f"管道已启动,准备处理爬虫 '{spider.name}' 的数据")
 
     def close_spider(self, spider):
         """
-
-
-        Args:
-            spider: 爬虫实例
+        爬虫关闭时调用
         """
-        self.logger.info(
-
-
-# ======================== 使用说明 ========================
-#
-# 在 settings.py 中启用管道:
-# PIPELINES = [
-#     '{{project_name}}.pipelines.ExamplePipeline',
-# ]
-#
-# 您可以根据需要添加更多管道,例如:
-# 1. 数据验证管道
-# 2. 去重管道
-# 3. 数据存储管道(数据库、文件等)
-# 4. 数据转换管道
-#
-# 每个管道都应该实现 process_item 方法,
-# 可选实现 open_spider 和 close_spider 方法。
-# ======================== 使用说明 ========================
+        self.logger.info("管道已关闭")
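The validation logic dropped from the template remains a useful pattern for real projects. Below is a minimal sketch reconstructed from the removed example code; the class name ValidationPipeline is illustrative, not something the generator emits:

from datetime import datetime

from crawlo.exceptions import DropItem


class ValidationPipeline:
    """Hypothetical pipeline modeled on the example removed from this template."""

    def process_item(self, item, spider):
        # Drop items missing required fields, as the removed example did.
        if not item.get('title') or not item.get('url'):
            raise DropItem("missing required field: title or url")
        # Clean the title and record when the item was processed.
        item['title'] = str(item['title']).strip()
        item['processed_at'] = datetime.now().isoformat()
        return item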
crawlo/tools/__init__.py
CHANGED
@@ -79,17 +79,6 @@ from .retry_mechanism import (
     exponential_backoff
 )
 
-# 反爬虫应对工具
-from .anti_crawler import (
-    ProxyPoolManager,
-    CaptchaHandler,
-    AntiCrawler,
-    get_random_user_agent,
-    rotate_proxy,
-    handle_captcha,
-    detect_rate_limiting
-)
-
 # 带认证代理工具
 from .authenticated_proxy import (
     AuthenticatedProxy,
crawlo/utils/__init__.py
CHANGED
@@ -4,6 +4,9 @@
 # @Time : 2025-02-05 13:57
 # @Author : oscar
 # @Desc : 工具模块集合
+
+提供用于处理parsel选择器的辅助函数,用于提取文本和属性等操作。
+所有方法都采用了简洁直观的命名风格,便于记忆和使用。
 """
 
 from ..tools.date_tools import (
@@ -20,6 +23,14 @@ from ..tools.date_tools import (
     from_timestamp_with_tz
 )
 
+from .selector_helper import (
+    extract_text,
+    extract_texts,
+    extract_attr,
+    extract_attrs,
+    is_xpath
+)
+
 __all__ = [
     "TimeUtils",
     "parse_time",
@@ -31,5 +42,10 @@ __all__ = [
     "to_timezone",
     "to_utc",
     "to_local",
-    "from_timestamp_with_tz"
+    "from_timestamp_with_tz",
+    "extract_text",
+    "extract_texts",
+    "extract_attr",
+    "extract_attrs",
+    "is_xpath"
 ]