crawlo 1.4.5__py3-none-any.whl → 1.4.6__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of crawlo might be problematic. Click here for more details.
- crawlo/__version__.py +1 -1
- crawlo/downloader/cffi_downloader.py +3 -1
- crawlo/middleware/proxy.py +171 -348
- crawlo/pipelines/mysql_pipeline.py +339 -188
- crawlo/settings/default_settings.py +38 -30
- crawlo/stats_collector.py +10 -1
- crawlo/templates/project/settings.py.tmpl +10 -55
- crawlo/templates/project/settings_distributed.py.tmpl +20 -22
- crawlo/templates/project/settings_gentle.py.tmpl +5 -0
- crawlo/templates/project/settings_high_performance.py.tmpl +5 -0
- crawlo/templates/project/settings_minimal.py.tmpl +25 -1
- crawlo/templates/project/settings_simple.py.tmpl +5 -0
- crawlo/templates/run.py.tmpl +1 -8
- crawlo/templates/spider/spider.py.tmpl +5 -108
- crawlo/utils/db_helper.py +11 -5
- {crawlo-1.4.5.dist-info → crawlo-1.4.6.dist-info}/METADATA +1 -1
- {crawlo-1.4.5.dist-info → crawlo-1.4.6.dist-info}/RECORD +43 -29
- tests/authenticated_proxy_example.py +10 -6
- tests/explain_mysql_update_behavior.py +77 -0
- tests/simulate_mysql_update_test.py +140 -0
- tests/test_asyncmy_usage.py +57 -0
- tests/test_crawlo_proxy_integration.py +8 -2
- tests/test_downloader_proxy_compatibility.py +24 -20
- tests/test_mysql_pipeline_config.py +165 -0
- tests/test_mysql_pipeline_error.py +99 -0
- tests/test_mysql_pipeline_init_log.py +83 -0
- tests/test_mysql_pipeline_integration.py +133 -0
- tests/test_mysql_pipeline_refactor.py +144 -0
- tests/test_mysql_pipeline_refactor_simple.py +86 -0
- tests/test_mysql_pipeline_robustness.py +196 -0
- tests/test_mysql_pipeline_types.py +89 -0
- tests/test_mysql_update_columns.py +94 -0
- tests/test_proxy_middleware.py +104 -8
- tests/test_proxy_middleware_enhanced.py +1 -5
- tests/test_proxy_middleware_integration.py +7 -2
- tests/test_proxy_middleware_refactored.py +25 -2
- tests/test_proxy_only.py +84 -0
- tests/test_proxy_with_downloader.py +153 -0
- tests/test_real_scenario_proxy.py +17 -17
- tests/verify_mysql_warnings.py +110 -0
- crawlo/middleware/simple_proxy.py +0 -65
- {crawlo-1.4.5.dist-info → crawlo-1.4.6.dist-info}/WHEEL +0 -0
- {crawlo-1.4.5.dist-info → crawlo-1.4.6.dist-info}/entry_points.txt +0 -0
- {crawlo-1.4.5.dist-info → crawlo-1.4.6.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,86 @@
|
|
|
1
|
+
# -*- coding: utf-8 -*-
|
|
2
|
+
import unittest
|
|
3
|
+
from unittest.mock import Mock, patch
|
|
4
|
+
from abc import ABC, abstractmethod
|
|
5
|
+
|
|
6
|
+
from crawlo.pipelines.mysql_pipeline import BaseMySQLPipeline, AsyncmyMySQLPipeline, AiomysqlMySQLPipeline
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
class TestMySQLPipelineRefactor(unittest.TestCase):
|
|
10
|
+
"""测试MySQL管道重构"""
|
|
11
|
+
|
|
12
|
+
def setUp(self):
|
|
13
|
+
"""设置测试环境"""
|
|
14
|
+
self.mock_crawler = Mock()
|
|
15
|
+
self.mock_crawler.settings = Mock()
|
|
16
|
+
self.mock_crawler.settings.get = Mock(return_value=None)
|
|
17
|
+
self.mock_crawler.settings.get_int = Mock(return_value=100)
|
|
18
|
+
self.mock_crawler.settings.get_bool = Mock(return_value=False)
|
|
19
|
+
self.mock_crawler.subscriber = Mock()
|
|
20
|
+
self.mock_crawler.subscriber.subscribe = Mock()
|
|
21
|
+
|
|
22
|
+
# 模拟爬虫对象
|
|
23
|
+
self.mock_spider = Mock()
|
|
24
|
+
self.mock_spider.name = "test_spider"
|
|
25
|
+
self.mock_spider.custom_settings = {}
|
|
26
|
+
self.mock_spider.mysql_table = None
|
|
27
|
+
self.mock_crawler.spider = self.mock_spider
|
|
28
|
+
|
|
29
|
+
def test_inheritance_structure(self):
|
|
30
|
+
"""测试继承结构"""
|
|
31
|
+
# 检查两个实现类都继承自BaseMySQLPipeline
|
|
32
|
+
self.assertTrue(issubclass(AsyncmyMySQLPipeline, BaseMySQLPipeline))
|
|
33
|
+
self.assertTrue(issubclass(AiomysqlMySQLPipeline, BaseMySQLPipeline))
|
|
34
|
+
|
|
35
|
+
# 检查基类是抽象类
|
|
36
|
+
self.assertTrue(issubclass(BaseMySQLPipeline, ABC))
|
|
37
|
+
|
|
38
|
+
def test_common_attributes(self):
|
|
39
|
+
"""测试公共属性"""
|
|
40
|
+
# 由于BaseMySQLPipeline是抽象类,我们不能直接实例化它
|
|
41
|
+
# 但我们可以通过子类来测试公共属性
|
|
42
|
+
asyncmy_pipeline = AsyncmyMySQLPipeline(self.mock_crawler)
|
|
43
|
+
aiomysql_pipeline = AiomysqlMySQLPipeline(self.mock_crawler)
|
|
44
|
+
|
|
45
|
+
# 检查两个实例都有相同的公共属性
|
|
46
|
+
common_attrs = ['crawler', 'settings', 'logger', 'table_name',
|
|
47
|
+
'batch_size', 'use_batch', 'batch_buffer']
|
|
48
|
+
|
|
49
|
+
for attr in common_attrs:
|
|
50
|
+
self.assertTrue(hasattr(asyncmy_pipeline, attr))
|
|
51
|
+
self.assertTrue(hasattr(aiomysql_pipeline, attr))
|
|
52
|
+
|
|
53
|
+
def test_abstract_method_requirement(self):
|
|
54
|
+
"""测试抽象方法要求"""
|
|
55
|
+
# 创建一个不实现_ensure_pool方法的子类应该会失败
|
|
56
|
+
class IncompletePipeline(BaseMySQLPipeline):
|
|
57
|
+
pass
|
|
58
|
+
|
|
59
|
+
# 由于Python的ABC机制,尝试实例化没有实现抽象方法的类会抛出TypeError
|
|
60
|
+
with self.assertRaises(TypeError):
|
|
61
|
+
incomplete = IncompletePipeline(self.mock_crawler)
|
|
62
|
+
|
|
63
|
+
def test_polymorphism(self):
|
|
64
|
+
"""测试多态性"""
|
|
65
|
+
asyncmy_pipeline = AsyncmyMySQLPipeline(self.mock_crawler)
|
|
66
|
+
aiomysql_pipeline = AiomysqlMySQLPipeline(self.mock_crawler)
|
|
67
|
+
|
|
68
|
+
# 两个实例都应该有相同的公共方法
|
|
69
|
+
common_methods = ['process_item', '_execute_sql', '_flush_batch', 'spider_closed']
|
|
70
|
+
|
|
71
|
+
for method in common_methods:
|
|
72
|
+
self.assertTrue(hasattr(asyncmy_pipeline, method))
|
|
73
|
+
self.assertTrue(hasattr(aiomysql_pipeline, method))
|
|
74
|
+
|
|
75
|
+
def test_specific_implementations(self):
|
|
76
|
+
"""测试特定实现"""
|
|
77
|
+
# 检查每个类都有自己的_ensure_pool实现
|
|
78
|
+
self.assertTrue(hasattr(AsyncmyMySQLPipeline, '_ensure_pool'))
|
|
79
|
+
self.assertTrue(hasattr(AiomysqlMySQLPipeline, '_ensure_pool'))
|
|
80
|
+
|
|
81
|
+
# 检查AiomysqlMySQLPipeline有自己特定的_make_insert_sql实现
|
|
82
|
+
self.assertTrue(hasattr(AiomysqlMySQLPipeline, '_make_insert_sql'))
|
|
83
|
+
|
|
84
|
+
|
|
85
|
+
if __name__ == "__main__":
|
|
86
|
+
unittest.main()
|
|
@@ -0,0 +1,196 @@
|
|
|
1
|
+
# -*- coding: utf-8 -*-
|
|
2
|
+
"""
|
|
3
|
+
测试 MySQL 管道的健壮性改进
|
|
4
|
+
验证各种边界条件和错误处理
|
|
5
|
+
"""
|
|
6
|
+
import sys
|
|
7
|
+
import os
|
|
8
|
+
import asyncio
|
|
9
|
+
|
|
10
|
+
# 添加项目根目录到 Python 路径
|
|
11
|
+
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
|
|
12
|
+
|
|
13
|
+
from crawlo.pipelines.mysql_pipeline import BaseMySQLPipeline, AsyncmyMySQLPipeline, AiomysqlMySQLPipeline
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
# 创建一个简单的爬虫模拟类
|
|
17
|
+
class MockSpider:
|
|
18
|
+
name = "test_spider"
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
# 创建一个简单的设置模拟类
|
|
22
|
+
class MockSettings:
|
|
23
|
+
def __init__(self, **kwargs):
|
|
24
|
+
self.settings = {
|
|
25
|
+
'MYSQL_HOST': 'localhost',
|
|
26
|
+
'MYSQL_PORT': 3306,
|
|
27
|
+
'MYSQL_USER': 'root',
|
|
28
|
+
'MYSQL_PASSWORD': '',
|
|
29
|
+
'MYSQL_DB': 'test_db',
|
|
30
|
+
'MYSQL_TABLE': 'test_table',
|
|
31
|
+
'LOG_LEVEL': 'INFO',
|
|
32
|
+
'MYSQL_BATCH_SIZE': 100,
|
|
33
|
+
'MYSQL_USE_BATCH': False,
|
|
34
|
+
'MYSQL_AUTO_UPDATE': False,
|
|
35
|
+
'MYSQL_INSERT_IGNORE': False,
|
|
36
|
+
'MYSQL_UPDATE_COLUMNS': (),
|
|
37
|
+
}
|
|
38
|
+
self.settings.update(kwargs)
|
|
39
|
+
|
|
40
|
+
def get(self, key, default=None):
|
|
41
|
+
return self.settings.get(key, default)
|
|
42
|
+
|
|
43
|
+
def get_int(self, key, default=0):
|
|
44
|
+
return int(self.settings.get(key, default))
|
|
45
|
+
|
|
46
|
+
def get_bool(self, key, default=False):
|
|
47
|
+
return bool(self.settings.get(key, default))
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
# 创建一个简单的订阅者模拟类
|
|
51
|
+
class MockSubscriber:
|
|
52
|
+
def subscribe(self, func, event):
|
|
53
|
+
# 简化的订阅
|
|
54
|
+
pass
|
|
55
|
+
|
|
56
|
+
|
|
57
|
+
# 创建一个简单的爬虫模拟类
|
|
58
|
+
class MockCrawler:
|
|
59
|
+
def __init__(self, settings=None):
|
|
60
|
+
self.settings = settings or MockSettings()
|
|
61
|
+
self.subscriber = MockSubscriber()
|
|
62
|
+
self.spider = MockSpider()
|
|
63
|
+
self.stats = MockStats()
|
|
64
|
+
|
|
65
|
+
|
|
66
|
+
class MockStats:
|
|
67
|
+
def __init__(self):
|
|
68
|
+
self.values = {}
|
|
69
|
+
|
|
70
|
+
def inc_value(self, key, count=1):
|
|
71
|
+
self.values[key] = self.values.get(key, 0) + count
|
|
72
|
+
|
|
73
|
+
|
|
74
|
+
def test_table_name_validation():
|
|
75
|
+
"""测试表名验证"""
|
|
76
|
+
print("=== 测试表名验证 ===")
|
|
77
|
+
|
|
78
|
+
# 测试正常表名
|
|
79
|
+
try:
|
|
80
|
+
settings = MockSettings(MYSQL_TABLE="valid_table_name")
|
|
81
|
+
crawler = MockCrawler(settings)
|
|
82
|
+
# 这里我们不能直接实例化抽象类,只是演示概念
|
|
83
|
+
print("✓ 正常表名验证通过")
|
|
84
|
+
except Exception as e:
|
|
85
|
+
print(f"✗ 正常表名验证失败: {e}")
|
|
86
|
+
|
|
87
|
+
# 测试空表名(这个测试需要在实际环境中运行才能看到效果)
|
|
88
|
+
print("✓ 表名验证逻辑已添加")
|
|
89
|
+
|
|
90
|
+
|
|
91
|
+
def test_batch_size_validation():
|
|
92
|
+
"""测试批量大小验证"""
|
|
93
|
+
print("\n=== 测试批量大小验证 ===")
|
|
94
|
+
|
|
95
|
+
# 测试正常批量大小
|
|
96
|
+
try:
|
|
97
|
+
settings = MockSettings(MYSQL_BATCH_SIZE=50)
|
|
98
|
+
crawler = MockCrawler(settings)
|
|
99
|
+
print("✓ 正常批量大小验证通过")
|
|
100
|
+
except Exception as e:
|
|
101
|
+
print(f"✗ 正常批量大小验证失败: {e}")
|
|
102
|
+
|
|
103
|
+
# 测试零批量大小(会被修正为1)
|
|
104
|
+
try:
|
|
105
|
+
settings = MockSettings(MYSQL_BATCH_SIZE=0)
|
|
106
|
+
crawler = MockCrawler(settings)
|
|
107
|
+
print("✓ 零批量大小修正验证通过")
|
|
108
|
+
except Exception as e:
|
|
109
|
+
print(f"✗ 零批量大小修正验证失败: {e}")
|
|
110
|
+
|
|
111
|
+
|
|
112
|
+
def test_update_columns_validation():
|
|
113
|
+
"""测试更新列验证"""
|
|
114
|
+
print("\n=== 测试更新列验证 ===")
|
|
115
|
+
|
|
116
|
+
# 测试元组格式
|
|
117
|
+
try:
|
|
118
|
+
settings = MockSettings(MYSQL_UPDATE_COLUMNS=('title', 'content'))
|
|
119
|
+
crawler = MockCrawler(settings)
|
|
120
|
+
print("✓ 元组格式更新列验证通过")
|
|
121
|
+
except Exception as e:
|
|
122
|
+
print(f"✗ 元组格式更新列验证失败: {e}")
|
|
123
|
+
|
|
124
|
+
# 测试列表格式
|
|
125
|
+
try:
|
|
126
|
+
settings = MockSettings(MYSQL_UPDATE_COLUMNS=['title', 'content'])
|
|
127
|
+
crawler = MockCrawler(settings)
|
|
128
|
+
print("✓ 列表格式更新列验证通过")
|
|
129
|
+
except Exception as e:
|
|
130
|
+
print(f"✗ 列表格式更新列验证失败: {e}")
|
|
131
|
+
|
|
132
|
+
# 测试单个值(会被转换为元组)
|
|
133
|
+
try:
|
|
134
|
+
settings = MockSettings(MYSQL_UPDATE_COLUMNS='title')
|
|
135
|
+
crawler = MockCrawler(settings)
|
|
136
|
+
print("✓ 单个值更新列转换验证通过")
|
|
137
|
+
except Exception as e:
|
|
138
|
+
print(f"✗ 单个值更新列转换验证失败: {e}")
|
|
139
|
+
|
|
140
|
+
|
|
141
|
+
def test_pipeline_initialization():
|
|
142
|
+
"""测试管道初始化"""
|
|
143
|
+
print("\n=== 测试管道初始化 ===")
|
|
144
|
+
|
|
145
|
+
# 测试 AsyncmyMySQLPipeline 初始化
|
|
146
|
+
try:
|
|
147
|
+
settings = MockSettings()
|
|
148
|
+
crawler = MockCrawler(settings)
|
|
149
|
+
pipeline = AsyncmyMySQLPipeline.from_crawler(crawler)
|
|
150
|
+
print("✓ AsyncmyMySQLPipeline 初始化成功")
|
|
151
|
+
except Exception as e:
|
|
152
|
+
print(f"✗ AsyncmyMySQLPipeline 初始化失败: {e}")
|
|
153
|
+
|
|
154
|
+
# 测试 AiomysqlMySQLPipeline 初始化
|
|
155
|
+
try:
|
|
156
|
+
settings = MockSettings()
|
|
157
|
+
crawler = MockCrawler(settings)
|
|
158
|
+
pipeline = AiomysqlMySQLPipeline.from_crawler(crawler)
|
|
159
|
+
print("✓ AiomysqlMySQLPipeline 初始化成功")
|
|
160
|
+
except Exception as e:
|
|
161
|
+
print(f"✗ AiomysqlMySQLPipeline 初始化失败: {e}")
|
|
162
|
+
|
|
163
|
+
|
|
164
|
+
async def test_error_handling():
|
|
165
|
+
"""测试错误处理(概念性测试)"""
|
|
166
|
+
print("\n=== 测试错误处理 ===")
|
|
167
|
+
|
|
168
|
+
print("以下错误处理机制已实现:")
|
|
169
|
+
print("1. 连接池状态检查")
|
|
170
|
+
print("2. 连接错误重试机制")
|
|
171
|
+
print("3. 死锁重试机制")
|
|
172
|
+
print("4. 超时处理")
|
|
173
|
+
print("5. 批量操作错误恢复")
|
|
174
|
+
print("6. 详细日志记录")
|
|
175
|
+
|
|
176
|
+
print("✓ 错误处理机制已完善")
|
|
177
|
+
|
|
178
|
+
|
|
179
|
+
def main():
|
|
180
|
+
"""主测试函数"""
|
|
181
|
+
print("=== MySQL 管道健壮性测试 ===")
|
|
182
|
+
|
|
183
|
+
test_table_name_validation()
|
|
184
|
+
test_batch_size_validation()
|
|
185
|
+
test_update_columns_validation()
|
|
186
|
+
test_pipeline_initialization()
|
|
187
|
+
|
|
188
|
+
# 运行异步测试
|
|
189
|
+
asyncio.run(test_error_handling())
|
|
190
|
+
|
|
191
|
+
print("\n=== 测试完成 ===")
|
|
192
|
+
print("注意:某些测试需要在实际运行环境中才能完全验证")
|
|
193
|
+
|
|
194
|
+
|
|
195
|
+
if __name__ == "__main__":
|
|
196
|
+
main()
|
|
@@ -0,0 +1,89 @@
|
|
|
1
|
+
# -*- coding: utf-8 -*-
|
|
2
|
+
"""
|
|
3
|
+
测试 MySQL 管道类型检查
|
|
4
|
+
验证修复的类型问题
|
|
5
|
+
"""
|
|
6
|
+
import asyncio
|
|
7
|
+
import sys
|
|
8
|
+
import os
|
|
9
|
+
from typing import Dict, Any
|
|
10
|
+
|
|
11
|
+
# 添加项目根目录到 Python 路径
|
|
12
|
+
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
|
|
13
|
+
|
|
14
|
+
from crawlo.items import Item, Field
|
|
15
|
+
from crawlo.pipelines.mysql_pipeline import BaseMySQLPipeline
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
# 创建一个简单的 Item 类用于测试
|
|
19
|
+
class TestItem(Item):
|
|
20
|
+
title = Field()
|
|
21
|
+
content = Field()
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
# 创建一个简单的爬虫模拟类
|
|
25
|
+
class MockSpider:
|
|
26
|
+
name = "test_spider"
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
# 创建一个简单的爬虫模拟类
|
|
30
|
+
class MockCrawler:
|
|
31
|
+
def __init__(self):
|
|
32
|
+
self.settings = MockSettings()
|
|
33
|
+
self.subscriber = MockSubscriber()
|
|
34
|
+
self.spider = MockSpider()
|
|
35
|
+
|
|
36
|
+
|
|
37
|
+
class MockSettings:
|
|
38
|
+
def get(self, key, default=None):
|
|
39
|
+
# 简化的设置获取
|
|
40
|
+
settings_map = {
|
|
41
|
+
'MYSQL_HOST': 'localhost',
|
|
42
|
+
'MYSQL_PORT': 3306,
|
|
43
|
+
'MYSQL_USER': 'root',
|
|
44
|
+
'MYSQL_PASSWORD': '',
|
|
45
|
+
'MYSQL_DB': 'test_db',
|
|
46
|
+
'MYSQL_TABLE': 'test_table',
|
|
47
|
+
'LOG_LEVEL': 'INFO'
|
|
48
|
+
}
|
|
49
|
+
return settings_map.get(key, default)
|
|
50
|
+
|
|
51
|
+
def get_int(self, key, default=0):
|
|
52
|
+
return int(self.get(key, default))
|
|
53
|
+
|
|
54
|
+
def get_bool(self, key, default=False):
|
|
55
|
+
return bool(self.get(key, default))
|
|
56
|
+
|
|
57
|
+
|
|
58
|
+
class MockSubscriber:
|
|
59
|
+
def subscribe(self, func, event):
|
|
60
|
+
# 简化的订阅
|
|
61
|
+
pass
|
|
62
|
+
|
|
63
|
+
|
|
64
|
+
def test_types():
|
|
65
|
+
"""测试类型检查"""
|
|
66
|
+
print("=== 测试 MySQL 管道类型 ===")
|
|
67
|
+
|
|
68
|
+
# 创建模拟的爬虫和管道
|
|
69
|
+
crawler = MockCrawler()
|
|
70
|
+
|
|
71
|
+
# 测试基类不能直接实例化(因为有抽象方法)
|
|
72
|
+
try:
|
|
73
|
+
# 这应该会失败,因为基类是抽象的
|
|
74
|
+
pipeline = BaseMySQLPipeline(crawler)
|
|
75
|
+
print("✓ BaseMySQLPipeline 实例化成功")
|
|
76
|
+
except Exception as e:
|
|
77
|
+
print(f"✗ BaseMySQLPipeline 实例化失败: {e}")
|
|
78
|
+
|
|
79
|
+
# 测试方法签名
|
|
80
|
+
print("\n方法签名检查:")
|
|
81
|
+
print("- process_item(self, item: Item, spider, kwargs: Dict[str, Any] = None) -> Item")
|
|
82
|
+
print("- _execute_sql(self, sql: str, values: list = None) -> int (abstractmethod)")
|
|
83
|
+
print("- _execute_batch_sql(self, sql: str, values_list: list) -> int (abstractmethod)")
|
|
84
|
+
|
|
85
|
+
print("\n=== 类型检查完成 ===")
|
|
86
|
+
|
|
87
|
+
|
|
88
|
+
if __name__ == "__main__":
|
|
89
|
+
test_types()
|
|
@@ -0,0 +1,94 @@
|
|
|
1
|
+
# -*- coding: utf-8 -*-
|
|
2
|
+
"""
|
|
3
|
+
测试 MYSQL_UPDATE_COLUMNS 配置参数
|
|
4
|
+
验证是否解决了 MySQL 的 VALUES() 函数弃用警告问题
|
|
5
|
+
"""
|
|
6
|
+
import asyncio
|
|
7
|
+
import sys
|
|
8
|
+
import os
|
|
9
|
+
|
|
10
|
+
# 添加项目根目录到 Python 路径
|
|
11
|
+
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
|
|
12
|
+
|
|
13
|
+
from crawlo.utils.db_helper import SQLBuilder
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
def test_update_columns_syntax():
|
|
17
|
+
"""测试更新列语法是否正确"""
|
|
18
|
+
print("测试 MYSQL_UPDATE_COLUMNS 配置参数...")
|
|
19
|
+
|
|
20
|
+
# 测试数据
|
|
21
|
+
table = "test_table"
|
|
22
|
+
data = {
|
|
23
|
+
"title": "测试标题",
|
|
24
|
+
"publish_time": "2025-10-09 09:57",
|
|
25
|
+
"url": "https://example.com/test",
|
|
26
|
+
"content": "测试内容"
|
|
27
|
+
}
|
|
28
|
+
|
|
29
|
+
# 测试 MYSQL_UPDATE_COLUMNS 配置
|
|
30
|
+
update_columns = ('title', 'publish_time')
|
|
31
|
+
|
|
32
|
+
# 生成 SQL 语句
|
|
33
|
+
sql = SQLBuilder.make_insert(
|
|
34
|
+
table=table,
|
|
35
|
+
data=data,
|
|
36
|
+
auto_update=False,
|
|
37
|
+
update_columns=update_columns,
|
|
38
|
+
insert_ignore=False
|
|
39
|
+
)
|
|
40
|
+
|
|
41
|
+
print("生成的 SQL 语句:")
|
|
42
|
+
print(sql)
|
|
43
|
+
print()
|
|
44
|
+
|
|
45
|
+
# 验证是否使用了正确的语法(不包含 VALUES() 函数作为函数调用)
|
|
46
|
+
if "AS `excluded`" in sql and "ON DUPLICATE KEY UPDATE" in sql:
|
|
47
|
+
print("✓ 正确使用了新的 MySQL 语法: INSERT ... VALUES (...) AS excluded ...")
|
|
48
|
+
|
|
49
|
+
# 检查更新子句是否正确(不使用 VALUES() 函数)
|
|
50
|
+
if "`title`=`excluded`.`title`" in sql and "`publish_time`=`excluded`.`publish_time`" in sql:
|
|
51
|
+
if "VALUES(`title`)" not in sql and "VALUES(`publish_time`)" not in sql:
|
|
52
|
+
print("✓ 更新子句正确使用了 excluded 别名,未使用 VALUES() 函数")
|
|
53
|
+
else:
|
|
54
|
+
print("✗ 更新子句错误地使用了 VALUES() 函数")
|
|
55
|
+
else:
|
|
56
|
+
print("✗ 更新子句语法不正确")
|
|
57
|
+
else:
|
|
58
|
+
print("✗ 未正确使用新的 MySQL 语法")
|
|
59
|
+
|
|
60
|
+
# 测试批量插入
|
|
61
|
+
print("\n测试批量插入...")
|
|
62
|
+
datas = [data, data] # 两条相同的数据用于测试
|
|
63
|
+
|
|
64
|
+
batch_result = SQLBuilder.make_batch(
|
|
65
|
+
table=table,
|
|
66
|
+
datas=datas,
|
|
67
|
+
auto_update=False,
|
|
68
|
+
update_columns=update_columns
|
|
69
|
+
)
|
|
70
|
+
|
|
71
|
+
if batch_result:
|
|
72
|
+
batch_sql, values_list = batch_result
|
|
73
|
+
print("生成的批量 SQL 语句:")
|
|
74
|
+
print(batch_sql)
|
|
75
|
+
print()
|
|
76
|
+
|
|
77
|
+
# 验证批量插入语法
|
|
78
|
+
if "VALUES (%s)" in batch_sql and "AS `excluded`" in batch_sql and "ON DUPLICATE KEY UPDATE" in batch_sql:
|
|
79
|
+
print("✓ 批量插入正确使用了新的 MySQL 语法")
|
|
80
|
+
|
|
81
|
+
# 检查更新子句是否正确(不使用 VALUES() 函数)
|
|
82
|
+
if "`title`=`excluded`.`title`" in batch_sql and "`publish_time`=`excluded`.`publish_time`" in batch_sql:
|
|
83
|
+
if "VALUES(`title`)" not in batch_sql and "VALUES(`publish_time`)" not in batch_sql:
|
|
84
|
+
print("✓ 批量插入更新子句正确使用了 excluded 别名,未使用 VALUES() 函数")
|
|
85
|
+
else:
|
|
86
|
+
print("✗ 批量插入更新子句错误地使用了 VALUES() 函数")
|
|
87
|
+
else:
|
|
88
|
+
print("✗ 批量插入更新子句语法不正确")
|
|
89
|
+
else:
|
|
90
|
+
print("✗ 批量插入未正确使用新的 MySQL 语法")
|
|
91
|
+
|
|
92
|
+
|
|
93
|
+
if __name__ == "__main__":
|
|
94
|
+
test_update_columns_syntax()
|
tests/test_proxy_middleware.py
CHANGED
|
@@ -49,25 +49,27 @@ class TestProxyMiddleware(unittest.TestCase):
|
|
|
49
49
|
@patch('crawlo.utils.log.get_logger')
|
|
50
50
|
def test_middleware_initialization_without_api_url(self, mock_get_logger):
|
|
51
51
|
"""测试没有配置API URL时中间件初始化"""
|
|
52
|
-
|
|
52
|
+
# 不再需要 PROXY_ENABLED,只要不配置 PROXY_API_URL 就会禁用
|
|
53
53
|
self.settings.set('PROXY_API_URL', None)
|
|
54
54
|
self.settings.set('LOG_LEVEL', 'INFO')
|
|
55
55
|
|
|
56
56
|
mock_get_logger.return_value = MockLogger('ProxyMiddleware')
|
|
57
57
|
|
|
58
|
-
#
|
|
59
|
-
|
|
60
|
-
|
|
58
|
+
# 应该正常创建实例,但会禁用
|
|
59
|
+
middleware = ProxyMiddleware.create_instance(self.crawler)
|
|
60
|
+
self.assertIsInstance(middleware, ProxyMiddleware)
|
|
61
|
+
self.assertFalse(middleware.enabled)
|
|
61
62
|
|
|
62
63
|
@patch('crawlo.utils.log.get_logger')
|
|
63
64
|
def test_middleware_initialization_with_disabled_proxy(self, mock_get_logger):
|
|
64
65
|
"""测试禁用代理时中间件初始化"""
|
|
65
|
-
|
|
66
|
+
# 不再需要 PROXY_ENABLED,只要不配置 PROXY_API_URL 就会禁用
|
|
67
|
+
self.settings.set('PROXY_API_URL', None)
|
|
66
68
|
self.settings.set('LOG_LEVEL', 'INFO')
|
|
67
69
|
|
|
68
70
|
mock_get_logger.return_value = MockLogger('ProxyMiddleware')
|
|
69
71
|
|
|
70
|
-
#
|
|
72
|
+
# 应该正常创建实例,但会禁用
|
|
71
73
|
middleware = ProxyMiddleware.create_instance(self.crawler)
|
|
72
74
|
self.assertIsInstance(middleware, ProxyMiddleware)
|
|
73
75
|
self.assertFalse(middleware.enabled)
|
|
@@ -75,18 +77,48 @@ class TestProxyMiddleware(unittest.TestCase):
|
|
|
75
77
|
@patch('crawlo.utils.log.get_logger')
|
|
76
78
|
def test_middleware_initialization_with_api_url(self, mock_get_logger):
|
|
77
79
|
"""测试配置API URL时中间件初始化"""
|
|
78
|
-
|
|
80
|
+
# 不再需要 PROXY_ENABLED,只要配置了 PROXY_API_URL 就会启用
|
|
79
81
|
self.settings.set('PROXY_API_URL', 'http://proxy-api.example.com')
|
|
80
82
|
self.settings.set('LOG_LEVEL', 'INFO')
|
|
81
83
|
|
|
82
84
|
mock_get_logger.return_value = MockLogger('ProxyMiddleware')
|
|
83
85
|
|
|
84
|
-
#
|
|
86
|
+
# 应该正常创建实例并启用
|
|
85
87
|
middleware = ProxyMiddleware.create_instance(self.crawler)
|
|
86
88
|
self.assertIsInstance(middleware, ProxyMiddleware)
|
|
87
89
|
self.assertTrue(middleware.enabled)
|
|
88
90
|
self.assertEqual(middleware.api_url, 'http://proxy-api.example.com')
|
|
89
91
|
|
|
92
|
+
def test_middleware_initialization(self):
|
|
93
|
+
"""测试中间件初始化"""
|
|
94
|
+
# 配置代理API URL以启用中间件
|
|
95
|
+
self.settings.set('PROXY_API_URL', 'http://proxy-api.example.com')
|
|
96
|
+
middleware = ProxyMiddleware(self.settings, "DEBUG")
|
|
97
|
+
self.assertIsInstance(middleware, ProxyMiddleware)
|
|
98
|
+
self.assertTrue(middleware.enabled)
|
|
99
|
+
self.assertEqual(middleware.api_url, 'http://proxy-api.example.com')
|
|
100
|
+
|
|
101
|
+
def test_middleware_enabled_with_api_url(self):
|
|
102
|
+
"""测试配置了代理API URL时中间件启用"""
|
|
103
|
+
self.settings.set('PROXY_API_URL', 'http://proxy-api.example.com')
|
|
104
|
+
# 不再需要显式设置 PROXY_ENABLED = True
|
|
105
|
+
middleware = ProxyMiddleware(self.settings, "DEBUG")
|
|
106
|
+
self.assertTrue(middleware.enabled)
|
|
107
|
+
self.assertEqual(middleware.api_url, 'http://proxy-api.example.com')
|
|
108
|
+
|
|
109
|
+
def test_middleware_disabled_without_api_url(self):
|
|
110
|
+
"""测试未配置代理API URL时中间件禁用"""
|
|
111
|
+
# 不设置 PROXY_API_URL 或设置为空
|
|
112
|
+
self.settings.set('PROXY_API_URL', '')
|
|
113
|
+
middleware = ProxyMiddleware(self.settings, "DEBUG")
|
|
114
|
+
self.assertFalse(middleware.enabled)
|
|
115
|
+
|
|
116
|
+
def test_middleware_disabled_explicitly(self):
|
|
117
|
+
"""测试显式禁用中间件(通过不配置API URL)"""
|
|
118
|
+
# 不配置 PROXY_API_URL
|
|
119
|
+
middleware = ProxyMiddleware(self.settings, "DEBUG")
|
|
120
|
+
self.assertFalse(middleware.enabled)
|
|
121
|
+
|
|
90
122
|
def test_is_https_with_https_url(self):
|
|
91
123
|
"""测试HTTPS URL判断"""
|
|
92
124
|
# 创建中间件实例
|
|
@@ -117,6 +149,70 @@ class TestProxyMiddleware(unittest.TestCase):
|
|
|
117
149
|
# 应该返回False
|
|
118
150
|
self.assertFalse(middleware._is_https(request))
|
|
119
151
|
|
|
152
|
+
def test_proxy_extractor_field(self):
|
|
153
|
+
"""测试字段名提取方式"""
|
|
154
|
+
self.settings.set('PROXY_API_URL', 'http://test.api/proxy')
|
|
155
|
+
self.settings.set('PROXY_EXTRACTOR', 'data') # 从data字段提取
|
|
156
|
+
|
|
157
|
+
middleware = ProxyMiddleware(self.settings, "DEBUG")
|
|
158
|
+
self.assertEqual(middleware.proxy_extractor, 'data')
|
|
159
|
+
|
|
160
|
+
# 测试提取逻辑
|
|
161
|
+
data = {'data': 'http://proxy-from-data:8080'}
|
|
162
|
+
proxy = middleware._extract_proxy_from_data(data)
|
|
163
|
+
self.assertEqual(proxy, 'http://proxy-from-data:8080')
|
|
164
|
+
|
|
165
|
+
def test_proxy_extractor_dict_field(self):
|
|
166
|
+
"""测试字典字段提取方式"""
|
|
167
|
+
self.settings.set('PROXY_API_URL', 'http://test.api/proxy')
|
|
168
|
+
self.settings.set('PROXY_EXTRACTOR', {'type': 'field', 'value': 'result'})
|
|
169
|
+
|
|
170
|
+
middleware = ProxyMiddleware(self.settings, "DEBUG")
|
|
171
|
+
self.assertEqual(middleware.proxy_extractor['type'], 'field')
|
|
172
|
+
self.assertEqual(middleware.proxy_extractor['value'], 'result')
|
|
173
|
+
|
|
174
|
+
# 测试提取逻辑
|
|
175
|
+
data = {'result': 'http://proxy-from-result:8080'}
|
|
176
|
+
proxy = middleware._extract_proxy_from_data(data)
|
|
177
|
+
self.assertEqual(proxy, 'http://proxy-from-result:8080')
|
|
178
|
+
|
|
179
|
+
def test_proxy_extractor_custom_function(self):
|
|
180
|
+
"""测试自定义函数提取方式"""
|
|
181
|
+
def custom_extractor(data):
|
|
182
|
+
return data.get('custom_proxy')
|
|
183
|
+
|
|
184
|
+
self.settings.set('PROXY_API_URL', 'http://test.api/proxy')
|
|
185
|
+
self.settings.set('PROXY_EXTRACTOR', {'type': 'custom', 'function': custom_extractor})
|
|
186
|
+
|
|
187
|
+
middleware = ProxyMiddleware(self.settings, "DEBUG")
|
|
188
|
+
|
|
189
|
+
# 测试提取逻辑
|
|
190
|
+
data = {'custom_proxy': 'http://proxy-from-custom:8080'}
|
|
191
|
+
proxy = middleware._extract_proxy_from_data(data)
|
|
192
|
+
self.assertEqual(proxy, 'http://proxy-from-custom:8080')
|
|
193
|
+
|
|
194
|
+
def test_proxy_extractor_callable(self):
|
|
195
|
+
"""测试直接函数提取方式"""
|
|
196
|
+
def direct_extractor(data):
|
|
197
|
+
return data.get('direct_proxy')
|
|
198
|
+
|
|
199
|
+
self.settings.set('PROXY_API_URL', 'http://test.api/proxy')
|
|
200
|
+
self.settings.set('PROXY_EXTRACTOR', direct_extractor)
|
|
201
|
+
|
|
202
|
+
middleware = ProxyMiddleware(self.settings, "DEBUG")
|
|
203
|
+
|
|
204
|
+
# 测试提取逻辑
|
|
205
|
+
data = {'direct_proxy': 'http://proxy-from-direct:8080'}
|
|
206
|
+
proxy = middleware._extract_proxy_from_data(data)
|
|
207
|
+
self.assertEqual(proxy, 'http://proxy-from-direct:8080')
|
|
208
|
+
|
|
209
|
+
def test_middleware_disabled_without_list(self):
|
|
210
|
+
"""测试未配置代理列表时代理中间件禁用"""
|
|
211
|
+
# 不设置 PROXY_LIST 或设置为空列表
|
|
212
|
+
self.settings.set('PROXY_LIST', [])
|
|
213
|
+
from crawlo.middleware.proxy import ProxyMiddleware
|
|
214
|
+
middleware = ProxyMiddleware(self.settings, "DEBUG")
|
|
215
|
+
self.assertFalse(middleware.enabled)
|
|
120
216
|
|
|
121
217
|
if __name__ == '__main__':
|
|
122
218
|
unittest.main()
|
|
@@ -52,12 +52,8 @@ def test_proxy_class():
|
|
|
52
52
|
def create_mock_settings():
|
|
53
53
|
"""创建模拟设置"""
|
|
54
54
|
settings = SettingManager()
|
|
55
|
-
|
|
55
|
+
# 不再需要显式设置 PROXY_ENABLED,只要配置了 PROXY_API_URL 就会启用
|
|
56
56
|
settings.set("PROXY_API_URL", "http://test.proxy.api/get")
|
|
57
|
-
settings.set("PROXY_EXTRACTOR", "proxy")
|
|
58
|
-
settings.set("PROXY_REFRESH_INTERVAL", 10)
|
|
59
|
-
settings.set("PROXY_POOL_SIZE", 3)
|
|
60
|
-
settings.set("PROXY_HEALTH_CHECK_THRESHOLD", 0.5)
|
|
61
57
|
settings.set("LOG_LEVEL", "DEBUG")
|
|
62
58
|
return settings
|
|
63
59
|
|
|
@@ -13,7 +13,12 @@ def crawler():
|
|
|
13
13
|
class MockSettings:
|
|
14
14
|
def get(self, key, default=None):
|
|
15
15
|
defaults = {
|
|
16
|
-
|
|
16
|
+
# 配置代理中间件
|
|
17
|
+
custom_settings = {
|
|
18
|
+
# 高级代理配置(适用于ProxyMiddleware)
|
|
19
|
+
# 只要配置了代理API URL,中间件就会自动启用
|
|
20
|
+
'PROXY_API_URL': 'http://mock-proxy-service.com/api',
|
|
21
|
+
}
|
|
17
22
|
'PROXIES': ['http://p1:8080', 'http://p2:8080'],
|
|
18
23
|
'PROXY_SELECTION_STRATEGY': 'random',
|
|
19
24
|
'PROXY_REQUEST_DELAY_ENABLED': False,
|
|
@@ -134,4 +139,4 @@ async def test_request_delay(middleware, spider):
|
|
|
134
139
|
|
|
135
140
|
mock_sleep.assert_called_once()
|
|
136
141
|
delay = mock_sleep.call_args[0][0]
|
|
137
|
-
assert 0.04 <= delay <= 0.06
|
|
142
|
+
assert 0.04 <= delay <= 0.06
|