crawlo 1.4.4__py3-none-any.whl → 1.4.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of crawlo might be problematic.

Files changed (120)
  1. crawlo/__init__.py +11 -15
  2. crawlo/__version__.py +1 -1
  3. crawlo/commands/startproject.py +24 -0
  4. crawlo/core/engine.py +2 -2
  5. crawlo/core/scheduler.py +4 -4
  6. crawlo/crawler.py +8 -7
  7. crawlo/downloader/__init__.py +5 -2
  8. crawlo/downloader/cffi_downloader.py +3 -1
  9. crawlo/extension/__init__.py +2 -2
  10. crawlo/filters/aioredis_filter.py +8 -1
  11. crawlo/filters/memory_filter.py +8 -1
  12. crawlo/initialization/built_in.py +13 -4
  13. crawlo/initialization/core.py +5 -4
  14. crawlo/interfaces.py +24 -0
  15. crawlo/middleware/__init__.py +7 -4
  16. crawlo/middleware/middleware_manager.py +15 -8
  17. crawlo/middleware/proxy.py +171 -348
  18. crawlo/mode_manager.py +45 -11
  19. crawlo/network/response.py +374 -69
  20. crawlo/pipelines/mysql_pipeline.py +340 -189
  21. crawlo/pipelines/pipeline_manager.py +2 -2
  22. crawlo/project.py +2 -4
  23. crawlo/settings/default_settings.py +42 -30
  24. crawlo/stats_collector.py +10 -1
  25. crawlo/task_manager.py +2 -2
  26. crawlo/templates/project/items.py.tmpl +2 -2
  27. crawlo/templates/project/middlewares.py.tmpl +9 -89
  28. crawlo/templates/project/pipelines.py.tmpl +8 -68
  29. crawlo/templates/project/settings.py.tmpl +10 -55
  30. crawlo/templates/project/settings_distributed.py.tmpl +20 -22
  31. crawlo/templates/project/settings_gentle.py.tmpl +5 -0
  32. crawlo/templates/project/settings_high_performance.py.tmpl +5 -0
  33. crawlo/templates/project/settings_minimal.py.tmpl +25 -1
  34. crawlo/templates/project/settings_simple.py.tmpl +5 -0
  35. crawlo/templates/run.py.tmpl +1 -8
  36. crawlo/templates/spider/spider.py.tmpl +5 -108
  37. crawlo/tools/__init__.py +0 -11
  38. crawlo/utils/__init__.py +17 -1
  39. crawlo/utils/db_helper.py +226 -319
  40. crawlo/utils/error_handler.py +313 -67
  41. crawlo/utils/fingerprint.py +3 -4
  42. crawlo/utils/misc.py +82 -0
  43. crawlo/utils/request.py +55 -66
  44. crawlo/utils/selector_helper.py +138 -0
  45. crawlo/utils/spider_loader.py +185 -45
  46. crawlo/utils/text_helper.py +95 -0
  47. crawlo-1.4.6.dist-info/METADATA +329 -0
  48. {crawlo-1.4.4.dist-info → crawlo-1.4.6.dist-info}/RECORD +110 -69
  49. tests/authenticated_proxy_example.py +10 -6
  50. tests/bug_check_test.py +251 -0
  51. tests/direct_selector_helper_test.py +97 -0
  52. tests/explain_mysql_update_behavior.py +77 -0
  53. tests/ofweek_scrapy/ofweek_scrapy/items.py +12 -0
  54. tests/ofweek_scrapy/ofweek_scrapy/middlewares.py +100 -0
  55. tests/ofweek_scrapy/ofweek_scrapy/pipelines.py +13 -0
  56. tests/ofweek_scrapy/ofweek_scrapy/settings.py +85 -0
  57. tests/ofweek_scrapy/ofweek_scrapy/spiders/__init__.py +4 -0
  58. tests/ofweek_scrapy/ofweek_scrapy/spiders/ofweek_spider.py +162 -0
  59. tests/ofweek_scrapy/scrapy.cfg +11 -0
  60. tests/performance_comparison.py +4 -5
  61. tests/simple_crawlo_test.py +1 -2
  62. tests/simple_follow_test.py +39 -0
  63. tests/simple_response_selector_test.py +95 -0
  64. tests/simple_selector_helper_test.py +155 -0
  65. tests/simple_selector_test.py +208 -0
  66. tests/simple_url_test.py +74 -0
  67. tests/simulate_mysql_update_test.py +140 -0
  68. tests/test_asyncmy_usage.py +57 -0
  69. tests/test_crawler_process_import.py +39 -0
  70. tests/test_crawler_process_spider_modules.py +48 -0
  71. tests/test_crawlo_proxy_integration.py +8 -2
  72. tests/test_downloader_proxy_compatibility.py +24 -20
  73. tests/test_edge_cases.py +7 -5
  74. tests/test_encoding_core.py +57 -0
  75. tests/test_encoding_detection.py +127 -0
  76. tests/test_factory_compatibility.py +197 -0
  77. tests/test_mysql_pipeline_config.py +165 -0
  78. tests/test_mysql_pipeline_error.py +99 -0
  79. tests/test_mysql_pipeline_init_log.py +83 -0
  80. tests/test_mysql_pipeline_integration.py +133 -0
  81. tests/test_mysql_pipeline_refactor.py +144 -0
  82. tests/test_mysql_pipeline_refactor_simple.py +86 -0
  83. tests/test_mysql_pipeline_robustness.py +196 -0
  84. tests/test_mysql_pipeline_types.py +89 -0
  85. tests/test_mysql_update_columns.py +94 -0
  86. tests/test_optimized_selector_naming.py +101 -0
  87. tests/test_priority_behavior.py +18 -18
  88. tests/test_proxy_middleware.py +104 -8
  89. tests/test_proxy_middleware_enhanced.py +1 -5
  90. tests/test_proxy_middleware_integration.py +7 -2
  91. tests/test_proxy_middleware_refactored.py +25 -2
  92. tests/test_proxy_only.py +84 -0
  93. tests/test_proxy_with_downloader.py +153 -0
  94. tests/test_real_scenario_proxy.py +17 -17
  95. tests/test_response_follow.py +105 -0
  96. tests/test_response_selector_methods.py +93 -0
  97. tests/test_response_url_methods.py +71 -0
  98. tests/test_response_urljoin.py +87 -0
  99. tests/test_scrapy_style_encoding.py +113 -0
  100. tests/test_selector_helper.py +101 -0
  101. tests/test_selector_optimizations.py +147 -0
  102. tests/test_spider_loader.py +50 -0
  103. tests/test_spider_loader_comprehensive.py +70 -0
  104. tests/test_spiders/__init__.py +1 -0
  105. tests/test_spiders/test_spider.py +10 -0
  106. tests/verify_mysql_warnings.py +110 -0
  107. crawlo/middleware/simple_proxy.py +0 -65
  108. crawlo/tools/anti_crawler.py +0 -269
  109. crawlo/utils/class_loader.py +0 -26
  110. crawlo/utils/enhanced_error_handler.py +0 -357
  111. crawlo-1.4.4.dist-info/METADATA +0 -190
  112. tests/simple_log_test.py +0 -58
  113. tests/simple_test.py +0 -48
  114. tests/test_framework_logger.py +0 -67
  115. tests/test_framework_startup.py +0 -65
  116. tests/test_mode_change.py +0 -73
  117. {crawlo-1.4.4.dist-info → crawlo-1.4.6.dist-info}/WHEEL +0 -0
  118. {crawlo-1.4.4.dist-info → crawlo-1.4.6.dist-info}/entry_points.txt +0 -0
  119. {crawlo-1.4.4.dist-info → crawlo-1.4.6.dist-info}/top_level.txt +0 -0
  120. /tests/{final_command_test_report.md → ofweek_scrapy/ofweek_scrapy/__init__.py} +0 -0
tests/test_mysql_pipeline_config.py
@@ -0,0 +1,165 @@
+ # -*- coding: utf-8 -*-
+ import asyncio
+ import unittest
+ from unittest.mock import Mock, patch
+
+ from crawlo.pipelines.mysql_pipeline import BaseMySQLPipeline, AsyncmyMySQLPipeline, AiomysqlMySQLPipeline
+
+
+ class TestMySQLPipelineConfig(unittest.TestCase):
+     """Tests for MySQL pipeline configuration"""
+
+     def setUp(self):
+         """Set up the test environment"""
+         self.mock_crawler = Mock()
+         self.mock_crawler.settings = Mock()
+         self.mock_crawler.settings.get = Mock(return_value=None)
+         self.mock_crawler.settings.get_int = Mock(return_value=100)
+         self.mock_crawler.settings.get_bool = Mock(return_value=False)
+         self.mock_crawler.subscriber = Mock()
+         self.mock_crawler.subscriber.subscribe = Mock()
+
+         # Mock spider object
+         self.mock_spider = Mock()
+         self.mock_spider.name = "test_spider"
+         self.mock_spider.custom_settings = {}
+         self.mock_spider.mysql_table = None
+         self.mock_crawler.spider = self.mock_spider
+
+     def test_default_config_values(self):
+         """Test the default configuration values"""
+         # Return values mirroring the defaults in the settings file
+         self.mock_crawler.settings.get_bool = Mock(side_effect=lambda key, default: {
+             'MYSQL_AUTO_UPDATE': False,
+             'MYSQL_INSERT_IGNORE': False,
+             'MYSQL_USE_BATCH': False
+         }.get(key, default))
+
+         self.mock_crawler.settings.get = Mock(side_effect=lambda key, default=None: {
+             'MYSQL_UPDATE_COLUMNS': ()
+         }.get(key, default))
+
+         pipeline = AsyncmyMySQLPipeline(self.mock_crawler)
+
+         # Verify the default configuration values
+         self.assertEqual(pipeline.auto_update, False)
+         self.assertEqual(pipeline.insert_ignore, False)
+         self.assertEqual(pipeline.update_columns, ())
+
+     def test_custom_config_values(self):
+         """Test custom configuration values"""
+         # Set custom configuration values
+         self.mock_crawler.settings.get_bool = Mock(side_effect=lambda key, default: {
+             'MYSQL_AUTO_UPDATE': True,
+             'MYSQL_INSERT_IGNORE': True
+         }.get(key, default))
+
+         self.mock_crawler.settings.get = Mock(side_effect=lambda key, default=None: {
+             'MYSQL_UPDATE_COLUMNS': ('updated_at', 'view_count')
+         }.get(key, default))
+
+         pipeline = AsyncmyMySQLPipeline(self.mock_crawler)
+
+         # Verify the custom configuration values
+         self.assertEqual(pipeline.auto_update, True)
+         self.assertEqual(pipeline.insert_ignore, True)
+         self.assertEqual(pipeline.update_columns, ('updated_at', 'view_count'))
+
+     def test_sql_generation_with_config(self):
+         """Test SQL generation driven by the configuration"""
+         # Set configuration
+         self.mock_crawler.settings.get_bool = Mock(side_effect=lambda key, default: {
+             'MYSQL_AUTO_UPDATE': True,
+             'MYSQL_INSERT_IGNORE': False
+         }.get(key, default))
+
+         self.mock_crawler.settings.get = Mock(side_effect=lambda key, default=None: {
+             'MYSQL_UPDATE_COLUMNS': ()
+         }.get(key, default))
+
+         pipeline = AsyncmyMySQLPipeline(self.mock_crawler)
+
+         # Test data
+         item_dict = {"id": 1, "name": "test"}
+
+         async def test_async():
+             with patch('crawlo.pipelines.mysql_pipeline.SQLBuilder.make_insert') as mock_make_insert:
+                 mock_make_insert.return_value = "TEST SQL"
+
+                 # Call the _make_insert_sql method
+                 result = await pipeline._make_insert_sql(item_dict)
+
+                 # Verify SQLBuilder.make_insert was called with the correct arguments
+                 mock_make_insert.assert_called_once()
+                 call_args = mock_make_insert.call_args
+                 self.assertEqual(call_args[1]['auto_update'], True)
+                 self.assertEqual(call_args[1]['insert_ignore'], False)
+                 self.assertEqual(call_args[1]['update_columns'], ())
+
+         asyncio.run(test_async())
+
+     def test_sql_generation_with_kwargs_override(self):
+         """Test SQL generation with kwargs overriding the configuration"""
+         # Set configuration
+         self.mock_crawler.settings.get_bool = Mock(side_effect=lambda key, default: {
+             'MYSQL_AUTO_UPDATE': False,
+             'MYSQL_INSERT_IGNORE': False
+         }.get(key, default))
+
+         self.mock_crawler.settings.get = Mock(side_effect=lambda key, default=None: {
+             'MYSQL_UPDATE_COLUMNS': ()
+         }.get(key, default))
+
+         pipeline = AsyncmyMySQLPipeline(self.mock_crawler)
+
+         # Test data
+         item_dict = {"id": 1, "name": "test"}
+
+         async def test_async():
+             with patch('crawlo.pipelines.mysql_pipeline.SQLBuilder.make_insert') as mock_make_insert:
+                 mock_make_insert.return_value = "TEST SQL"
+
+                 # Call _make_insert_sql with explicit kwargs
+                 result = await pipeline._make_insert_sql(
+                     item_dict,
+                     auto_update=True,
+                     insert_ignore=True
+                 )
+
+                 # Verify the kwargs override the configuration
+                 mock_make_insert.assert_called_once()
+                 call_args = mock_make_insert.call_args
+                 self.assertEqual(call_args[1]['auto_update'], True)
+                 self.assertEqual(call_args[1]['insert_ignore'], True)
+                 self.assertEqual(call_args[1]['update_columns'], ())
+
+         asyncio.run(test_async())
+
+     def test_batch_config_passing(self):
+         """Test that the configuration is carried into batch processing"""
+         # Set configuration
+         self.mock_crawler.settings.get_bool = Mock(side_effect=lambda key, default: {
+             'MYSQL_AUTO_UPDATE': True,
+             'MYSQL_INSERT_IGNORE': False,
+             'MYSQL_USE_BATCH': True
+         }.get(key, default))
+
+         self.mock_crawler.settings.get = Mock(side_effect=lambda key, default=None: {
+             'MYSQL_UPDATE_COLUMNS': ('updated_at',)
+         }.get(key, default))
+
+         self.mock_crawler.settings.get_int = Mock(side_effect=lambda key, default=100: {
+             'MYSQL_BATCH_SIZE': 2
+         }.get(key, default))
+
+         pipeline = AsyncmyMySQLPipeline(self.mock_crawler)
+
+         # Verify the configuration was applied correctly
+         self.assertEqual(pipeline.auto_update, True)
+         self.assertEqual(pipeline.update_columns, ('updated_at',))
+         self.assertEqual(pipeline.use_batch, True)
+         self.assertEqual(pipeline.batch_size, 2)
+
+
+ if __name__ == "__main__":
+     unittest.main()
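The keys exercised by this test are read from the project settings. For orientation, a minimal sketch of how they might be declared in a project's settings.py, assuming the key names used above; the values shown are illustrative, not defaults shipped by crawlo:

# Hypothetical settings.py fragment; key names come from the test above,
# values are examples only.
MYSQL_AUTO_UPDATE = False                             # emit REPLACE INTO when True
MYSQL_INSERT_IGNORE = False                           # emit INSERT IGNORE when True
MYSQL_UPDATE_COLUMNS = ('updated_at', 'view_count')   # columns refreshed on duplicate key
MYSQL_USE_BATCH = True                                # buffer items and insert in batches
MYSQL_BATCH_SIZE = 100                                # rows per batch INSERT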
tests/test_mysql_pipeline_error.py
@@ -0,0 +1,99 @@
+ # -*- coding: utf-8 -*-
+ import asyncio
+ import unittest
+ from unittest.mock import Mock, patch, AsyncMock
+
+ from crawlo.pipelines.mysql_pipeline import AsyncmyMySQLPipeline, AiomysqlMySQLPipeline
+ from crawlo.exceptions import ItemDiscard
+
+
+ class TestMySQLPipelineError(unittest.TestCase):
+     """Tests for MySQL pipeline error handling"""
+
+     def setUp(self):
+         """Set up the test environment"""
+         self.mock_crawler = Mock()
+         self.mock_crawler.settings = Mock()
+         self.mock_crawler.settings.get = Mock(return_value=None)
+         self.mock_crawler.settings.get_int = Mock(return_value=100)
+         self.mock_crawler.settings.get_bool = Mock(return_value=False)
+         self.mock_crawler.subscriber = Mock()
+         self.mock_crawler.subscriber.subscribe = Mock()
+         self.mock_crawler.stats = Mock()
+         self.mock_crawler.stats.inc_value = Mock()
+
+         # Mock spider object
+         self.mock_spider = Mock()
+         self.mock_spider.name = "test_spider"
+         self.mock_spider.custom_settings = {}
+         self.mock_spider.mysql_table = None
+         self.mock_crawler.spider = self.mock_spider
+
+     def test_asyncmy_process_item_with_connection_error(self):
+         """Test AsyncmyMySQLPipeline handling of a connection error"""
+         pipeline = AsyncmyMySQLPipeline(self.mock_crawler)
+
+         # Mock the connection pool and database operations
+         mock_pool = AsyncMock()
+         mock_conn = AsyncMock()
+         mock_cursor = AsyncMock()
+
+         # acquire() resolves to the connection
+         mock_pool.acquire.return_value = mock_conn
+
+         # cursor() resolves to the cursor
+         mock_conn.cursor.return_value = mock_cursor
+
+         # execute() raises an exception
+         mock_cursor.execute.side_effect = Exception("test exception")
+
+         # Attach the mocked pool to the pipeline
+         pipeline.pool = mock_pool
+         pipeline._pool_initialized = True
+
+         # Test data
+         test_item = {"id": 1, "name": "test"}
+
+         async def test_async():
+             with self.assertRaises(ItemDiscard) as context:
+                 await pipeline.process_item(test_item, self.mock_spider)
+
+             # Verify the error message (the pipeline reports "MySQL插入失败",
+             # i.e. "MySQL insert failed", on insert failure)
+             self.assertIn("MySQL插入失败", str(context.exception))
+
+         asyncio.run(test_async())
+
+     def test_execute_sql_with_exception(self):
+         """Test exception handling in the _execute_sql method"""
+         pipeline = AsyncmyMySQLPipeline(self.mock_crawler)
+
+         # Mock the connection pool and database operations
+         mock_pool = AsyncMock()
+         mock_conn = AsyncMock()
+         mock_cursor = AsyncMock()
+
+         # acquire() resolves to the connection
+         mock_pool.acquire.return_value = mock_conn
+
+         # cursor() resolves to the cursor
+         mock_conn.cursor.return_value = mock_cursor
+
+         # execute() raises an exception
+         mock_cursor.execute.side_effect = Exception("test exception")
+
+         # Attach the mocked pool to the pipeline
+         pipeline.pool = mock_pool
+         pipeline._pool_initialized = True
+
+         async def test_async():
+             with self.assertRaises(ItemDiscard) as context:
+                 await pipeline._execute_sql("SELECT 1")
+
+             # Verify the error message
+             self.assertIn("MySQL插入失败", str(context.exception))
+
+         asyncio.run(test_async())
+
+
+ if __name__ == "__main__":
+     unittest.main()
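The contract these tests pin down: any database exception raised during an insert is wrapped in ItemDiscard, so the engine drops the offending item instead of crashing the crawl. A minimal sketch of that pattern, assuming the pool.acquire() -> conn.cursor() -> cursor.execute() call chain implied by the mocks above; this is not the pipeline's actual source:

from crawlo.exceptions import ItemDiscard

async def execute_sql_sketch(pool, sql):
    # Mirrors the call chain the mocks stub out; any failure is surfaced
    # as ItemDiscard so the engine counts and drops the item.
    try:
        conn = await pool.acquire()
        cursor = await conn.cursor()
        await cursor.execute(sql)
    except Exception as exc:
        # "MySQL插入失败" is the message prefix asserted in the tests above
        raise ItemDiscard(f"MySQL插入失败: {exc}")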
tests/test_mysql_pipeline_init_log.py
@@ -0,0 +1,83 @@
+ # -*- coding: utf-8 -*-
+ """
+ Test MySQL pipeline initialization logging
+ Verify that logs are emitted correctly when the pipelines are initialized
+ """
+ import sys
+ import os
+
+ # Add the project root to the Python path
+ sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
+
+ from crawlo.pipelines.mysql_pipeline import AsyncmyMySQLPipeline, AiomysqlMySQLPipeline
+
+
+ # A simple mock spider class
+ class MockSpider:
+     name = "test_spider"
+
+
+ # A simple mock settings class
+ class MockSettings:
+     def get(self, key, default=None):
+         # Simplified settings lookup
+         settings_map = {
+             'MYSQL_HOST': 'localhost',
+             'MYSQL_PORT': 3306,
+             'MYSQL_USER': 'root',
+             'MYSQL_PASSWORD': '',
+             'MYSQL_DB': 'test_db',
+             'MYSQL_TABLE': 'test_table',
+             'LOG_LEVEL': 'INFO'
+         }
+         return settings_map.get(key, default)
+
+     def get_int(self, key, default=0):
+         return int(self.get(key, default))
+
+     def get_bool(self, key, default=False):
+         return bool(self.get(key, default))
+
+
+ # A simple mock subscriber class
+ class MockSubscriber:
+     def subscribe(self, func, event):
+         # Simplified subscription
+         pass
+
+
+ # A simple mock crawler class
+ class MockCrawler:
+     def __init__(self):
+         self.settings = MockSettings()
+         self.subscriber = MockSubscriber()
+         self.spider = MockSpider()
+
+
+ def test_pipeline_init_logs():
+     """Test pipeline initialization logging"""
+     print("=== Testing MySQL Pipeline Initialization Logs ===")
+
+     # Create the mock crawler
+     crawler = MockCrawler()
+
+     print("1. Testing AsyncmyMySQLPipeline initialization...")
+     try:
+         asyncmy_pipeline = AsyncmyMySQLPipeline.from_crawler(crawler)
+         print(" ✓ AsyncmyMySQLPipeline initialized successfully")
+     except Exception as e:
+         print(f" ✗ AsyncmyMySQLPipeline initialization failed: {e}")
+
+     print("\n2. Testing AiomysqlMySQLPipeline initialization...")
+     try:
+         aiomysql_pipeline = AiomysqlMySQLPipeline.from_crawler(crawler)
+         print(" ✓ AiomysqlMySQLPipeline initialized successfully")
+     except Exception as e:
+         print(f" ✗ AiomysqlMySQLPipeline initialization failed: {e}")
+
+     print("\n=== Test completed ===")
+     print("Note: Actual log output can be seen when running in a full crawler environment")
+
+
+ if __name__ == "__main__":
+     test_pipeline_init_logs()
tests/test_mysql_pipeline_integration.py
@@ -0,0 +1,133 @@
+ # -*- coding: utf-8 -*-
+ """
+ MySQL pipeline integration test
+ Verify that the MYSQL_UPDATE_COLUMNS setting resolves the MySQL warning issue in practice
+ """
+ import asyncio
+ import sys
+ import os
+
+ # Add the project root to the Python path
+ sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
+
+ from crawlo.utils.db_helper import SQLBuilder
+
+
+ def test_complete_workflow():
+     """Test the complete data-insertion workflow"""
+     print("=== MySQL pipeline integration test ===\n")
+
+     # Configuration mirroring real-world usage
+     table = "news_items"
+     item_data = {
+         'title': '新一代OLED屏下光谱颜色传感技术:解锁显示新密码,重塑视觉新体验',
+         'publish_time': '2025-10-09 09:57',
+         'url': 'https://ee.ofweek.com/2025-10/ART-8460-2806-30671544.html',
+         'source': '',
+         'content': '在全球智能手机市场竞争日趋白热化的当下,消费者对手机屏幕显示效果的要求愈发严苛...'
+     }
+
+     print("1. Testing single-row INSERT SQL generation...")
+     # Single-row insert
+     single_sql = SQLBuilder.make_insert(
+         table=table,
+         data=item_data,
+         auto_update=False,  # do not use REPLACE INTO
+         insert_ignore=False,  # do not use INSERT IGNORE
+         update_columns=('title', 'publish_time')  # update these columns on key conflict
+     )
+
+     print("Generated single-row INSERT SQL:")
+     print(single_sql)
+     print()
+
+     # Verify the syntax is correct
+     if "AS `excluded`" in single_sql and "`title`=`excluded`.`title`" in single_sql:
+         print("✓ Single-row insert uses the new MySQL syntax correctly")
+     else:
+         print("✗ Single-row insert syntax is incorrect")
+
+     print("\n2. Testing batch INSERT SQL generation...")
+     # Batch insert
+     batch_data = [item_data, item_data]  # simulate duplicate rows
+     batch_result = SQLBuilder.make_batch(
+         table=table,
+         datas=batch_data,
+         auto_update=False,
+         update_columns=('title', 'publish_time')
+     )
+
+     if batch_result:
+         batch_sql, values_list = batch_result
+         print("Generated batch INSERT SQL:")
+         print(batch_sql)
+         print(f"Number of parameter value sets: {len(values_list)}")
+         print()
+
+         # Verify the syntax is correct
+         if "AS `excluded`" in batch_sql and "`title`=`excluded`.`title`" in batch_sql:
+             print("✓ Batch insert uses the new MySQL syntax correctly")
+         else:
+             print("✗ Batch insert syntax is incorrect")
+
+     print("\n3. Testing other configuration combinations...")
+
+     # INSERT IGNORE only
+     ignore_sql = SQLBuilder.make_insert(
+         table=table,
+         data=item_data,
+         auto_update=False,
+         insert_ignore=True,
+         update_columns=()  # no update columns
+     )
+
+     print("INSERT IGNORE SQL:")
+     print(ignore_sql)
+     print()
+
+     if "INSERT IGNORE" in ignore_sql and "AS `excluded`" not in ignore_sql:
+         print("✓ INSERT IGNORE mode is correct")
+     else:
+         print("✗ INSERT IGNORE mode is incorrect")
+
+     # REPLACE INTO
+     replace_sql = SQLBuilder.make_insert(
+         table=table,
+         data=item_data,
+         auto_update=True,  # use REPLACE INTO
+         insert_ignore=False,
+         update_columns=()  # no update columns
+     )
+
+     print("REPLACE INTO SQL:")
+     print(replace_sql)
+     print()
+
+     if "REPLACE INTO" in replace_sql and "AS `excluded`" not in replace_sql:
+         print("✓ REPLACE INTO mode is correct")
+     else:
+         print("✗ REPLACE INTO mode is incorrect")
+
+     # Plain INSERT
+     normal_sql = SQLBuilder.make_insert(
+         table=table,
+         data=item_data,
+         auto_update=False,
+         insert_ignore=False,
+         update_columns=()  # no update columns
+     )
+
+     print("Plain INSERT SQL:")
+     print(normal_sql)
+     print()
+
+     if "INSERT INTO" in normal_sql and "AS `excluded`" not in normal_sql:
+         print("✓ Plain INSERT mode is correct")
+     else:
+         print("✗ Plain INSERT mode is incorrect")
+
+     print("\n=== Test complete ===")
+
+
+ if __name__ == "__main__":
+     test_complete_workflow()
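The "AS `excluded`" checks above target MySQL's row-alias upsert syntax (available since MySQL 8.0.19), which replaces the VALUES(col) form that MySQL 8.0.20 deprecates with warning 1287; that deprecation warning is presumably the "MySQL warning issue" the module docstring refers to. A hedged illustration of the statement shape the assertions expect, not output captured from the diff; exact column order, placeholders, and quoting may differ:

# Not from the diff: a plausible rendering of SQLBuilder.make_insert for
# update_columns=('title', 'publish_time').
expected_shape = (
    "INSERT INTO `news_items` (`title`, `publish_time`) VALUES (%s, %s) "
    "AS `excluded` "
    "ON DUPLICATE KEY UPDATE `title`=`excluded`.`title`, "
    "`publish_time`=`excluded`.`publish_time`"
)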
tests/test_mysql_pipeline_refactor.py
@@ -0,0 +1,144 @@
+ # -*- coding: utf-8 -*-
+ import asyncio
+ import unittest
+ from unittest.mock import Mock, patch, AsyncMock
+
+ from crawlo.pipelines.mysql_pipeline import BaseMySQLPipeline, AsyncmyMySQLPipeline, AiomysqlMySQLPipeline
+
+
+ class TestBaseMySQLPipeline(unittest.TestCase):
+     """Tests for the MySQL pipeline base class"""
+
+     def setUp(self):
+         """Set up the test environment"""
+         self.mock_crawler = Mock()
+         self.mock_crawler.settings = Mock()
+         self.mock_crawler.settings.get = Mock(return_value=None)
+         self.mock_crawler.settings.get_int = Mock(return_value=100)
+         self.mock_crawler.settings.get_bool = Mock(return_value=False)
+         self.mock_crawler.subscriber = Mock()
+         self.mock_crawler.subscriber.subscribe = Mock()
+
+         # Mock spider object
+         self.mock_spider = Mock()
+         self.mock_spider.name = "test_spider"
+         self.mock_spider.custom_settings = {}
+         self.mock_spider.mysql_table = None
+         self.mock_crawler.spider = self.mock_spider
+
+     def test_base_init(self):
+         """Test base-class initialization"""
+         # A concrete test subclass implementing the base class
+         class TestMySQLPipeline(BaseMySQLPipeline):
+             async def _ensure_pool(self):
+                 pass
+
+         pipeline = TestMySQLPipeline(self.mock_crawler)
+
+         # Verify attribute initialization
+         self.assertEqual(pipeline.crawler, self.mock_crawler)
+         self.assertEqual(pipeline.settings, self.mock_crawler.settings)
+         self.assertEqual(pipeline.table_name, "test_spider_items")
+         self.assertEqual(pipeline.batch_size, 100)
+         self.assertEqual(pipeline.use_batch, False)
+         self.assertEqual(pipeline.batch_buffer, [])
+
+         # Verify the close event was subscribed
+         self.mock_crawler.subscriber.subscribe.assert_called_once_with(
+             pipeline.spider_closed, event='spider_closed'
+         )
+
+     def test_make_insert_sql_default(self):
+         """Test the default SQL-generation method"""
+         class TestMySQLPipeline(BaseMySQLPipeline):
+             async def _ensure_pool(self):
+                 pass
+
+         pipeline = TestMySQLPipeline(self.mock_crawler)
+         item_dict = {"name": "test", "value": 123}
+
+         # _make_insert_sql is async, so drive it with an event loop
+         async def test_async():
+             with patch('crawlo.pipelines.mysql_pipeline.SQLBuilder.make_insert') as mock_make_insert:
+                 mock_make_insert.return_value = "TEST SQL"
+                 result = await pipeline._make_insert_sql(item_dict)
+                 mock_make_insert.assert_called_once_with(table=pipeline.table_name, data=item_dict)
+                 self.assertEqual(result, "TEST SQL")
+
+         asyncio.run(test_async())
+
+
+ class TestAsyncmyMySQLPipeline(unittest.TestCase):
+     """Tests for the AsyncmyMySQLPipeline implementation"""
+
+     def setUp(self):
+         """Set up the test environment"""
+         self.mock_crawler = Mock()
+         self.mock_crawler.settings = Mock()
+         self.mock_crawler.settings.get = Mock(return_value=None)
+         self.mock_crawler.settings.get_int = Mock(return_value=100)
+         self.mock_crawler.settings.get_bool = Mock(return_value=False)
+         self.mock_crawler.subscriber = Mock()
+         self.mock_crawler.subscriber.subscribe = Mock()
+
+         # Mock spider object
+         self.mock_spider = Mock()
+         self.mock_spider.name = "test_spider"
+         self.mock_spider.custom_settings = {}
+         self.mock_spider.mysql_table = None
+         self.mock_crawler.spider = self.mock_spider
+
+     def test_init(self):
+         """Test initialization"""
+         pipeline = AsyncmyMySQLPipeline(self.mock_crawler)
+
+         # Verify attribute initialization
+         self.assertEqual(pipeline.crawler, self.mock_crawler)
+         self.assertEqual(pipeline.settings, self.mock_crawler.settings)
+         self.assertEqual(pipeline.table_name, "test_spider_items")
+
+     def test_from_crawler(self):
+         """Test the from_crawler classmethod"""
+         pipeline = AsyncmyMySQLPipeline.from_crawler(self.mock_crawler)
+         self.assertIsInstance(pipeline, AsyncmyMySQLPipeline)
+         self.assertEqual(pipeline.crawler, self.mock_crawler)
+
+
+ class TestAiomysqlMySQLPipeline(unittest.TestCase):
+     """Tests for the AiomysqlMySQLPipeline implementation"""
+
+     def setUp(self):
+         """Set up the test environment"""
+         self.mock_crawler = Mock()
+         self.mock_crawler.settings = Mock()
+         self.mock_crawler.settings.get = Mock(return_value=None)
+         self.mock_crawler.settings.get_int = Mock(return_value=100)
+         self.mock_crawler.settings.get_bool = Mock(return_value=False)
+         self.mock_crawler.subscriber = Mock()
+         self.mock_crawler.subscriber.subscribe = Mock()
+
+         # Mock spider object
+         self.mock_spider = Mock()
+         self.mock_spider.name = "test_spider"
+         self.mock_spider.custom_settings = {}
+         self.mock_spider.mysql_table = None
+         self.mock_crawler.spider = self.mock_spider
+
+     def test_init(self):
+         """Test initialization"""
+         pipeline = AiomysqlMySQLPipeline(self.mock_crawler)
+
+         # Verify attribute initialization
+         self.assertEqual(pipeline.crawler, self.mock_crawler)
+         self.assertEqual(pipeline.settings, self.mock_crawler.settings)
+         self.assertEqual(pipeline.table_name, "test_spider_items")
+
+     def test_from_crawler(self):
+         """Test the from_crawler classmethod"""
+         pipeline = AiomysqlMySQLPipeline.from_crawler(self.mock_crawler)
+         self.assertIsInstance(pipeline, AiomysqlMySQLPipeline)
+         self.assertEqual(pipeline.crawler, self.mock_crawler)
+
+
+ if __name__ == "__main__":
+     unittest.main()
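Taken together, these tests suggest _make_insert_sql is the supported customization point: the base class delegates to SQLBuilder.make_insert, the concrete pipelines layer the MYSQL_* settings on top, and explicit kwargs win over settings. A minimal sketch of a project-level subclass under that assumption; UpsertNewsPipeline and its pinned columns are hypothetical, not part of the diff:

from crawlo.pipelines.mysql_pipeline import AsyncmyMySQLPipeline

class UpsertNewsPipeline(AsyncmyMySQLPipeline):
    """Hypothetical subclass: pin upsert columns for one pipeline only."""

    async def _make_insert_sql(self, item_dict, **kwargs):
        # Explicit kwargs from callers still take precedence, matching
        # test_sql_generation_with_kwargs_override above.
        kwargs.setdefault("update_columns", ("title", "publish_time"))
        return await super()._make_insert_sql(item_dict, **kwargs)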