crawlo-1.2.2-py3-none-any.whl → crawlo-1.2.4-py3-none-any.whl

This diff compares publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.

Potentially problematic release: this version of crawlo has been flagged as possibly problematic.

Files changed (222)
  1. crawlo/__init__.py +61 -61
  2. crawlo/__version__.py +1 -1
  3. crawlo/cleaners/__init__.py +60 -60
  4. crawlo/cleaners/data_formatter.py +225 -225
  5. crawlo/cleaners/encoding_converter.py +125 -125
  6. crawlo/cleaners/text_cleaner.py +232 -232
  7. crawlo/cli.py +81 -81
  8. crawlo/commands/__init__.py +14 -14
  9. crawlo/commands/check.py +594 -594
  10. crawlo/commands/genspider.py +151 -151
  11. crawlo/commands/help.py +144 -142
  12. crawlo/commands/list.py +155 -155
  13. crawlo/commands/run.py +323 -292
  14. crawlo/commands/startproject.py +420 -418
  15. crawlo/commands/stats.py +188 -188
  16. crawlo/commands/utils.py +186 -186
  17. crawlo/config.py +312 -312
  18. crawlo/config_validator.py +251 -252
  19. crawlo/core/__init__.py +2 -2
  20. crawlo/core/engine.py +354 -354
  21. crawlo/core/processor.py +40 -40
  22. crawlo/core/scheduler.py +143 -143
  23. crawlo/crawler.py +1110 -1027
  24. crawlo/data/__init__.py +6 -0
  25. crawlo/data/user_agents.py +108 -0
  26. crawlo/downloader/__init__.py +266 -266
  27. crawlo/downloader/aiohttp_downloader.py +220 -220
  28. crawlo/downloader/cffi_downloader.py +256 -256
  29. crawlo/downloader/httpx_downloader.py +259 -259
  30. crawlo/downloader/hybrid_downloader.py +212 -213
  31. crawlo/downloader/playwright_downloader.py +402 -402
  32. crawlo/downloader/selenium_downloader.py +472 -472
  33. crawlo/event.py +11 -11
  34. crawlo/exceptions.py +81 -81
  35. crawlo/extension/__init__.py +37 -37
  36. crawlo/extension/health_check.py +141 -141
  37. crawlo/extension/log_interval.py +57 -57
  38. crawlo/extension/log_stats.py +81 -81
  39. crawlo/extension/logging_extension.py +43 -43
  40. crawlo/extension/memory_monitor.py +104 -104
  41. crawlo/extension/performance_profiler.py +133 -133
  42. crawlo/extension/request_recorder.py +107 -107
  43. crawlo/filters/__init__.py +154 -154
  44. crawlo/filters/aioredis_filter.py +280 -280
  45. crawlo/filters/memory_filter.py +269 -269
  46. crawlo/items/__init__.py +23 -23
  47. crawlo/items/base.py +21 -21
  48. crawlo/items/fields.py +52 -53
  49. crawlo/items/items.py +104 -104
  50. crawlo/middleware/__init__.py +21 -21
  51. crawlo/middleware/default_header.py +131 -131
  52. crawlo/middleware/download_delay.py +104 -104
  53. crawlo/middleware/middleware_manager.py +135 -135
  54. crawlo/middleware/offsite.py +114 -115
  55. crawlo/middleware/proxy.py +367 -366
  56. crawlo/middleware/request_ignore.py +86 -87
  57. crawlo/middleware/response_code.py +163 -164
  58. crawlo/middleware/response_filter.py +136 -137
  59. crawlo/middleware/retry.py +124 -124
  60. crawlo/mode_manager.py +211 -211
  61. crawlo/network/__init__.py +21 -21
  62. crawlo/network/request.py +338 -338
  63. crawlo/network/response.py +359 -359
  64. crawlo/pipelines/__init__.py +21 -21
  65. crawlo/pipelines/bloom_dedup_pipeline.py +156 -156
  66. crawlo/pipelines/console_pipeline.py +39 -39
  67. crawlo/pipelines/csv_pipeline.py +316 -316
  68. crawlo/pipelines/database_dedup_pipeline.py +222 -224
  69. crawlo/pipelines/json_pipeline.py +218 -218
  70. crawlo/pipelines/memory_dedup_pipeline.py +115 -115
  71. crawlo/pipelines/mongo_pipeline.py +131 -131
  72. crawlo/pipelines/mysql_pipeline.py +317 -316
  73. crawlo/pipelines/pipeline_manager.py +61 -61
  74. crawlo/pipelines/redis_dedup_pipeline.py +165 -167
  75. crawlo/project.py +279 -187
  76. crawlo/queue/pqueue.py +37 -37
  77. crawlo/queue/queue_manager.py +337 -337
  78. crawlo/queue/redis_priority_queue.py +298 -298
  79. crawlo/settings/__init__.py +7 -7
  80. crawlo/settings/default_settings.py +217 -226
  81. crawlo/settings/setting_manager.py +122 -122
  82. crawlo/spider/__init__.py +639 -639
  83. crawlo/stats_collector.py +59 -59
  84. crawlo/subscriber.py +129 -130
  85. crawlo/task_manager.py +30 -30
  86. crawlo/templates/crawlo.cfg.tmpl +10 -10
  87. crawlo/templates/project/__init__.py.tmpl +3 -3
  88. crawlo/templates/project/items.py.tmpl +17 -17
  89. crawlo/templates/project/middlewares.py.tmpl +118 -118
  90. crawlo/templates/project/pipelines.py.tmpl +96 -96
  91. crawlo/templates/project/run.py.tmpl +47 -45
  92. crawlo/templates/project/settings.py.tmpl +350 -327
  93. crawlo/templates/project/settings_distributed.py.tmpl +160 -119
  94. crawlo/templates/project/settings_gentle.py.tmpl +133 -94
  95. crawlo/templates/project/settings_high_performance.py.tmpl +155 -151
  96. crawlo/templates/project/settings_simple.py.tmpl +108 -68
  97. crawlo/templates/project/spiders/__init__.py.tmpl +5 -5
  98. crawlo/templates/spider/spider.py.tmpl +143 -143
  99. crawlo/tools/__init__.py +182 -182
  100. crawlo/tools/anti_crawler.py +268 -268
  101. crawlo/tools/authenticated_proxy.py +240 -240
  102. crawlo/tools/data_validator.py +180 -180
  103. crawlo/tools/date_tools.py +35 -35
  104. crawlo/tools/distributed_coordinator.py +386 -386
  105. crawlo/tools/retry_mechanism.py +220 -220
  106. crawlo/tools/scenario_adapter.py +262 -262
  107. crawlo/utils/__init__.py +35 -35
  108. crawlo/utils/batch_processor.py +259 -260
  109. crawlo/utils/controlled_spider_mixin.py +439 -439
  110. crawlo/utils/date_tools.py +290 -290
  111. crawlo/utils/db_helper.py +343 -343
  112. crawlo/utils/enhanced_error_handler.py +356 -359
  113. crawlo/utils/env_config.py +105 -105
  114. crawlo/utils/error_handler.py +123 -125
  115. crawlo/utils/func_tools.py +82 -82
  116. crawlo/utils/large_scale_config.py +286 -286
  117. crawlo/utils/large_scale_helper.py +344 -343
  118. crawlo/utils/log.py +128 -128
  119. crawlo/utils/performance_monitor.py +285 -284
  120. crawlo/utils/queue_helper.py +175 -175
  121. crawlo/utils/redis_connection_pool.py +334 -334
  122. crawlo/utils/redis_key_validator.py +198 -199
  123. crawlo/utils/request.py +267 -267
  124. crawlo/utils/request_serializer.py +218 -219
  125. crawlo/utils/spider_loader.py +61 -62
  126. crawlo/utils/system.py +11 -11
  127. crawlo/utils/tools.py +4 -4
  128. crawlo/utils/url.py +39 -39
  129. {crawlo-1.2.2.dist-info → crawlo-1.2.4.dist-info}/METADATA +764 -692
  130. crawlo-1.2.4.dist-info/RECORD +206 -0
  131. examples/__init__.py +7 -7
  132. tests/DOUBLE_CRAWLO_PREFIX_FIX_REPORT.md +81 -81
  133. tests/__init__.py +7 -7
  134. tests/advanced_tools_example.py +275 -275
  135. tests/authenticated_proxy_example.py +236 -236
  136. tests/cleaners_example.py +160 -160
  137. tests/config_validation_demo.py +102 -102
  138. tests/controlled_spider_example.py +205 -205
  139. tests/date_tools_example.py +180 -180
  140. tests/dynamic_loading_example.py +523 -523
  141. tests/dynamic_loading_test.py +104 -104
  142. tests/env_config_example.py +133 -133
  143. tests/error_handling_example.py +171 -171
  144. tests/redis_key_validation_demo.py +130 -130
  145. tests/response_improvements_example.py +144 -144
  146. tests/test_advanced_tools.py +148 -148
  147. tests/test_all_redis_key_configs.py +145 -145
  148. tests/test_authenticated_proxy.py +141 -141
  149. tests/test_cleaners.py +54 -54
  150. tests/test_comprehensive.py +146 -146
  151. tests/test_config_validator.py +193 -193
  152. tests/test_crawlo_proxy_integration.py +172 -172
  153. tests/test_date_tools.py +123 -123
  154. tests/test_default_header_middleware.py +158 -158
  155. tests/test_double_crawlo_fix.py +207 -207
  156. tests/test_double_crawlo_fix_simple.py +124 -124
  157. tests/test_download_delay_middleware.py +221 -221
  158. tests/test_downloader_proxy_compatibility.py +268 -268
  159. tests/test_dynamic_downloaders_proxy.py +124 -124
  160. tests/test_dynamic_proxy.py +92 -92
  161. tests/test_dynamic_proxy_config.py +146 -146
  162. tests/test_dynamic_proxy_real.py +109 -109
  163. tests/test_edge_cases.py +303 -303
  164. tests/test_enhanced_error_handler.py +270 -270
  165. tests/test_env_config.py +121 -121
  166. tests/test_error_handler_compatibility.py +112 -112
  167. tests/test_final_validation.py +153 -153
  168. tests/test_framework_env_usage.py +103 -103
  169. tests/test_integration.py +356 -356
  170. tests/test_item_dedup_redis_key.py +122 -122
  171. tests/test_offsite_middleware.py +221 -221
  172. tests/test_parsel.py +29 -29
  173. tests/test_performance.py +327 -327
  174. tests/test_proxy_api.py +264 -264
  175. tests/test_proxy_health_check.py +32 -32
  176. tests/test_proxy_middleware.py +121 -121
  177. tests/test_proxy_middleware_enhanced.py +216 -216
  178. tests/test_proxy_middleware_integration.py +136 -136
  179. tests/test_proxy_providers.py +56 -56
  180. tests/test_proxy_stats.py +19 -19
  181. tests/test_proxy_strategies.py +59 -59
  182. tests/test_queue_manager_double_crawlo.py +173 -173
  183. tests/test_queue_manager_redis_key.py +176 -176
  184. tests/test_real_scenario_proxy.py +195 -195
  185. tests/test_redis_config.py +28 -28
  186. tests/test_redis_connection_pool.py +294 -294
  187. tests/test_redis_key_naming.py +181 -181
  188. tests/test_redis_key_validator.py +123 -123
  189. tests/test_redis_queue.py +224 -224
  190. tests/test_request_ignore_middleware.py +182 -182
  191. tests/test_request_serialization.py +70 -70
  192. tests/test_response_code_middleware.py +349 -349
  193. tests/test_response_filter_middleware.py +427 -427
  194. tests/test_response_improvements.py +152 -152
  195. tests/test_retry_middleware.py +241 -241
  196. tests/test_scheduler.py +241 -241
  197. tests/test_simple_response.py +61 -61
  198. tests/test_telecom_spider_redis_key.py +205 -205
  199. tests/test_template_content.py +87 -87
  200. tests/test_template_redis_key.py +134 -134
  201. tests/test_tools.py +153 -153
  202. tests/tools_example.py +257 -257
  203. crawlo-1.2.2.dist-info/RECORD +0 -220
  204. examples/aiohttp_settings.py +0 -42
  205. examples/curl_cffi_settings.py +0 -41
  206. examples/default_header_middleware_example.py +0 -107
  207. examples/default_header_spider_example.py +0 -129
  208. examples/download_delay_middleware_example.py +0 -160
  209. examples/httpx_settings.py +0 -42
  210. examples/multi_downloader_proxy_example.py +0 -81
  211. examples/offsite_middleware_example.py +0 -55
  212. examples/offsite_spider_example.py +0 -107
  213. examples/proxy_spider_example.py +0 -166
  214. examples/request_ignore_middleware_example.py +0 -51
  215. examples/request_ignore_spider_example.py +0 -99
  216. examples/response_code_middleware_example.py +0 -52
  217. examples/response_filter_middleware_example.py +0 -67
  218. examples/tong_hua_shun_settings.py +0 -62
  219. examples/tong_hua_shun_spider.py +0 -170
  220. {crawlo-1.2.2.dist-info → crawlo-1.2.4.dist-info}/WHEEL +0 -0
  221. {crawlo-1.2.2.dist-info → crawlo-1.2.4.dist-info}/entry_points.txt +0 -0
  222. {crawlo-1.2.2.dist-info → crawlo-1.2.4.dist-info}/top_level.txt +0 -0
tests/test_final_validation.py
@@ -1,154 +1,154 @@
(The removed and re-added lines in this hunk are textually identical, most likely a line-ending-only rewrite; the file content is shown once.)

#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Final validation test: confirm the distributed queue's logger serialization
problem is fully resolved.
"""
import asyncio
import pickle
import sys
sys.path.insert(0, "..")

from crawlo.network.request import Request
from crawlo.spider import Spider
from crawlo.core.scheduler import Scheduler
from crawlo.queue.redis_priority_queue import RedisPriorityQueue
from crawlo.utils.log import get_logger
from unittest.mock import Mock


class TestSpider(Spider):
    """Test spider"""
    name = "validation_spider"

    def __init__(self):
        super().__init__()
        # Deliberately attach several loggers to exercise the cleanup
        self.custom_logger = get_logger("custom")
        self.debug_logger = get_logger("debug")
        self.nested_data = {
            'logger': get_logger("nested"),
            'sub': {
                'logger_ref': get_logger("sub_logger")
            }
        }

    def parse(self, response):
        # Verify the main logger is still attached
        self.logger.info(f"✅ Main logger works: {response.url}")
        return {"url": response.url, "status": "success"}


def test_scheduler_cleaning():
    """Test the scheduler's logger cleanup"""
    print("🔍 Testing scheduler logger cleanup...")

    spider = TestSpider()
    request = Request(
        url="https://scheduler-test.com",
        callback=spider.parse,
        meta={"logger": get_logger("meta_logger")}
    )

    # Mock crawler and scheduler
    class MockCrawler:
        def __init__(self):
            self.spider = spider

    class MockScheduler(Scheduler):
        def __init__(self):
            self.crawler = MockCrawler()
            self.logger = get_logger("MockScheduler")

    scheduler = MockScheduler()

    # Check state before cleanup
    print(f" 🔧 Before cleanup - spider.logger: {spider.logger is not None}")
    print(f" 🔧 Before cleanup - spider.custom_logger: {spider.custom_logger is not None}")
    print(f" 🔧 Before cleanup - request.callback: {request.callback is not None}")

    # Run the cleanup
    cleaned_request = scheduler._deep_clean_loggers(request)

    # Check state after cleanup
    print(f" ✅ After cleanup - spider.logger: {spider.logger is not None}")
    print(f" ✅ After cleanup - spider.custom_logger: {spider.custom_logger is None}")
    print(f" ✅ After cleanup - request.callback: {cleaned_request.callback is None}")

    # Serialization test
    try:
        serialized = pickle.dumps(cleaned_request)
        print(f" ✅ Serialization after scheduler cleanup succeeded, size: {len(serialized)} bytes")
        return True
    except Exception as e:
        print(f" ❌ Serialization after scheduler cleanup failed: {e}")
        return False


async def test_redis_queue_cleaning():
    """Test the Redis queue's logger cleanup"""
    print("\n🔍 Testing Redis queue logger cleanup...")

    spider = TestSpider()
    request = Request(
        url="https://redis-test.com",
        callback=spider.parse,
        meta={"logger": get_logger("meta_logger")}
    )

    try:
        queue = RedisPriorityQueue(redis_url="redis://127.0.0.1:6379/0")
        await queue.connect()

        # Enqueue test
        success = await queue.put(request, priority=0)
        print(f" ✅ Redis queue enqueue succeeded: {success}")

        if success:
            # Dequeue test
            retrieved = await queue.get(timeout=2.0)
            if retrieved:
                print(f" ✅ Redis queue dequeue succeeded: {retrieved.url}")
                print(f" ✅ Callback info preserved: {'_callback_info' in retrieved.meta}")
                await queue.close()
                return True
            else:
                print(" ❌ Dequeue failed")
                await queue.close()
                return False
        else:
            await queue.close()
            return False

    except Exception as e:
        print(f" ❌ Redis queue test failed: {e}")
        return False


async def main():
    """Main test entry point"""
    print("🚀 Starting final validation tests...")
    print("=" * 60)

    # Test 1: scheduler cleanup
    scheduler_ok = test_scheduler_cleaning()

    # Test 2: Redis queue cleanup
    redis_ok = await test_redis_queue_cleaning()

    print("\n" + "=" * 60)
    print("📊 Test result summary:")
    print(f" Scheduler logger cleanup: {'✅ passed' if scheduler_ok else '❌ failed'}")
    print(f" Redis queue cleanup: {'✅ passed' if redis_ok else '❌ failed'}")

    if scheduler_ok and redis_ok:
        print("\n🎉 All tests passed!")
        print("✅ The distributed queue's logger serialization problem is fully fixed!")
        print("✅ Crawlo can now use the Redis distributed queue normally!")
        return True
    else:
        print("\n❌ Some tests failed and need further fixes")
        return False


if __name__ == "__main__":
    asyncio.run(main())
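
The scheduler fix validated above hinges on recursively walking a request's object graph and stripping logging.Logger references before pickling. crawlo's actual _deep_clean_loggers is not included in this diff; the sketch below is a minimal, hypothetical version of the technique (the function name, cycle guard, and traversal rules are this sketch's assumptions, not crawlo's code):

import logging
import pickle


def deep_clean_loggers(obj, _seen=None):
    """Recursively replace logging.Logger references with None (hypothetical sketch)."""
    if _seen is None:
        _seen = set()
    if id(obj) in _seen:  # guard against reference cycles
        return obj
    _seen.add(id(obj))

    if isinstance(obj, logging.Logger):
        return None
    if isinstance(obj, dict):
        return {k: deep_clean_loggers(v, _seen) for k, v in obj.items()}
    if isinstance(obj, list):
        return [deep_clean_loggers(v, _seen) for v in obj]
    if isinstance(obj, tuple):
        return tuple(deep_clean_loggers(v, _seen) for v in obj)
    if hasattr(obj, "__dict__"):
        # Plain objects: clean their attributes in place
        for name, value in vars(obj).items():
            setattr(obj, name, deep_clean_loggers(value, _seen))
    return obj


payload = {"logger": logging.getLogger("x"), "url": "https://example.com"}
cleaned = deep_clean_loggers(payload)
pickle.dumps(cleaned)  # raises if anything unpicklable is left behind

The test also checks '_callback_info' in retrieved.meta after asserting that the cleaned request's callback is None, which suggests bound-method callbacks are stripped before serialization, recorded as metadata, and presumably rebound on the consumer side.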
tests/test_framework_env_usage.py
@@ -1,104 +1,104 @@
(As above, the removed and re-added lines are textually identical; the file content is shown once.)

#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Framework environment-variable usage test.
Verifies that environment variables are used correctly across the framework.
"""
import sys
import os
import unittest
from unittest.mock import patch, MagicMock

# Add the project root to the Python path
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))

from crawlo.utils.env_config import get_env_var, get_redis_config, get_runtime_config
from crawlo.settings.setting_manager import SettingManager
from crawlo.settings import default_settings
from crawlo.mode_manager import from_env


class TestFrameworkEnvUsage(unittest.TestCase):
    """Framework environment-variable usage tests"""

    def test_default_settings_env_usage(self):
        """Test environment-variable usage in default_settings.py"""
        # Verify that default_settings.py does not call os.getenv directly
        import inspect
        import crawlo.settings.default_settings as default_settings_module

        source_code = inspect.getsource(default_settings_module)
        # Check for any remaining direct os.getenv usage
        self.assertNotIn('os.getenv', source_code,
                         "default_settings.py should not call os.getenv directly")

        # It should go through the env_config helpers instead
        self.assertIn('get_redis_config', source_code,
                      "default_settings.py should use get_redis_config")
        self.assertIn('get_runtime_config', source_code,
                      "default_settings.py should use get_runtime_config")

    def test_env_config_tool(self):
        """Test the environment-variable configuration helpers"""
        # Reading the Redis configuration
        with patch.dict(os.environ, {
            'REDIS_HOST': 'test.redis.com',
            'REDIS_PORT': '6380',
            'REDIS_PASSWORD': 'test_pass',
            'REDIS_DB': '2'
        }):
            redis_config = get_redis_config()
            self.assertEqual(redis_config['REDIS_HOST'], 'test.redis.com')
            self.assertEqual(redis_config['REDIS_PORT'], 6380)
            self.assertEqual(redis_config['REDIS_PASSWORD'], 'test_pass')
            self.assertEqual(redis_config['REDIS_DB'], 2)

        # Reading the runtime configuration
        with patch.dict(os.environ, {
            'PROJECT_NAME': 'test_project',
            'CRAWLO_MODE': 'distributed',
            'CONCURRENCY': '16'
        }):
            runtime_config = get_runtime_config()
            self.assertEqual(runtime_config['PROJECT_NAME'], 'test_project')
            self.assertEqual(runtime_config['CRAWLO_MODE'], 'distributed')
            self.assertEqual(runtime_config['CONCURRENCY'], 16)

    def test_settings_manager_with_env(self):
        """Test the settings manager's integration with environment variables"""
        # Set the environment variables
        env_vars = {
            'PROJECT_NAME': 'env_test_project',
            'CONCURRENCY': '12',
            'REDIS_HOST': 'env.redis.test',
            'REDIS_PORT': '6381'
        }

        with patch.dict(os.environ, env_vars):
            # Reload default_settings so it picks up the current environment
            import importlib
            import crawlo.settings.default_settings
            importlib.reload(crawlo.settings.default_settings)

            # Create the settings manager
            settings = SettingManager()
            settings.set_settings(crawlo.settings.default_settings)

            # Verify the environment variables were applied
            redis_config = get_redis_config()
            self.assertEqual(settings.get('REDIS_HOST'), redis_config['REDIS_HOST'])

            runtime_config = get_runtime_config()
            self.assertEqual(settings.get('PROJECT_NAME'), runtime_config['PROJECT_NAME'])

    def test_mode_manager_env_usage(self):
        """Test environment-variable usage in mode_manager.py"""
        # Verify that from_env now raises an exception
        with self.assertRaises(RuntimeError) as context:
            from_env()

        # The framework raises a Chinese message meaning
        # "environment-variable configuration has been removed"
        self.assertIn("环境变量配置已移除", str(context.exception))


if __name__ == '__main__':
    unittest.main()
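
For context on test_env_config_tool above: the point of crawlo.utils.env_config is to centralize environment lookups and coerce types, which is why REDIS_PORT set to the string '6380' comes back as the integer 6380. The following is a minimal sketch consistent with the test's assertions; the signatures and default values are assumptions of this sketch, not crawlo's actual implementation:

import os


def get_env_var(name, default=None, var_type=str):
    """Read an environment variable and coerce it to var_type (sketch)."""
    raw = os.environ.get(name)
    if raw is None:
        return default
    if var_type is bool:
        # Accept common truthy spellings
        return raw.strip().lower() in ("1", "true", "yes", "on")
    return var_type(raw)


def get_redis_config():
    """Redis settings from the environment, with numeric fields coerced (sketch)."""
    return {
        "REDIS_HOST": get_env_var("REDIS_HOST", "127.0.0.1"),
        "REDIS_PORT": get_env_var("REDIS_PORT", 6379, int),
        "REDIS_PASSWORD": get_env_var("REDIS_PASSWORD", ""),
        "REDIS_DB": get_env_var("REDIS_DB", 0, int),
    }


def get_runtime_config():
    """Runtime settings from the environment (sketch)."""
    return {
        "PROJECT_NAME": get_env_var("PROJECT_NAME", "crawlo"),
        "CRAWLO_MODE": get_env_var("CRAWLO_MODE", "standalone"),
        "CONCURRENCY": get_env_var("CONCURRENCY", 8, int),
    }

Keeping every environment read behind one helper is also what makes the source-scan assertion in test_default_settings_env_usage enforceable: 'os.getenv' can be banned from default_settings.py because the helpers own that call.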