crawlo 1.4.6__py3-none-any.whl → 1.4.8__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
- crawlo/__init__.py +2 -1
- crawlo/__version__.py +1 -1
- crawlo/cli.py +2 -2
- crawlo/commands/check.py +1 -1
- crawlo/commands/help.py +5 -3
- crawlo/commands/list.py +1 -1
- crawlo/commands/run.py +49 -11
- crawlo/commands/stats.py +1 -1
- crawlo/config.py +12 -4
- crawlo/config_validator.py +1 -1
- crawlo/core/engine.py +20 -7
- crawlo/core/processor.py +1 -1
- crawlo/core/scheduler.py +4 -5
- crawlo/crawler.py +51 -10
- crawlo/downloader/__init__.py +7 -3
- crawlo/downloader/aiohttp_downloader.py +18 -18
- crawlo/downloader/cffi_downloader.py +5 -2
- crawlo/downloader/httpx_downloader.py +9 -3
- crawlo/downloader/hybrid_downloader.py +2 -2
- crawlo/downloader/playwright_downloader.py +38 -15
- crawlo/downloader/selenium_downloader.py +16 -2
- crawlo/event.py +42 -8
- crawlo/exceptions.py +157 -24
- crawlo/extension/__init__.py +10 -9
- crawlo/extension/health_check.py +7 -7
- crawlo/extension/log_interval.py +6 -6
- crawlo/extension/log_stats.py +2 -2
- crawlo/extension/logging_extension.py +4 -12
- crawlo/extension/memory_monitor.py +5 -5
- crawlo/extension/performance_profiler.py +5 -5
- crawlo/extension/request_recorder.py +6 -6
- crawlo/factories/base.py +1 -1
- crawlo/factories/crawler.py +61 -60
- crawlo/factories/utils.py +135 -0
- crawlo/filters/__init__.py +19 -2
- crawlo/filters/aioredis_filter.py +133 -49
- crawlo/filters/memory_filter.py +6 -21
- crawlo/framework.py +22 -8
- crawlo/initialization/built_in.py +24 -67
- crawlo/initialization/core.py +65 -19
- crawlo/initialization/phases.py +83 -2
- crawlo/initialization/registry.py +5 -7
- crawlo/initialization/utils.py +49 -0
- crawlo/logging/__init__.py +6 -10
- crawlo/logging/config.py +106 -22
- crawlo/logging/factory.py +12 -8
- crawlo/logging/manager.py +19 -27
- crawlo/middleware/__init__.py +72 -9
- crawlo/middleware/default_header.py +2 -2
- crawlo/middleware/download_delay.py +2 -2
- crawlo/middleware/middleware_manager.py +6 -6
- crawlo/middleware/offsite.py +2 -2
- crawlo/middleware/proxy.py +2 -2
- crawlo/middleware/request_ignore.py +4 -4
- crawlo/middleware/response_code.py +2 -2
- crawlo/middleware/response_filter.py +2 -2
- crawlo/middleware/retry.py +1 -1
- crawlo/mode_manager.py +38 -4
- crawlo/network/request.py +54 -26
- crawlo/network/response.py +69 -135
- crawlo/pipelines/__init__.py +40 -9
- crawlo/pipelines/base_pipeline.py +452 -0
- crawlo/pipelines/bloom_dedup_pipeline.py +4 -5
- crawlo/pipelines/console_pipeline.py +2 -2
- crawlo/pipelines/csv_pipeline.py +4 -4
- crawlo/pipelines/database_dedup_pipeline.py +4 -5
- crawlo/pipelines/json_pipeline.py +4 -4
- crawlo/pipelines/memory_dedup_pipeline.py +4 -5
- crawlo/pipelines/mongo_pipeline.py +23 -14
- crawlo/pipelines/mysql_pipeline.py +31 -39
- crawlo/pipelines/pipeline_manager.py +8 -8
- crawlo/pipelines/redis_dedup_pipeline.py +13 -14
- crawlo/project.py +1 -1
- crawlo/queue/__init__.py +10 -0
- crawlo/queue/queue_manager.py +79 -13
- crawlo/queue/redis_priority_queue.py +196 -47
- crawlo/settings/default_settings.py +16 -6
- crawlo/spider/__init__.py +6 -5
- crawlo/stats_collector.py +2 -2
- crawlo/task_manager.py +1 -1
- crawlo/templates/crawlo.cfg.tmpl +3 -3
- crawlo/templates/project/__init__.py.tmpl +1 -3
- crawlo/templates/project/items.py.tmpl +2 -6
- crawlo/templates/project/middlewares.py.tmpl +1 -1
- crawlo/templates/project/pipelines.py.tmpl +1 -2
- crawlo/templates/project/settings.py.tmpl +12 -10
- crawlo/templates/project/settings_distributed.py.tmpl +14 -13
- crawlo/templates/project/settings_gentle.py.tmpl +21 -23
- crawlo/templates/project/settings_high_performance.py.tmpl +21 -23
- crawlo/templates/project/settings_minimal.py.tmpl +10 -8
- crawlo/templates/project/settings_simple.py.tmpl +21 -23
- crawlo/templates/run.py.tmpl +1 -1
- crawlo/templates/spider/spider.py.tmpl +4 -12
- crawlo/templates/spiders_init.py.tmpl +3 -8
- crawlo/tools/__init__.py +0 -103
- crawlo/tools/scenario_adapter.py +1 -1
- crawlo/utils/__init__.py +25 -1
- crawlo/utils/batch_processor.py +23 -6
- crawlo/utils/config_manager.py +442 -0
- crawlo/utils/controlled_spider_mixin.py +1 -1
- crawlo/utils/db_helper.py +1 -1
- crawlo/utils/encoding_helper.py +190 -0
- crawlo/utils/error_handler.py +2 -2
- crawlo/utils/large_scale_helper.py +1 -1
- crawlo/utils/leak_detector.py +335 -0
- crawlo/utils/mongo_connection_pool.py +157 -0
- crawlo/utils/mysql_connection_pool.py +197 -0
- crawlo/utils/performance_monitor.py +1 -1
- crawlo/utils/redis_checker.py +91 -0
- crawlo/utils/redis_connection_pool.py +260 -70
- crawlo/utils/redis_key_validator.py +1 -1
- crawlo/utils/request.py +24 -2
- crawlo/utils/request_serializer.py +1 -1
- crawlo/utils/resource_manager.py +337 -0
- crawlo/utils/response_helper.py +113 -0
- crawlo/utils/selector_helper.py +3 -2
- crawlo/utils/singleton.py +70 -0
- crawlo/utils/spider_loader.py +1 -1
- crawlo/utils/text_helper.py +1 -1
- crawlo-1.4.8.dist-info/METADATA +831 -0
- {crawlo-1.4.6.dist-info → crawlo-1.4.8.dist-info}/RECORD +131 -145
- tests/advanced_tools_example.py +10 -68
- tests/distributed_dedup_test.py +467 -0
- tests/monitor_redis_dedup.sh +72 -0
- tests/ofweek_scrapy/ofweek_scrapy/spiders/__init__.py +4 -4
- tests/simple_cli_test.py +55 -0
- tests/test_cli_arguments.py +119 -0
- tests/test_dedup_fix.py +10 -10
- crawlo/logging/async_handler.py +0 -181
- crawlo/logging/monitor.py +0 -153
- crawlo/logging/sampler.py +0 -167
- crawlo/tools/authenticated_proxy.py +0 -241
- crawlo/tools/data_formatter.py +0 -226
- crawlo/tools/data_validator.py +0 -181
- crawlo/tools/encoding_converter.py +0 -127
- crawlo/tools/network_diagnostic.py +0 -365
- crawlo/tools/request_tools.py +0 -83
- crawlo/tools/retry_mechanism.py +0 -224
- crawlo/utils/env_config.py +0 -143
- crawlo/utils/large_scale_config.py +0 -287
- crawlo/utils/log.py +0 -80
- crawlo/utils/system.py +0 -11
- crawlo/utils/tools.py +0 -5
- crawlo/utils/url.py +0 -40
- crawlo-1.4.6.dist-info/METADATA +0 -329
- tests/env_config_example.py +0 -134
- tests/ofweek_scrapy/ofweek_scrapy/spiders/ofweek_spider.py +0 -162
- tests/test_authenticated_proxy.py +0 -142
- tests/test_comprehensive.py +0 -147
- tests/test_dynamic_downloaders_proxy.py +0 -125
- tests/test_dynamic_proxy.py +0 -93
- tests/test_dynamic_proxy_config.py +0 -147
- tests/test_dynamic_proxy_real.py +0 -110
- tests/test_env_config.py +0 -122
- tests/test_framework_env_usage.py +0 -104
- tests/test_large_scale_config.py +0 -113
- tests/test_proxy_api.py +0 -265
- tests/test_real_scenario_proxy.py +0 -196
- tests/tools_example.py +0 -261
- {crawlo-1.4.6.dist-info → crawlo-1.4.8.dist-info}/WHEEL +0 -0
- {crawlo-1.4.6.dist-info → crawlo-1.4.8.dist-info}/entry_points.txt +0 -0
- {crawlo-1.4.6.dist-info → crawlo-1.4.8.dist-info}/top_level.txt +0 -0
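The per-file summary above can be approximated locally using only the Python standard library. A minimal sketch, assuming both wheels have already been downloaded into the current directory (the filenames below are illustrative):

    import difflib
    import zipfile

    def read_members(wheel_path):
        # Map each archive member name to its decoded text content.
        with zipfile.ZipFile(wheel_path) as zf:
            return {name: zf.read(name).decode('utf-8', errors='replace')
                    for name in zf.namelist()}

    old = read_members('crawlo-1.4.6-py3-none-any.whl')
    new = read_members('crawlo-1.4.8-py3-none-any.whl')

    for name in sorted(set(old) | set(new)):
        a = old.get(name, '').splitlines(keepends=True)
        b = new.get(name, '').splitlines(keepends=True)
        diff = list(difflib.unified_diff(a, b, fromfile=name, tofile=name))
        if diff:
            added = sum(1 for line in diff
                        if line.startswith('+') and not line.startswith('+++'))
            removed = sum(1 for line in diff
                          if line.startswith('-') and not line.startswith('---'))
            print(f'{name} +{added} -{removed}')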
tests/test_cli_arguments.py
ADDED
@@ -0,0 +1,119 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+"""
+Test CLI argument parsing
+Verify that the crawlo run command correctly parses the --log-level, --config and --concurrency arguments
+"""
+
+import sys
+import os
+import unittest
+from unittest.mock import patch, MagicMock
+
+# Add the project root directory to the Python path
+sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
+
+from crawlo.commands.run import main as run_main
+
+
+class TestCLIArguments(unittest.TestCase):
+
+    def setUp(self):
+        """Prepare before each test"""
+        # Save the original sys.path
+        self.original_path = sys.path[:]
+
+    def tearDown(self):
+        """Clean up after each test"""
+        # Restore the original sys.path
+        sys.path = self.original_path[:]
+
+    @patch('crawlo.commands.run._find_project_root')
+    @patch('crawlo.commands.run.initialize_framework')
+    @patch('crawlo.commands.run.CrawlerProcess')
+    def test_log_level_argument(self, mock_crawler_process, mock_initialize, mock_find_project):
+        """Test parsing of the --log-level argument"""
+        # Mock the project environment
+        mock_find_project.return_value = os.path.join(os.path.dirname(__file__), '..')
+        mock_initialize.return_value = {}
+        mock_process_instance = MagicMock()
+        mock_crawler_process.return_value = mock_process_instance
+        mock_process_instance.get_spider_names.return_value = ['test_spider']
+        mock_process_instance.is_spider_registered.return_value = True
+        mock_process_instance.get_spider_class.return_value = MagicMock(__name__='TestSpider')
+
+        # Parse the arguments
+        args = ['test_spider', '--log-level', 'DEBUG']
+        result = run_main(args)
+
+        # Verify initialize_framework was called with the correct log level
+        mock_initialize.assert_called_once()
+        call_args = mock_initialize.call_args
+        if call_args and call_args[0]:  # check positional arguments
+            settings = call_args[0][0]
+            self.assertEqual(settings.get('LOG_LEVEL'), 'DEBUG')
+        elif call_args and call_args[1]:  # check keyword arguments
+            settings = call_args[1].get('custom_settings', {})
+            self.assertEqual(settings.get('LOG_LEVEL'), 'DEBUG')
+
+    @patch('crawlo.commands.run._find_project_root')
+    @patch('crawlo.commands.run.initialize_framework')
+    @patch('crawlo.commands.run.CrawlerProcess')
+    def test_concurrency_argument(self, mock_crawler_process, mock_initialize, mock_find_project):
+        """Test parsing of the --concurrency argument"""
+        # Mock the project environment
+        mock_find_project.return_value = os.path.join(os.path.dirname(__file__), '..')
+        mock_initialize.return_value = {}
+        mock_process_instance = MagicMock()
+        mock_crawler_process.return_value = mock_process_instance
+        mock_process_instance.get_spider_names.return_value = ['test_spider']
+        mock_process_instance.is_spider_registered.return_value = True
+        mock_process_instance.get_spider_class.return_value = MagicMock(__name__='TestSpider')
+
+        # Parse the arguments
+        args = ['test_spider', '--concurrency', '32']
+        result = run_main(args)
+
+        # Verify initialize_framework was called with the correct concurrency
+        mock_initialize.assert_called_once()
+        call_args = mock_initialize.call_args
+        if call_args and call_args[0]:  # check positional arguments
+            settings = call_args[0][0]
+            self.assertEqual(settings.get('CONCURRENCY'), 32)
+        elif call_args and call_args[1]:  # check keyword arguments
+            settings = call_args[1].get('custom_settings', {})
+            self.assertEqual(settings.get('CONCURRENCY'), 32)
+
+    @patch('crawlo.commands.run._find_project_root')
+    @patch('crawlo.commands.run.initialize_framework')
+    @patch('crawlo.commands.run.CrawlerProcess')
+    def test_combined_arguments(self, mock_crawler_process, mock_initialize, mock_find_project):
+        """Test parsing of combined arguments"""
+        # Mock the project environment
+        mock_find_project.return_value = os.path.join(os.path.dirname(__file__), '..')
+        mock_initialize.return_value = {}
+        mock_process_instance = MagicMock()
+        mock_crawler_process.return_value = mock_process_instance
+        mock_process_instance.get_spider_names.return_value = ['test_spider']
+        mock_process_instance.is_spider_registered.return_value = True
+        mock_process_instance.get_spider_class.return_value = MagicMock(__name__='TestSpider')
+
+        # Parse the arguments
+        args = ['test_spider', '--log-level', 'DEBUG', '--concurrency', '16']
+        result = run_main(args)
+
+        # Verify initialize_framework was called with the correct arguments
+        mock_initialize.assert_called_once()
+        call_args = mock_initialize.call_args
+        if call_args and call_args[0]:  # check positional arguments
+            settings = call_args[0][0]
+            self.assertEqual(settings.get('LOG_LEVEL'), 'DEBUG')
+            self.assertEqual(settings.get('CONCURRENCY'), 16)
+        elif call_args and call_args[1]:  # check keyword arguments
+            settings = call_args[1].get('custom_settings', {})
+            self.assertEqual(settings.get('LOG_LEVEL'), 'DEBUG')
+            self.assertEqual(settings.get('CONCURRENCY'), 16)
+
+
+if __name__ == '__main__':
+    unittest.main()
tests/test_dedup_fix.py
CHANGED
@@ -19,7 +19,7 @@ from crawlo.pipelines.redis_dedup_pipeline import RedisDedupPipeline
 from crawlo.pipelines.memory_dedup_pipeline import MemoryDedupPipeline
 from crawlo.pipelines.bloom_dedup_pipeline import BloomDedupPipeline
 from crawlo.pipelines.pipeline_manager import PipelineManager
-from crawlo.exceptions import ItemDiscard
+from crawlo.exceptions import ItemDiscard
 
 
 class TestDedupFix(unittest.TestCase):
@@ -61,7 +61,7 @@ class TestDedupFix(unittest.TestCase):
             log_level='INFO'
         )
 
-        # Verify that ItemDiscard is raised
+        # Verify that an ItemDiscard exception is raised
         with self.assertRaises(ItemDiscard) as context:
             pipeline.process_item(self.test_item, Mock())
 
@@ -77,7 +77,7 @@ class TestDedupFix(unittest.TestCase):
         fingerprint = pipeline._generate_item_fingerprint(self.test_item)
         pipeline.seen_items.add(fingerprint)
 
-        # Verify that ItemDiscard is raised
+        # Verify that an ItemDiscard exception is raised
         with self.assertRaises(ItemDiscard) as context:
             pipeline.process_item(self.test_item, Mock())
 
@@ -93,7 +93,7 @@ class TestDedupFix(unittest.TestCase):
         fingerprint = pipeline._generate_item_fingerprint(self.test_item)
         pipeline.bloom_filter.add(fingerprint)
 
-        # Verify that ItemDiscard is raised
+        # Verify that an ItemDiscard exception is raised
         with self.assertRaises(ItemDiscard) as context:
             pipeline.process_item(self.test_item, Mock())
 
@@ -101,7 +101,7 @@ class TestDedupFix(unittest.TestCase):
         self.assertIn("可能重复的数据项:", str(context.exception))
 
     async def test_pipeline_manager_exception_handling(self):
-        """
+        """Test that the pipeline manager handles ItemDiscard exceptions correctly"""
         # Create a pipeline manager instance
         pipeline_manager = PipelineManager(self.mock_crawler)
 
@@ -148,7 +148,7 @@ class TestDedupFix(unittest.TestCase):
         mock_mysql_method.assert_not_called()
 
     async def test_pipeline_manager_dropitem_exception_handling(self):
-        """Test that the pipeline manager correctly handles
+        """Test that the pipeline manager handles ItemDiscard exceptions correctly (duplicate test)"""
         # Create a pipeline manager instance
         pipeline_manager = PipelineManager(self.mock_crawler)
 
@@ -162,9 +162,9 @@ class TestDedupFix(unittest.TestCase):
         # Mock the list of pipeline methods
         pipeline_manager.methods = []
 
-        # Create a mock dedup pipeline method (raises
+        # Create a mock dedup pipeline method (raises an ItemDiscard exception)
         mock_dedup_method = Mock()
-        mock_dedup_method.side_effect =
+        mock_dedup_method.side_effect = ItemDiscard("测试ItemDiscard异常")
 
         # Create a mock MySQL pipeline method
         mock_mysql_method = Mock()
@@ -179,7 +179,7 @@ class TestDedupFix(unittest.TestCase):
         # Set a side effect on common_call to simulate the exception
         async def mock_common_call_func(method, *args, **kwargs):
             if method == mock_dedup_method:
-                raise
+                raise ItemDiscard("测试ItemDiscard异常")
             return test_item
 
         mock_common_call.side_effect = mock_common_call_func
@@ -187,7 +187,7 @@ class TestDedupFix(unittest.TestCase):
         # Invoke the processing method
         await pipeline_manager.process_item(test_item)
 
-        # Verify
+        # Verify that the ItemDiscard exception was handled correctly
         # Verify create_task was called once (the item_discard event)
         self.assertEqual(mock_create_task.call_count, 1)
 
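The hunks above complete comments, docstrings, and two statements that were previously truncated: the dedup pipelines signal a duplicate by raising ItemDiscard, and the pipeline manager catches it, skips the remaining pipelines, and fires an item_discard event. A minimal sketch of that contract (the fingerprinting detail is an assumption, not crawlo's actual code):

    import hashlib

    class ItemDiscard(Exception):
        """Stand-in for crawlo.exceptions.ItemDiscard."""

    class MemoryDedupPipelineSketch:
        def __init__(self):
            self.seen_items = set()

        def _generate_item_fingerprint(self, item):
            # Hash the item's sorted key/value pairs into a stable fingerprint.
            payload = repr(sorted(item.items())).encode('utf-8')
            return hashlib.md5(payload).hexdigest()

        def process_item(self, item, spider):
            fingerprint = self._generate_item_fingerprint(item)
            if fingerprint in self.seen_items:
                # The manager is expected to catch this and stop the chain,
                # so later pipelines (e.g. MySQL) never see the duplicate.
                raise ItemDiscard(f'duplicate item: {fingerprint}')
            self.seen_items.add(fingerprint)
            return item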
crawlo/logging/async_handler.py
DELETED
@@ -1,181 +0,0 @@
-#!/usr/bin/python
-# -*- coding: UTF-8 -*-
-"""
-Asynchronous log handler
-Used to improve log writing performance
-"""
-
-import asyncio
-import logging
-import threading
-import queue
-from typing import Optional
-from concurrent_log_handler import ConcurrentRotatingFileHandler
-
-
-class AsyncLogHandler(logging.Handler):
-    """
-    Asynchronous log handler
-    Puts log records on a queue for asynchronous processing by a background thread
-    """
-
-    def __init__(self, handler: logging.Handler, queue_size: int = 10000):
-        """
-        Initialize the asynchronous log handler
-
-        Args:
-            handler: the actual log handler
-            queue_size: queue size
-        """
-        super().__init__()
-        self._handler = handler
-        self._queue = queue.Queue(maxsize=queue_size)
-        self._thread: Optional[threading.Thread] = None
-        self._stop_event = threading.Event()
-        self._started = False
-
-    def start(self):
-        """Start the asynchronous processing thread"""
-        if self._started:
-            return
-
-        self._started = True
-        self._stop_event.clear()
-        self._thread = threading.Thread(target=self._worker, daemon=True)
-        self._thread.start()
-
-    def stop(self):
-        """Stop the asynchronous processing thread"""
-        if not self._started:
-            return
-
-        self._started = False
-        self._stop_event.set()
-
-        # Send a sentinel message to wake the worker thread
-        try:
-            self._queue.put_nowait(None)
-        except queue.Full:
-            pass
-
-        # Wait for the thread to finish
-        if self._thread and self._thread.is_alive():
-            self._thread.join(timeout=5.0)
-
-        # Close the underlying handler
-        if self._handler:
-            self._handler.close()
-
-    def _worker(self):
-        """Worker thread function"""
-        while not self._stop_event.is_set():
-            try:
-                # Get a log record from the queue
-                record = self._queue.get(timeout=1.0)
-
-                # A sentinel message means stop
-                if record is None:
-                    break
-
-                # Process the log record
-                try:
-                    self._handler.emit(record)
-                except Exception:
-                    pass  # ignore processing errors
-
-                self._queue.task_done()
-
-            except queue.Empty:
-                continue
-            except Exception:
-                if not self._stop_event.is_set():
-                    continue
-                else:
-                    break
-
-    def emit(self, record):
-        """
-        Emit a log record
-
-        Args:
-            record: the log record
-        """
-        if not self._started:
-            self.start()
-
-        # Put the log record on the queue
-        try:
-            self._queue.put_nowait(record)
-        except queue.Full:
-            # Drop the record when the queue is full
-            pass
-
-    def flush(self):
-        """Flush the log handler"""
-        if self._handler:
-            self._handler.flush()
-
-    def close(self):
-        """Close the log handler"""
-        self.stop()
-        super().close()
-
-
-class AsyncConcurrentRotatingFileHandler(AsyncLogHandler):
-    """
-    Asynchronous concurrent rotating file handler
-    Combines asynchronous processing with concurrent file rotation
-    """
-
-    def __init__(self, filename, mode='a', maxBytes=0, backupCount=0,
-                 encoding=None, delay=False, queue_size: int = 10000):
-        """
-        Initialize the asynchronous concurrent rotating file handler
-
-        Args:
-            filename: log file name
-            mode: file open mode
-            maxBytes: maximum file size
-            backupCount: number of backup files
-            encoding: file encoding
-            delay: whether to delay opening the file
-            queue_size: queue size
-        """
-        handler = ConcurrentRotatingFileHandler(
-            filename=filename,
-            mode=mode,
-            maxBytes=maxBytes,
-            backupCount=backupCount,
-            encoding=encoding,
-            delay=delay
-        )
-        super().__init__(handler, queue_size)
-
-    @property
-    def baseFilename(self):
-        """Get the base file name"""
-        return self._handler.baseFilename if self._handler else None
-
-    @property
-    def maxBytes(self):
-        """Get the maximum byte count"""
-        return self._handler.maxBytes if self._handler else 0
-
-    @property
-    def backupCount(self):
-        """Get the backup count"""
-        return self._handler.backupCount if self._handler else 0
-
-
-def wrap_handler_async(handler: logging.Handler, queue_size: int = 10000) -> AsyncLogHandler:
-    """
-    Wrap an existing log handler in an asynchronous handler
-
-    Args:
-        handler: the handler to wrap
-        queue_size: queue size
-
-    Returns:
-        an asynchronous log handler
-    """
-    return AsyncLogHandler(handler, queue_size)
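The deleted handler above re-implemented queue-based, non-blocking logging by hand. The standard library provides the same pattern through logging.handlers.QueueHandler and QueueListener; a minimal sketch for comparison (the diff does not say what, if anything, replaced this module in 1.4.8):

    import logging
    import logging.handlers
    import queue

    log_queue = queue.Queue(maxsize=10000)

    # The logging call only enqueues records; the listener thread does the I/O.
    file_handler = logging.FileHandler('crawlo.log')
    listener = logging.handlers.QueueListener(log_queue, file_handler)
    listener.start()

    logger = logging.getLogger('crawlo')
    logger.addHandler(logging.handlers.QueueHandler(log_queue))
    logger.setLevel(logging.INFO)

    logger.info('written off the calling thread')
    listener.stop()  # drains remaining records and joins the worker thread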
crawlo/logging/monitor.py
DELETED
@@ -1,153 +0,0 @@
-#!/usr/bin/python
-# -*- coding: UTF-8 -*-
-"""
-Log performance monitor
-"""
-
-import time
-import threading
-from typing import Dict, List
-from collections import defaultdict, deque
-from .manager import get_config
-
-
-class LogPerformanceMonitor:
-    """
-    Log performance monitor
-    Tracks performance metrics of the logging system
-    """
-
-    def __init__(self):
-        self._lock = threading.Lock()
-        self._log_stats: Dict[str, deque] = defaultdict(lambda: deque(maxlen=1000))
-        self._enabled = False
-
-    def enable_monitoring(self):
-        """Enable performance monitoring"""
-        with self._lock:
-            self._enabled = True
-
-    def disable_monitoring(self):
-        """Disable performance monitoring"""
-        with self._lock:
-            self._enabled = False
-
-    def record_log_event(self, logger_name: str, level: str, message: str):
-        """
-        Record a log event
-
-        Args:
-            logger_name: logger name
-            level: log level
-            message: log message
-        """
-        if not self._enabled:
-            return
-
-        with self._lock:
-            event = {
-                'timestamp': time.time(),
-                'level': level,
-                'message_length': len(message),
-                'thread_id': threading.get_ident()
-            }
-            self._log_stats[logger_name].append(event)
-
-    def get_statistics(self, logger_name: str = None) -> Dict:
-        """
-        Get log statistics
-
-        Args:
-            logger_name: logger name; if None, return statistics for all loggers
-
-        Returns:
-            a dict of statistics
-        """
-        with self._lock:
-            if logger_name:
-                return self._calculate_stats(logger_name, self._log_stats[logger_name])
-            else:
-                result = {}
-                for name, events in self._log_stats.items():
-                    result[name] = self._calculate_stats(name, events)
-                return result
-
-    def _calculate_stats(self, logger_name: str, events: deque) -> Dict:
-        """Compute statistics"""
-        if not events:
-            return {
-                'logger_name': logger_name,
-                'total_logs': 0,
-                'log_rates': {},
-                'avg_message_length': 0
-            }
-
-        # Compute the distribution of log levels
-        level_counts = defaultdict(int)
-        total_length = 0
-
-        for event in events:
-            level_counts[event['level']] += 1
-            total_length += event['message_length']
-
-        # Compute the log rate (per minute)
-        if len(events) > 1:
-            time_span = events[-1]['timestamp'] - events[0]['timestamp']
-            if time_span > 0:
-                logs_per_minute = len(events) / (time_span / 60)
-            else:
-                logs_per_minute = len(events) * 60
-        else:
-            logs_per_minute = 0
-
-        return {
-            'logger_name': logger_name,
-            'total_logs': len(events),
-            'log_rates': {
-                'per_minute': logs_per_minute
-            },
-            'level_distribution': dict(level_counts),
-            'avg_message_length': total_length / len(events) if events else 0
-        }
-
-    def get_performance_report(self) -> str:
-        """
-        Get a performance report
-
-        Returns:
-            a formatted performance report string
-        """
-        stats = self.get_statistics()
-
-        report = ["=" * 50]
-        report.append("日志系统性能报告")
-        report.append("=" * 50)
-
-        config = get_config()
-        if config:
-            report.append(f"日志文件: {config.file_path or 'N/A'}")
-            report.append(f"文件启用: {config.file_enabled}")
-            report.append(f"控制台启用: {config.console_enabled}")
-            report.append("-" * 50)
-
-        for logger_name, stat in stats.items():
-            report.append(f"Logger: {logger_name}")
-            report.append(f"  总日志数: {stat['total_logs']}")
-            report.append(f"  日志速率: {stat['log_rates']['per_minute']:.2f} 条/分钟")
-            report.append(f"  平均消息长度: {stat['avg_message_length']:.2f} 字符")
-
-            if 'level_distribution' in stat:
-                levels = ", ".join([f"{k}: {v}" for k, v in stat['level_distribution'].items()])
-                report.append(f"  级别分布: {levels}")
-            report.append("")
-
-        return "\n".join(report)
-
-
-# Global instance
-_log_monitor = LogPerformanceMonitor()
-
-
-def get_monitor() -> LogPerformanceMonitor:
    """Get the log monitor instance"""
-    return _log_monitor
crawlo/logging/sampler.py
DELETED
@@ -1,167 +0,0 @@
-#!/usr/bin/python
-# -*- coding: UTF-8 -*-
-"""
-Log sampler
-Reduces log output under high load
-"""
-
-import random
-import time
-import threading
-from typing import Dict, Set
-from collections import defaultdict
-
-
-class LogSampler:
-    """
-    Log sampler
-    Supports several sampling strategies to reduce log volume
-    """
-
-    def __init__(self):
-        self._lock = threading.RLock()
-        self._sample_rates: Dict[str, float] = {}  # logger_name -> sample_rate
-        self._rate_limiters: Dict[str, TokenBucket] = {}  # logger_name -> rate_limiter
-        self._message_counts: Dict[str, Dict[str, int]] = defaultdict(lambda: defaultdict(int))
-        self._time_windows: Dict[str, Dict[str, float]] = defaultdict(lambda: defaultdict(float))
-
-    def set_sample_rate(self, logger_name: str, rate: float):
-        """
-        Set the sampling rate
-
-        Args:
-            logger_name: logger name
-            rate: sampling rate (0.0-1.0); 1.0 logs everything, 0.0 logs nothing
-        """
-        with self._lock:
-            self._sample_rates[logger_name] = max(0.0, min(1.0, rate))
-
-    def set_rate_limit(self, logger_name: str, messages_per_second: float):
-        """
-        Set a rate limit
-
-        Args:
-            logger_name: logger name
-            messages_per_second: maximum number of messages per second
-        """
-        with self._lock:
-            self._rate_limiters[logger_name] = TokenBucket(messages_per_second, messages_per_second)
-
-    def should_log(self, logger_name: str, message: str = None, level: str = None) -> bool:
-        """
-        Decide whether a record should be logged
-
-        Args:
-            logger_name: logger name
-            message: log message (used for deduplication)
-            level: log level
-
-        Returns:
-            whether the record should be logged
-        """
-        with self._lock:
-            # Check the sampling rate
-            if logger_name in self._sample_rates:
-                sample_rate = self._sample_rates[logger_name]
-                if random.random() > sample_rate:
-                    return False
-
-            # Check the rate limit
-            if logger_name in self._rate_limiters:
-                if not self._rate_limiters[logger_name].consume(1):
-                    return False
-
-            # Deduplicate messages (an identical message is only logged a few times per window)
-            if message:
-                key = f"{level}:{message}" if level else message
-                current_time = time.time()
-
-                # Reset the count if more than 60 seconds have passed since the last record
-                if current_time - self._time_windows[logger_name][key] > 60:
-                    self._message_counts[logger_name][key] = 0
-                    self._time_windows[logger_name][key] = current_time
-
-                # Cap how many times the same message is logged
-                if self._message_counts[logger_name][key] >= 5:  # log the same message at most 5 times
-                    return False
-
-                self._message_counts[logger_name][key] += 1
-
-            return True
-
-    def reset(self):
-        """Reset the sampler state"""
-        with self._lock:
-            self._sample_rates.clear()
-            self._rate_limiters.clear()
-            self._message_counts.clear()
-            self._time_windows.clear()
-
-
-class TokenBucket:
-    """
-    Token bucket algorithm implementation
-    Used for rate limiting
-    """
-
-    def __init__(self, rate: float, capacity: float):
-        """
-        Initialize the token bucket
-
-        Args:
-            rate: tokens generated per second
-            capacity: maximum capacity of the bucket
-        """
-        self._rate = rate
-        self._capacity = capacity
-        self._tokens = capacity
-        self._last_time = time.time()
-        self._lock = threading.Lock()
-
-    def consume(self, tokens: float) -> bool:
-        """
-        Consume tokens
-
-        Args:
-            tokens: number of tokens to consume
-
-        Returns:
-            whether consumption succeeded
-        """
-        with self._lock:
-            current_time = time.time()
-            # Refill tokens
-            elapsed = current_time - self._last_time
-            self._tokens = min(self._capacity, self._tokens + elapsed * self._rate)
-            self._last_time = current_time
-
-            # Try to consume tokens
-            if self._tokens >= tokens:
-                self._tokens -= tokens
-                return True
-            else:
-                return False
-
-
-# Global instance
-_log_sampler = LogSampler()
-
-
-def get_sampler() -> LogSampler:
-    """Get the log sampler instance"""
-    return _log_sampler
-
-
-def should_log(logger_name: str, message: str = None, level: str = None) -> bool:
-    """
-    Convenience function for deciding whether a record should be logged
-
-    Args:
-        logger_name: logger name
-        message: log message
-        level: log level
-
-    Returns:
-        whether the record should be logged
-    """
-    return get_sampler().should_log(logger_name, message, level)
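The probabilistic sampling this deleted module provided can also be expressed as a stdlib logging.Filter attached to a noisy logger. A sketch of the sampling part only (again, the diff shows no successor for this module, so this is illustration rather than crawlo's replacement):

    import logging
    import random

    class SamplingFilter(logging.Filter):
        def __init__(self, rate: float):
            super().__init__()
            self.rate = max(0.0, min(1.0, rate))  # clamp to [0.0, 1.0]

        def filter(self, record: logging.LogRecord) -> bool:
            # Keep roughly `rate` of the records passing through this logger.
            return random.random() < self.rate

    noisy = logging.getLogger('crawlo.downloader')
    noisy.addFilter(SamplingFilter(0.1))  # emit about 10% of this logger's records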