crawlo 1.4.4__py3-none-any.whl → 1.4.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of crawlo might be problematic.
Files changed (120)
  1. crawlo/__init__.py +11 -15
  2. crawlo/__version__.py +1 -1
  3. crawlo/commands/startproject.py +24 -0
  4. crawlo/core/engine.py +2 -2
  5. crawlo/core/scheduler.py +4 -4
  6. crawlo/crawler.py +8 -7
  7. crawlo/downloader/__init__.py +5 -2
  8. crawlo/downloader/cffi_downloader.py +3 -1
  9. crawlo/extension/__init__.py +2 -2
  10. crawlo/filters/aioredis_filter.py +8 -1
  11. crawlo/filters/memory_filter.py +8 -1
  12. crawlo/initialization/built_in.py +13 -4
  13. crawlo/initialization/core.py +5 -4
  14. crawlo/interfaces.py +24 -0
  15. crawlo/middleware/__init__.py +7 -4
  16. crawlo/middleware/middleware_manager.py +15 -8
  17. crawlo/middleware/proxy.py +171 -348
  18. crawlo/mode_manager.py +45 -11
  19. crawlo/network/response.py +374 -69
  20. crawlo/pipelines/mysql_pipeline.py +340 -189
  21. crawlo/pipelines/pipeline_manager.py +2 -2
  22. crawlo/project.py +2 -4
  23. crawlo/settings/default_settings.py +42 -30
  24. crawlo/stats_collector.py +10 -1
  25. crawlo/task_manager.py +2 -2
  26. crawlo/templates/project/items.py.tmpl +2 -2
  27. crawlo/templates/project/middlewares.py.tmpl +9 -89
  28. crawlo/templates/project/pipelines.py.tmpl +8 -68
  29. crawlo/templates/project/settings.py.tmpl +10 -55
  30. crawlo/templates/project/settings_distributed.py.tmpl +20 -22
  31. crawlo/templates/project/settings_gentle.py.tmpl +5 -0
  32. crawlo/templates/project/settings_high_performance.py.tmpl +5 -0
  33. crawlo/templates/project/settings_minimal.py.tmpl +25 -1
  34. crawlo/templates/project/settings_simple.py.tmpl +5 -0
  35. crawlo/templates/run.py.tmpl +1 -8
  36. crawlo/templates/spider/spider.py.tmpl +5 -108
  37. crawlo/tools/__init__.py +0 -11
  38. crawlo/utils/__init__.py +17 -1
  39. crawlo/utils/db_helper.py +226 -319
  40. crawlo/utils/error_handler.py +313 -67
  41. crawlo/utils/fingerprint.py +3 -4
  42. crawlo/utils/misc.py +82 -0
  43. crawlo/utils/request.py +55 -66
  44. crawlo/utils/selector_helper.py +138 -0
  45. crawlo/utils/spider_loader.py +185 -45
  46. crawlo/utils/text_helper.py +95 -0
  47. crawlo-1.4.6.dist-info/METADATA +329 -0
  48. {crawlo-1.4.4.dist-info → crawlo-1.4.6.dist-info}/RECORD +110 -69
  49. tests/authenticated_proxy_example.py +10 -6
  50. tests/bug_check_test.py +251 -0
  51. tests/direct_selector_helper_test.py +97 -0
  52. tests/explain_mysql_update_behavior.py +77 -0
  53. tests/ofweek_scrapy/ofweek_scrapy/items.py +12 -0
  54. tests/ofweek_scrapy/ofweek_scrapy/middlewares.py +100 -0
  55. tests/ofweek_scrapy/ofweek_scrapy/pipelines.py +13 -0
  56. tests/ofweek_scrapy/ofweek_scrapy/settings.py +85 -0
  57. tests/ofweek_scrapy/ofweek_scrapy/spiders/__init__.py +4 -0
  58. tests/ofweek_scrapy/ofweek_scrapy/spiders/ofweek_spider.py +162 -0
  59. tests/ofweek_scrapy/scrapy.cfg +11 -0
  60. tests/performance_comparison.py +4 -5
  61. tests/simple_crawlo_test.py +1 -2
  62. tests/simple_follow_test.py +39 -0
  63. tests/simple_response_selector_test.py +95 -0
  64. tests/simple_selector_helper_test.py +155 -0
  65. tests/simple_selector_test.py +208 -0
  66. tests/simple_url_test.py +74 -0
  67. tests/simulate_mysql_update_test.py +140 -0
  68. tests/test_asyncmy_usage.py +57 -0
  69. tests/test_crawler_process_import.py +39 -0
  70. tests/test_crawler_process_spider_modules.py +48 -0
  71. tests/test_crawlo_proxy_integration.py +8 -2
  72. tests/test_downloader_proxy_compatibility.py +24 -20
  73. tests/test_edge_cases.py +7 -5
  74. tests/test_encoding_core.py +57 -0
  75. tests/test_encoding_detection.py +127 -0
  76. tests/test_factory_compatibility.py +197 -0
  77. tests/test_mysql_pipeline_config.py +165 -0
  78. tests/test_mysql_pipeline_error.py +99 -0
  79. tests/test_mysql_pipeline_init_log.py +83 -0
  80. tests/test_mysql_pipeline_integration.py +133 -0
  81. tests/test_mysql_pipeline_refactor.py +144 -0
  82. tests/test_mysql_pipeline_refactor_simple.py +86 -0
  83. tests/test_mysql_pipeline_robustness.py +196 -0
  84. tests/test_mysql_pipeline_types.py +89 -0
  85. tests/test_mysql_update_columns.py +94 -0
  86. tests/test_optimized_selector_naming.py +101 -0
  87. tests/test_priority_behavior.py +18 -18
  88. tests/test_proxy_middleware.py +104 -8
  89. tests/test_proxy_middleware_enhanced.py +1 -5
  90. tests/test_proxy_middleware_integration.py +7 -2
  91. tests/test_proxy_middleware_refactored.py +25 -2
  92. tests/test_proxy_only.py +84 -0
  93. tests/test_proxy_with_downloader.py +153 -0
  94. tests/test_real_scenario_proxy.py +17 -17
  95. tests/test_response_follow.py +105 -0
  96. tests/test_response_selector_methods.py +93 -0
  97. tests/test_response_url_methods.py +71 -0
  98. tests/test_response_urljoin.py +87 -0
  99. tests/test_scrapy_style_encoding.py +113 -0
  100. tests/test_selector_helper.py +101 -0
  101. tests/test_selector_optimizations.py +147 -0
  102. tests/test_spider_loader.py +50 -0
  103. tests/test_spider_loader_comprehensive.py +70 -0
  104. tests/test_spiders/__init__.py +1 -0
  105. tests/test_spiders/test_spider.py +10 -0
  106. tests/verify_mysql_warnings.py +110 -0
  107. crawlo/middleware/simple_proxy.py +0 -65
  108. crawlo/tools/anti_crawler.py +0 -269
  109. crawlo/utils/class_loader.py +0 -26
  110. crawlo/utils/enhanced_error_handler.py +0 -357
  111. crawlo-1.4.4.dist-info/METADATA +0 -190
  112. tests/simple_log_test.py +0 -58
  113. tests/simple_test.py +0 -48
  114. tests/test_framework_logger.py +0 -67
  115. tests/test_framework_startup.py +0 -65
  116. tests/test_mode_change.py +0 -73
  117. {crawlo-1.4.4.dist-info → crawlo-1.4.6.dist-info}/WHEEL +0 -0
  118. {crawlo-1.4.4.dist-info → crawlo-1.4.6.dist-info}/entry_points.txt +0 -0
  119. {crawlo-1.4.4.dist-info → crawlo-1.4.6.dist-info}/top_level.txt +0 -0
  120. /tests/{final_command_test_report.md → ofweek_scrapy/ofweek_scrapy/__init__.py} +0 -0
crawlo/utils/enhanced_error_handler.py DELETED
@@ -1,357 +0,0 @@
- #!/usr/bin/python
- # -*- coding:UTF-8 -*-
- """
- Error handling utilities
- Provide more detailed and more consistent error handling and logging mechanisms
- """
- import traceback
- from datetime import datetime
- from functools import wraps
- from typing import Optional, Callable, Any, Dict, List
-
- from crawlo.utils.log import get_logger
-
-
- class ErrorContext:
-     """Error context information"""
-
-     def __init__(self, context: str = "", module: str = "", function: str = ""):
-         self.context = context
-         self.module = module
-         self.function = function
-         self.timestamp = datetime.now()
-
-     def __str__(self):
-         parts = []
-         if self.module:
-             parts.append(f"Module: {self.module}")
-         if self.function:
-             parts.append(f"Function: {self.function}")
-         if self.context:
-             parts.append(f"Context: {self.context}")
-         parts.append(f"Time: {self.timestamp.strftime('%Y-%m-%d %H:%M:%S')}")
-         return " | ".join(parts)
-
-
- class DetailedException(Exception):
-     """Base class for exceptions that carry detailed information"""
-
-     def __init__(self, message: str, context: Optional[ErrorContext] = None,
-                  error_code: Optional[str] = None, **kwargs):
-         super().__init__(message)
-         self.context = context
-         self.error_code = error_code
-         self.details = kwargs
-         self.timestamp = datetime.now()
-
-     def __str__(self):
-         base_msg = super().__str__()
-         if self.context:
-             return f"{base_msg} ({self.context})"
-         return base_msg
-
-     def get_full_details(self) -> Dict:
-         """Return the full error details"""
-         return {
-             "message": str(self),
-             "error_code": self.error_code,
-             "context": str(self.context) if self.context else None,
-             "details": self.details,
-             "timestamp": self.timestamp.isoformat(),
-             "exception_type": self.__class__.__name__
-         }
-
-
- class EnhancedErrorHandler:
-     """Enhanced error handler"""
-
-     def __init__(self, logger_name: str = __name__, log_level: str = 'ERROR'):
-         self.logger = get_logger(logger_name, log_level)
-         self.error_history: List[Dict] = []  # error history
-         self.max_history_size = 100  # maximum number of history entries
-
-     def handle_error(self, exception: Exception, context: Optional[ErrorContext] = None,
-                      raise_error: bool = True, log_error: bool = True,
-                      extra_info: Optional[Dict] = None) -> Dict:
-         """
-         Enhanced error handling
-
-         Args:
-             exception: the exception object
-             context: error context information
-             raise_error: whether to re-raise the exception
-             log_error: whether to log the error
-             extra_info: additional error information
-
-         Returns:
-             A dict containing the error details
-         """
-         # Build the error details
-         error_details = {
-             "exception": exception,
-             "exception_type": type(exception).__name__,
-             "message": str(exception),
-             "context": str(context) if context else None,
-             "timestamp": datetime.now().isoformat(),
-             "traceback": traceback.format_exc() if log_error else None,
-             "extra_info": extra_info or {}
-         }
-
-         # Record into the history
-         self._record_error(error_details)
-
-         # Log the error
-         if log_error:
-             self._log_error(error_details)
-
-         # Re-raise the exception
-         if raise_error:
-             raise exception
-
-         return error_details
-
-     def _log_error(self, error_details: Dict):
-         """Log the error"""
-         # Basic error information
-         context_info = error_details.get("context", "")
-         message = error_details["message"]
-         error_msg = f"{message} [{context_info}]" if context_info else message
-
-         # Log the error
-         self.logger.error(error_msg)
-
-         # Log the detailed traceback
-         if error_details.get("traceback"):
-             self.logger.debug(f"Detailed error information:\n{error_details['traceback']}")
-
-         # Log the extra information
-         if error_details.get("extra_info"):
-             self.logger.debug(f"Extra info: {error_details['extra_info']}")
-
-     def _record_error(self, error_details: Dict):
-         """Record the error into the history"""
-         self.error_history.append(error_details)
-         # Cap the history size
-         if len(self.error_history) > self.max_history_size:
-             self.error_history.pop(0)
-
-     def safe_call(self, func: Callable, *args, default_return=None,
-                   context: Optional[ErrorContext] = None, **kwargs) -> Any:
-         """
-         Safely call a function, catching and handling exceptions
-
-         Args:
-             func: the function to call
-             *args: positional arguments for the function
-             default_return: default return value
-             context: error context
-             **kwargs: keyword arguments for the function
-
-         Returns:
-             The function's return value, or the default value
-         """
-         try:
-             return func(*args, **kwargs)
-         except Exception as e:
-             self.handle_error(e, context=context, raise_error=False)
-             return default_return
-
-     def retry_on_failure(self, max_retries: int = 3, delay: float = 1.0,
-                          exceptions: tuple = (Exception,), backoff_factor: float = 1.0,
-                          context: Optional[ErrorContext] = None):
-         """
-         Decorator: retry on failure (enhanced)
-
-         Args:
-             max_retries: maximum number of retries
-             delay: initial retry interval (seconds)
-             exceptions: exception types that should trigger a retry
-             backoff_factor: backoff factor (the interval is multiplied by this factor on each retry)
-             context: error context
-         """
-         def decorator(func):
-             @wraps(func)
-             async def async_wrapper(*args, **kwargs):
-                 last_exception = None
-                 current_delay = delay
-
-                 for attempt in range(max_retries + 1):
-                     try:
-                         return await func(*args, **kwargs)
-                     except exceptions as e:
-                         last_exception = e
-                         if attempt < max_retries:
-                             # Record retry information
-                             retry_context = ErrorContext(
-                                 context=f"Function {func.__name__} failed (attempt {attempt + 1}/{max_retries + 1})",
-                                 module=context.module if context else "",
-                                 function=func.__name__
-                             ) if context else None
-
-                             self.logger.warning(
-                                 f"Function {func.__name__} failed (attempt {attempt + 1}/{max_retries + 1}): {e}"
-                             )
-
-                             import asyncio
-                             await asyncio.sleep(current_delay)
-                             current_delay *= backoff_factor  # exponential backoff
-                         else:
-                             # Final attempt failed
-                             final_context = ErrorContext(
-                                 context=f"Function {func.__name__} failed, maximum retries reached",
-                                 module=context.module if context else "",
-                                 function=func.__name__
-                             ) if context else None
-
-                             self.logger.error(
-                                 f"Function {func.__name__} failed, maximum retries reached: {e}"
-                             )
-                             raise last_exception
-
-             @wraps(func)
-             def sync_wrapper(*args, **kwargs):
-                 last_exception = None
-                 current_delay = delay
-
-                 for attempt in range(max_retries + 1):
-                     try:
-                         return func(*args, **kwargs)
-                     except exceptions as e:
-                         last_exception = e
-                         if attempt < max_retries:
-                             # Record retry information
-                             retry_context = ErrorContext(
-                                 context=f"Function {func.__name__} failed (attempt {attempt + 1}/{max_retries + 1})",
-                                 module=context.module if context else "",
-                                 function=func.__name__
-                             ) if context else None
-
-                             self.logger.warning(
-                                 f"Function {func.__name__} failed (attempt {attempt + 1}/{max_retries + 1}): {e}"
-                             )
-
-                             import time
-                             time.sleep(current_delay)
-                             current_delay *= backoff_factor  # exponential backoff
-                         else:
-                             # Final attempt failed
-                             final_context = ErrorContext(
-                                 context=f"Function {func.__name__} failed, maximum retries reached",
-                                 module=context.module if context else "",
-                                 function=func.__name__
-                             ) if context else None
-
-                             self.logger.error(
-                                 f"Function {func.__name__} failed, maximum retries reached: {e}"
-                             )
-                             raise last_exception
-
-             # Return the appropriate wrapper depending on whether the function is async
-             import inspect
-             if inspect.iscoroutinefunction(func):
-                 return async_wrapper
-             else:
-                 return sync_wrapper
-
-         return decorator
-
-     def get_error_history(self) -> List[Dict]:
-         """Return the error history"""
-         return self.error_history.copy()
-
-     def clear_error_history(self):
-         """Clear the error history"""
-         self.error_history.clear()
-
-
- # Global enhanced error handler instance
- enhanced_error_handler = EnhancedErrorHandler()
-
-
- def handle_exception(context: str = "", module: str = "", function: str = "",
-                      raise_error: bool = True, log_error: bool = True,
-                      error_code: Optional[str] = None):
-     """
-     Decorator: handle function exceptions (enhanced)
-
-     Args:
-         context: error context description
-         module: module name
-         function: function name
-         raise_error: whether to re-raise the exception
-         log_error: whether to log the error
-         error_code: error code
-     """
-     def decorator(func):
-         @wraps(func)
-         async def async_wrapper(*args, **kwargs):
-             try:
-                 return await func(*args, **kwargs)
-             except Exception as e:
-                 error_context = ErrorContext(
-                     context=f"{context} - {func.__name__}",
-                     module=module,
-                     function=func.__name__
-                 )
-
-                 # If it is already a detailed exception, preserve the existing information
-                 if isinstance(e, DetailedException):
-                     # Make sure the context information is complete
-                     if not e.context:
-                         e.context = error_context
-                     enhanced_error_handler.handle_error(
-                         e, context=e.context,
-                         raise_error=raise_error, log_error=log_error
-                     )
-                 else:
-                     # Wrap it as a detailed exception
-                     detailed_e = DetailedException(
-                         str(e), context=error_context, error_code=error_code
-                     )
-                     enhanced_error_handler.handle_error(
-                         detailed_e, context=error_context,
-                         raise_error=raise_error, log_error=log_error
-                     )
-                 if not raise_error:
-                     return None
-
-         @wraps(func)
-         def sync_wrapper(*args, **kwargs):
-             try:
-                 return func(*args, **kwargs)
-             except Exception as e:
-                 error_context = ErrorContext(
-                     context=f"{context} - {func.__name__}",
-                     module=module,
-                     function=func.__name__
-                 )
-
-                 # If it is already a detailed exception, preserve the existing information
-                 if isinstance(e, DetailedException):
-                     # Make sure the context information is complete
-                     if not e.context:
-                         e.context = error_context
-                     enhanced_error_handler.handle_error(
-                         e, context=e.context,
-                         raise_error=raise_error, log_error=log_error
-                     )
-                 else:
-                     # Wrap it as a detailed exception
-                     detailed_e = DetailedException(
-                         str(e), context=error_context, error_code=error_code
-                     )
-                     enhanced_error_handler.handle_error(
-                         detailed_e, context=error_context,
-                         raise_error=raise_error, log_error=log_error
-                     )
-                 if not raise_error:
-                     return None
-
-         # Return the appropriate wrapper depending on whether the function is async
-         import inspect
-         if inspect.iscoroutinefunction(func):
-             return async_wrapper
-         else:
-             return sync_wrapper
-
-     return decorator
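For readers migrating off the removed module: the deleted `enhanced_error_handler.py` exposed a module-level `enhanced_error_handler` instance plus the `handle_exception` and `retry_on_failure` decorators. Below is a minimal sketch of how code written against crawlo 1.4.4 typically used that API, based only on the signatures in the hunk above; `fetch_page` and `flaky_fetch` are hypothetical callers, not part of the package, and these imports no longer resolve on 1.4.6.

```python
# Sketch only: works against crawlo 1.4.4, where this module still exists.
from crawlo.utils.enhanced_error_handler import (
    ErrorContext,
    enhanced_error_handler,
    handle_exception,
)

@handle_exception(context="page download", module="my_spider", raise_error=False)
def fetch_page(url: str) -> str:  # hypothetical caller
    # Any exception raised here is wrapped in a DetailedException, logged,
    # and swallowed (the wrapper returns None) because raise_error=False.
    raise ValueError(f"boom while fetching {url}")

@enhanced_error_handler.retry_on_failure(max_retries=2, delay=0.5, backoff_factor=2.0)
def flaky_fetch(url: str) -> str:  # hypothetical caller
    # Retried up to 2 times, sleeping 0.5s and then 1.0s between attempts.
    raise ConnectionError(url)

page = fetch_page("https://example.com")  # returns None, error is logged

# safe_call returns default_return instead of propagating the exception.
html = enhanced_error_handler.safe_call(
    flaky_fetch,
    "https://example.com",
    default_return="",
    context=ErrorContext(context="safe fetch", module="my_spider"),
)
print(enhanced_error_handler.get_error_history()[-1]["exception_type"])
```

Per the file list above, `crawlo/utils/error_handler.py` grows by 313 lines in 1.4.6, which is presumably where this functionality now lives, although that hunk is not shown here.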
crawlo-1.4.4.dist-info/METADATA DELETED
@@ -1,190 +0,0 @@
- Metadata-Version: 2.4
- Name: crawlo
- Version: 1.4.4
- Summary: Crawlo is a high-performance Python crawler framework built on asynchronous I/O, with support for distributed crawling.
- Home-page: https://github.com/crawl-coder/Crawlo.git
- Author: crawl-coder
- Author-email: crawlo@qq.com
- License: MIT
- Classifier: Programming Language :: Python :: 3
- Classifier: License :: OSI Approved :: MIT License
- Classifier: Operating System :: OS Independent
- Requires-Python: >=3.6
- Description-Content-Type: text/markdown
- Requires-Dist: aiohttp>=3.12.14
- Requires-Dist: aiomysql>=0.2.0
- Requires-Dist: aioredis>=2.0.1
- Requires-Dist: asyncmy>=0.2.10
- Requires-Dist: cssselect>=1.2.0
- Requires-Dist: dateparser>=1.2.2
- Requires-Dist: httpx[http2]>=0.27.0
- Requires-Dist: curl-cffi>=0.13.0
- Requires-Dist: lxml>=5.2.1
- Requires-Dist: motor>=3.7.0
- Requires-Dist: parsel>=1.9.1
- Requires-Dist: pydantic>=2.11.7
- Requires-Dist: pymongo>=4.11
- Requires-Dist: PyMySQL>=1.1.1
- Requires-Dist: python-dateutil>=2.9.0.post0
- Requires-Dist: redis>=6.2.0
- Requires-Dist: requests>=2.32.4
- Requires-Dist: six>=1.17.0
- Requires-Dist: ujson>=5.9.0
- Requires-Dist: urllib3>=2.5.0
- Requires-Dist: w3lib>=2.1.2
- Requires-Dist: rich>=14.1.0
- Requires-Dist: astor>=0.8.1
- Requires-Dist: watchdog>=6.0.0
- Provides-Extra: render
- Requires-Dist: webdriver-manager>=4.0.0; extra == "render"
- Requires-Dist: playwright; extra == "render"
- Requires-Dist: selenium>=3.141.0; extra == "render"
- Provides-Extra: all
- Requires-Dist: bitarray>=1.5.3; extra == "all"
- Requires-Dist: PyExecJS>=1.5.1; extra == "all"
- Requires-Dist: pymongo>=3.10.1; extra == "all"
- Requires-Dist: redis-py-cluster>=2.1.0; extra == "all"
- Requires-Dist: webdriver-manager>=4.0.0; extra == "all"
- Requires-Dist: playwright; extra == "all"
- Requires-Dist: selenium>=3.141.0; extra == "all"
-
- # Crawlo Crawler Framework
-
- Crawlo is a high-performance, extensible Python crawler framework that supports both standalone and distributed deployment.
-
- ## Features
-
- - High-performance asynchronous crawling
- - Multiple downloaders supported (aiohttp, httpx, curl-cffi)
- - Built-in data cleaning and validation
- - Distributed crawling support
- - Flexible middleware system
- - Powerful configuration management
- - Detailed logging and monitoring
- - Compatible with Windows and Linux
-
- ## Installation
-
- ```bash
- pip install crawlo
- ```
-
- Or install from source:
-
- ```bash
- git clone https://github.com/your-username/crawlo.git
- cd crawlo
- pip install -r requirements.txt
- pip install .
- ```
-
- ## Quick Start
-
- ```python
- from crawlo import Spider
-
- class MySpider(Spider):
-     name = 'example'
-
-     def parse(self, response):
-         # parsing logic
-         pass
-
- # Run the spider
- # crawlo run example
- ```
-
- ## Logging System
-
- Crawlo ships with a powerful logging system that supports a variety of configuration options:
-
- ### Basic Configuration
-
- ```python
- from crawlo.logging import configure_logging, get_logger
-
- # Configure the logging system
- configure_logging(
-     LOG_LEVEL='INFO',
-     LOG_FILE='logs/app.log',
-     LOG_MAX_BYTES=10*1024*1024,  # 10MB
-     LOG_BACKUP_COUNT=5
- )
-
- # Get a logger
- logger = get_logger('my_module')
- logger.info('This is a log message')
- ```
-
- ### Advanced Configuration
-
- ```python
- # Configure console and file log levels separately
- configure_logging(
-     LOG_LEVEL='INFO',
-     LOG_CONSOLE_LEVEL='WARNING',  # console shows only WARNING and above
-     LOG_FILE_LEVEL='DEBUG',       # file records DEBUG and above
-     LOG_FILE='logs/app.log',
-     LOG_INCLUDE_THREAD_ID=True,   # include the thread ID
-     LOG_INCLUDE_PROCESS_ID=True   # include the process ID
- )
-
- # Module-specific log levels
- configure_logging(
-     LOG_LEVEL='WARNING',
-     LOG_LEVELS={
-         'my_module.debug': 'DEBUG',
-         'my_module.info': 'INFO'
-     }
- )
- ```
-
- ### Performance Monitoring
-
- ```python
- from crawlo.logging import get_monitor
-
- # Enable logging performance monitoring
- monitor = get_monitor()
- monitor.enable_monitoring()
-
- # Get a performance report
- report = monitor.get_performance_report()
- print(report)
- ```
-
- ### Log Sampling
-
- ```python
- from crawlo.logging import get_sampler
-
- # Set a sampling rate (record only 30% of the logs)
- sampler = get_sampler()
- sampler.set_sample_rate('my_module', 0.3)
-
- # Set a rate limit (at most 100 log entries per second)
- sampler.set_rate_limit('my_module', 100)
- ```
-
- ## Windows Compatibility Notes
-
- When using log rotation on Windows, you may run into file-locking problems. To avoid this, it is recommended to install the `concurrent-log-handler` library:
-
- ```bash
- pip install concurrent-log-handler
- ```
-
- Crawlo automatically detects this library and uses it to provide better Windows compatibility.
-
- If `concurrent-log-handler` is not installed, the following error may occur on Windows:
- ```
- PermissionError: [WinError 32] The process cannot access the file because it is being used by another process.
- ```
-
- ## Documentation
-
- See the [documentation](https://your-docs-url.com) for more information.
-
- ## License
-
- MIT
tests/simple_log_test.py DELETED
@@ -1,58 +0,0 @@
- #!/usr/bin/env python
- # -*- coding: UTF-8 -*-
- """
- Simple logging system test
- """
- import sys
- import os
- sys.path.insert(0, '/')
-
- # Make sure the log directory exists
- os.makedirs('/examples/ofweek_standalone/logs', exist_ok=True)
-
- # Test the logging system
- from crawlo.utils.log import LoggerManager, get_logger
-
- print("=== Simple logging system test ===")
-
- # 1. Configure the logging system directly
- print("1. Configuring the logging system...")
- LoggerManager.configure(
-     LOG_LEVEL='INFO',
-     LOG_FILE='/Users/oscar/projects/Crawlo/examples/ofweek_standalone/logs/simple_test.log'
- )
-
- # 2. Create a logger
- print("2. Creating a logger...")
- logger = get_logger('test.logger')
- print(f" Logger: {logger}")
- print(f" Handlers: {len(logger.handlers)}")
-
- for i, handler in enumerate(logger.handlers):
-     handler_type = type(handler).__name__
-     print(f" Handler {i}: {handler_type}")
-     if hasattr(handler, 'baseFilename'):
-         print(f" File: {handler.baseFilename}")
-
- # 3. Test log output
- print("3. Testing log output...")
- logger.info("This is a test INFO message")
- logger.debug("This is a test DEBUG message")
- logger.warning("This is a test WARNING message")
-
- print("4. Checking the log file...")
- log_file = '/Users/oscar/projects/Crawlo/examples/ofweek_standalone/logs/simple_test.log'
- if os.path.exists(log_file):
-     print(f" Log file exists: {log_file}")
-     with open(log_file, 'r', encoding='utf-8') as f:
-         content = f.read()
-         print(f" Content length: {len(content)} characters")
-         if content:
-             print(" File content:")
-             print(content)
-         else:
-             print(" File is empty")
- else:
-     print(f" Log file does not exist: {log_file}")
-
- print("=== Test complete ===")
tests/simple_test.py DELETED
@@ -1,48 +0,0 @@
- #!/usr/bin/env python
- # -*- coding: utf-8 -*-
- """
- Simplified framework test
- """
- import os
- import sys
- sys.path.insert(0, '/')
-
- # Basic setup
- test_log_file = '/Users/oscar/projects/Crawlo/simple_test.log'
- if os.path.exists(test_log_file):
-     os.remove(test_log_file)
-
- # The simplest possible test
- try:
-     from crawlo.utils.log import LoggerManager
-
-     print("Configuring the logging system...")
-     LoggerManager.configure(
-         LOG_LEVEL='INFO',
-         LOG_FILE=test_log_file
-     )
-
-     from crawlo.utils.log import get_logger
-     logger = get_logger('test.simple')
-
-     print("Testing log output...")
-     logger.info("This is a test message")
-     logger.info("Crawlo framework initialization complete")
-     logger.info("Crawlo Framework Started 1.3.3")
-
-     print("Checking the log file...")
-     if os.path.exists(test_log_file):
-         with open(test_log_file, 'r', encoding='utf-8') as f:
-             content = f.read()
-         print(f"Log file content: {len(content)} characters")
-         print("Content:")
-         print(content)
-     else:
-         print("Log file was not created")
-
- except Exception as e:
-     print(f"Error: {e}")
-     import traceback
-     traceback.print_exc()
-
- print("Test complete")