crawlo 1.1.0__py3-none-any.whl → 1.1.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of crawlo might be problematic. Click here for more details.
- crawlo/__init__.py +33 -24
- crawlo/__version__.py +1 -1
- crawlo/cli.py +40 -40
- crawlo/commands/__init__.py +13 -13
- crawlo/commands/check.py +594 -155
- crawlo/commands/genspider.py +125 -110
- crawlo/commands/list.py +147 -119
- crawlo/commands/run.py +285 -170
- crawlo/commands/startproject.py +111 -101
- crawlo/commands/stats.py +188 -167
- crawlo/core/__init__.py +2 -2
- crawlo/core/engine.py +158 -158
- crawlo/core/processor.py +40 -40
- crawlo/core/scheduler.py +57 -57
- crawlo/crawler.py +494 -492
- crawlo/downloader/__init__.py +78 -78
- crawlo/downloader/aiohttp_downloader.py +199 -199
- crawlo/downloader/cffi_downloader.py +242 -277
- crawlo/downloader/httpx_downloader.py +246 -246
- crawlo/event.py +11 -11
- crawlo/exceptions.py +78 -78
- crawlo/extension/__init__.py +31 -31
- crawlo/extension/log_interval.py +49 -49
- crawlo/extension/log_stats.py +44 -44
- crawlo/extension/logging_extension.py +34 -34
- crawlo/filters/__init__.py +37 -37
- crawlo/filters/aioredis_filter.py +150 -150
- crawlo/filters/memory_filter.py +202 -202
- crawlo/items/__init__.py +23 -23
- crawlo/items/base.py +21 -21
- crawlo/items/fields.py +53 -53
- crawlo/items/items.py +104 -104
- crawlo/middleware/__init__.py +21 -21
- crawlo/middleware/default_header.py +32 -32
- crawlo/middleware/download_delay.py +28 -28
- crawlo/middleware/middleware_manager.py +135 -135
- crawlo/middleware/proxy.py +245 -245
- crawlo/middleware/request_ignore.py +30 -30
- crawlo/middleware/response_code.py +18 -18
- crawlo/middleware/response_filter.py +26 -26
- crawlo/middleware/retry.py +90 -90
- crawlo/network/__init__.py +7 -7
- crawlo/network/request.py +203 -203
- crawlo/network/response.py +166 -166
- crawlo/pipelines/__init__.py +13 -13
- crawlo/pipelines/console_pipeline.py +39 -39
- crawlo/pipelines/mongo_pipeline.py +116 -116
- crawlo/pipelines/mysql_batch_pipline.py +272 -272
- crawlo/pipelines/mysql_pipeline.py +195 -195
- crawlo/pipelines/pipeline_manager.py +56 -56
- crawlo/project.py +153 -0
- crawlo/settings/__init__.py +7 -7
- crawlo/settings/default_settings.py +166 -168
- crawlo/settings/setting_manager.py +99 -99
- crawlo/spider/__init__.py +129 -129
- crawlo/stats_collector.py +59 -59
- crawlo/subscriber.py +106 -106
- crawlo/task_manager.py +27 -27
- crawlo/templates/crawlo.cfg.tmpl +10 -10
- crawlo/templates/project/__init__.py.tmpl +3 -3
- crawlo/templates/project/items.py.tmpl +17 -17
- crawlo/templates/project/middlewares.py.tmpl +75 -75
- crawlo/templates/project/pipelines.py.tmpl +63 -63
- crawlo/templates/project/settings.py.tmpl +54 -54
- crawlo/templates/project/spiders/__init__.py.tmpl +5 -5
- crawlo/templates/spider/spider.py.tmpl +31 -31
- crawlo/utils/__init__.py +7 -7
- crawlo/utils/date_tools.py +233 -233
- crawlo/utils/db_helper.py +343 -343
- crawlo/utils/func_tools.py +82 -82
- crawlo/utils/log.py +128 -128
- crawlo/utils/pqueue.py +173 -173
- crawlo/utils/request.py +267 -267
- crawlo/utils/spider_loader.py +62 -62
- crawlo/utils/system.py +11 -11
- crawlo/utils/tools.py +4 -4
- crawlo/utils/url.py +39 -39
- crawlo-1.1.1.dist-info/METADATA +220 -0
- crawlo-1.1.1.dist-info/RECORD +100 -0
- examples/__init__.py +7 -0
- examples/baidu_spider/__init__.py +7 -0
- examples/baidu_spider/demo.py +94 -0
- examples/baidu_spider/items.py +46 -0
- examples/baidu_spider/middleware.py +49 -0
- examples/baidu_spider/pipeline.py +55 -0
- examples/baidu_spider/run.py +27 -0
- examples/baidu_spider/settings.py +121 -0
- examples/baidu_spider/spiders/__init__.py +7 -0
- examples/baidu_spider/spiders/bai_du.py +61 -0
- examples/baidu_spider/spiders/miit.py +159 -0
- examples/baidu_spider/spiders/sina.py +79 -0
- tests/__init__.py +7 -7
- tests/test_proxy_health_check.py +32 -32
- tests/test_proxy_middleware_integration.py +136 -136
- tests/test_proxy_providers.py +56 -56
- tests/test_proxy_stats.py +19 -19
- tests/test_proxy_strategies.py +59 -59
- crawlo/utils/concurrency_manager.py +0 -125
- crawlo/utils/project.py +0 -197
- crawlo-1.1.0.dist-info/METADATA +0 -49
- crawlo-1.1.0.dist-info/RECORD +0 -97
- examples/gxb/__init__.py +0 -0
- examples/gxb/items.py +0 -36
- examples/gxb/run.py +0 -16
- examples/gxb/settings.py +0 -72
- examples/gxb/spider/__init__.py +0 -2
- examples/gxb/spider/miit_spider.py +0 -180
- examples/gxb/spider/telecom_device.py +0 -129
- {crawlo-1.1.0.dist-info → crawlo-1.1.1.dist-info}/WHEEL +0 -0
- {crawlo-1.1.0.dist-info → crawlo-1.1.1.dist-info}/entry_points.txt +0 -0
- {crawlo-1.1.0.dist-info → crawlo-1.1.1.dist-info}/top_level.txt +0 -0
crawlo/utils/func_tools.py
CHANGED
|
# -*- coding: UTF-8 -*-
"""Utilities for normalising spider callback output into a single async stream."""
from typing import Union, AsyncGenerator, Generator
from inspect import isgenerator, isasyncgen

from crawlo import Response, Request, Item
from crawlo.exceptions import TransformTypeError

# A callback may yield either follow-up requests or scraped items.
T = Union[Request, Item]


async def transform(
        func: Union[Generator[T, None, None], AsyncGenerator[T, None]],
        response: Response
) -> AsyncGenerator[Union[T, Exception], None]:
    """
    Normalise a callback's output (sync or async generator) into one
    unified async generator.

    Args:
        func: The sync or async generator returned by a spider callback.
        response: The response the callback was invoked with; its ``depth``
            metadata is propagated onto any yielded ``Request``.

    Yields:
        Union[T, Exception]: Requests/items produced by the callback, or the
        exception object itself if the callback raised while being consumed
        (consumers are expected to isinstance-check for ``Exception``).

    Raises:
        TransformTypeError: If ``func`` is neither a generator nor an
            async generator.
    """

    def _set_meta(obj: T) -> T:
        """Propagate the parent response's crawl depth onto new requests."""
        if isinstance(obj, Request):
            obj.meta.setdefault('depth', response.meta.get('depth', 0))
        return obj

    # Validate up-front so misuse raises a clear error instead of being
    # swallowed by the except clause below.
    if not (isgenerator(func) or isasyncgen(func)):
        raise TransformTypeError(
            f'Callback must return generator or async generator, got {type(func).__name__}'
        )

    try:
        if isgenerator(func):
            # Synchronous generator: drain it inline.
            for item in func:
                yield _set_meta(item)
        else:
            # Asynchronous generator.
            async for item in func:
                yield _set_meta(item)
    except Exception as e:
        # Surface callback errors to the consumer rather than crashing the
        # engine's iteration loop.
        yield e
crawlo/utils/log.py
CHANGED
|
# -*- coding: UTF-8 -*-
"""
Logger manager: safe version that builds stringified cache keys so that
unhashable configuration values can never break logger lookup.
"""
import os
from logging import (
    Formatter,
    StreamHandler,
    FileHandler,
    Logger,
    DEBUG,
    INFO,
    WARNING,
    ERROR,
    CRITICAL,
)

LOG_FORMAT = '%(asctime)s - [%(name)s] - %(levelname)s: %(message)s'


class LoggerManager:
    # Cache of already-built loggers, keyed by "name|level|filename".
    logger_cache = {}
    _default_filename = None
    _default_level = INFO
    _default_file_level = INFO
    _default_console_level = INFO
    _default_log_format = LOG_FORMAT
    _default_encoding = 'utf-8'

    _level_map = {
        'DEBUG': DEBUG,
        'INFO': INFO,
        'WARNING': WARNING,
        'ERROR': ERROR,
        'CRITICAL': CRITICAL,
    }

    @classmethod
    def _to_level(cls, level):
        """Safely coerce *level* to an int logging level, defaulting to INFO.

        NOTE: ints (including bools, since ``bool`` subclasses ``int``) pass
        through unchanged — ``False`` is used elsewhere to disable handlers.
        """
        if level is None:
            return INFO
        if isinstance(level, int):
            return level
        if isinstance(level, str):
            return cls._level_map.get(level.upper(), INFO)
        if hasattr(level, 'get'):  # e.g. a SettingManager or a plain dict
            nested = level.get('LOG_LEVEL')
            if isinstance(nested, int):
                return nested
            if isinstance(nested, str):
                return cls._level_map.get(nested.upper(), INFO)
        return INFO

    @classmethod
    def configure(cls, settings=None, **kwargs):
        """
        Configure logging defaults from a settings object or keyword args.

        A *settings* object exposing ``.get`` takes priority; otherwise the
        values are looked up in ``**kwargs``.
        """
        if hasattr(settings, 'get'):
            get_val = settings.get
        else:
            def get_val(key, default=None):
                return kwargs.get(key, default)

        level = get_val('LOG_LEVEL', 'INFO')

        cls._default_filename = get_val('LOG_FILE')
        cls._default_level = cls._to_level(level)
        cls._default_file_level = cls._to_level(get_val('LOG_FILE_LEVEL', level))
        cls._default_console_level = cls._to_level(get_val('LOG_CONSOLE_LEVEL', level))
        cls._default_log_format = get_val('LOG_FORMAT', LOG_FORMAT)
        cls._default_encoding = get_val('LOG_ENCODING', 'utf-8')

    @classmethod
    def get_logger(cls, name='default', level=None, filename=None):
        """
        Return a cached or newly-built logger; only essential parameters
        are exposed — everything else comes from the configured defaults.
        """
        # Resolve final parameters against the class-level defaults.
        final_level = cls._default_level if level is None else cls._to_level(level)
        final_filename = cls._default_filename if filename is None else filename

        # Stringified key, e.g. "my_spider|20|logs/app.log" — safe against
        # any unhashable inputs.
        key = '|'.join([name, str(final_level), final_filename or 'no_file'])

        cached = cls.logger_cache.get(key)
        if cached is not None:
            return cached

        # Build a standalone Logger (deliberately not registered in the
        # global logging hierarchy).
        new_logger = Logger(name=name)
        new_logger.setLevel(final_level)
        formatter = Formatter(cls._default_log_format)

        # Console handler — skipped only when the console level was
        # explicitly configured to False.
        if cls._default_console_level is not False:
            console_handler = StreamHandler()
            console_handler.setFormatter(formatter)
            console_handler.setLevel(cls._default_console_level)
            new_logger.addHandler(console_handler)

        # Optional file handler; failures are reported but non-fatal.
        if final_filename:
            try:
                parent_dir = os.path.dirname(final_filename)
                if parent_dir and not os.path.exists(parent_dir):
                    os.makedirs(parent_dir, exist_ok=True)

                file_handler = FileHandler(final_filename, encoding=cls._default_encoding)
                file_handler.setFormatter(formatter)
                file_handler.setLevel(cls._default_file_level)
                new_logger.addHandler(file_handler)
            except Exception as e:
                print(f"[Logger] 无法创建日志文件 {final_filename}: {e}")

        cls.logger_cache[key] = new_logger
        return new_logger


# Global shortcut
get_logger = LoggerManager.get_logger