crawlo 1.0.3__py3-none-any.whl → 1.0.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of crawlo might be problematic; see the registry's advisory page for details.

Files changed (96):
  1. crawlo/__init__.py +25 -9
  2. crawlo/__version__.py +1 -1
  3. crawlo/core/__init__.py +2 -2
  4. crawlo/core/engine.py +158 -158
  5. crawlo/core/processor.py +40 -40
  6. crawlo/core/scheduler.py +57 -57
  7. crawlo/crawler.py +424 -242
  8. crawlo/downloader/__init__.py +78 -78
  9. crawlo/downloader/aiohttp_downloader.py +200 -259
  10. crawlo/downloader/cffi_downloader.py +277 -0
  11. crawlo/downloader/httpx_downloader.py +246 -187
  12. crawlo/event.py +11 -11
  13. crawlo/exceptions.py +73 -64
  14. crawlo/extension/__init__.py +31 -31
  15. crawlo/extension/log_interval.py +49 -49
  16. crawlo/extension/log_stats.py +44 -44
  17. crawlo/extension/logging_extension.py +35 -0
  18. crawlo/filters/__init__.py +37 -37
  19. crawlo/filters/aioredis_filter.py +150 -158
  20. crawlo/filters/memory_filter.py +202 -202
  21. crawlo/items/__init__.py +62 -62
  22. crawlo/items/items.py +115 -119
  23. crawlo/middleware/__init__.py +21 -21
  24. crawlo/middleware/default_header.py +32 -32
  25. crawlo/middleware/download_delay.py +28 -28
  26. crawlo/middleware/middleware_manager.py +135 -140
  27. crawlo/middleware/proxy.py +246 -0
  28. crawlo/middleware/request_ignore.py +30 -30
  29. crawlo/middleware/response_code.py +18 -18
  30. crawlo/middleware/response_filter.py +26 -26
  31. crawlo/middleware/retry.py +90 -90
  32. crawlo/network/__init__.py +7 -7
  33. crawlo/network/request.py +203 -204
  34. crawlo/network/response.py +166 -166
  35. crawlo/pipelines/__init__.py +13 -13
  36. crawlo/pipelines/console_pipeline.py +39 -39
  37. crawlo/pipelines/mongo_pipeline.py +116 -116
  38. crawlo/pipelines/mysql_batch_pipline.py +273 -134
  39. crawlo/pipelines/mysql_pipeline.py +195 -195
  40. crawlo/pipelines/pipeline_manager.py +56 -56
  41. crawlo/settings/__init__.py +7 -7
  42. crawlo/settings/default_settings.py +169 -93
  43. crawlo/settings/setting_manager.py +99 -99
  44. crawlo/spider/__init__.py +41 -36
  45. crawlo/stats_collector.py +59 -59
  46. crawlo/subscriber.py +106 -106
  47. crawlo/task_manager.py +27 -27
  48. crawlo/templates/item_template.tmpl +21 -21
  49. crawlo/templates/project_template/main.py +32 -32
  50. crawlo/templates/project_template/setting.py +189 -189
  51. crawlo/templates/spider_template.tmpl +30 -30
  52. crawlo/utils/__init__.py +7 -7
  53. crawlo/utils/concurrency_manager.py +124 -124
  54. crawlo/utils/date_tools.py +233 -177
  55. crawlo/utils/db_helper.py +344 -0
  56. crawlo/utils/func_tools.py +82 -82
  57. crawlo/utils/log.py +129 -39
  58. crawlo/utils/pqueue.py +173 -173
  59. crawlo/utils/project.py +59 -59
  60. crawlo/utils/request.py +267 -122
  61. crawlo/utils/system.py +11 -11
  62. crawlo/utils/tools.py +5 -303
  63. crawlo/utils/url.py +39 -39
  64. {crawlo-1.0.3.dist-info → crawlo-1.0.5.dist-info}/METADATA +49 -48
  65. crawlo-1.0.5.dist-info/RECORD +84 -0
  66. {crawlo-1.0.3.dist-info → crawlo-1.0.5.dist-info}/top_level.txt +1 -0
  67. examples/__init__.py +0 -0
  68. examples/gxb/__init__.py +0 -0
  69. examples/gxb/items.py +36 -0
  70. examples/gxb/run.py +15 -0
  71. examples/gxb/settings.py +71 -0
  72. examples/gxb/spider/__init__.py +0 -0
  73. examples/gxb/spider/miit_spider.py +180 -0
  74. examples/gxb/spider/telecom_device_licenses.py +129 -0
  75. tests/__init__.py +7 -7
  76. tests/test_proxy_health_check.py +33 -0
  77. tests/test_proxy_middleware_integration.py +137 -0
  78. tests/test_proxy_providers.py +57 -0
  79. tests/test_proxy_stats.py +20 -0
  80. tests/test_proxy_strategies.py +60 -0
  81. crawlo/downloader/playwright_downloader.py +0 -161
  82. crawlo/filters/redis_filter.py +0 -120
  83. crawlo-1.0.3.dist-info/RECORD +0 -80
  84. tests/baidu_spider/__init__.py +0 -7
  85. tests/baidu_spider/demo.py +0 -94
  86. tests/baidu_spider/items.py +0 -25
  87. tests/baidu_spider/middleware.py +0 -49
  88. tests/baidu_spider/pipeline.py +0 -55
  89. tests/baidu_spider/request_fingerprints.txt +0 -9
  90. tests/baidu_spider/run.py +0 -27
  91. tests/baidu_spider/settings.py +0 -78
  92. tests/baidu_spider/spiders/__init__.py +0 -7
  93. tests/baidu_spider/spiders/bai_du.py +0 -61
  94. tests/baidu_spider/spiders/sina.py +0 -79
  95. {crawlo-1.0.3.dist-info → crawlo-1.0.5.dist-info}/WHEEL +0 -0
  96. {crawlo-1.0.3.dist-info → crawlo-1.0.5.dist-info}/entry_points.txt +0 -0
@@ -0,0 +1,344 @@
1
+ # -*- coding: utf-8 -*-
2
+ import json
3
+ import re
4
+ import pprint
5
+ from typing import Any, Union, List, Dict, Tuple, Optional
6
+ from datetime import date, time, datetime
7
+
8
+ from crawlo.utils.log import get_logger
9
+
10
# Module-level logger for this helper module.
logger = get_logger(__name__)

# Cache of compiled regular expressions, keyed by pattern string,
# so repeated calls to get_info() do not recompile the same pattern.
_REGEXPS: Dict[str, "re.Pattern"] = {}
14
+
15
+
16
def make_insert_sql(
    table: str,
    data: Dict[str, Any],
    auto_update: bool = False,
    update_columns: Tuple = (),
    insert_ignore: bool = False,
) -> str:
    """
    Build a MySQL INSERT or REPLACE statement for a single row.

    Args:
        table (str): Table name.
        data (dict): Column -> value mapping for the row.
        auto_update (bool): Use REPLACE INTO (fully overwrite an existing record).
        update_columns (tuple or list): Columns to update on duplicate key;
            when given, ``auto_update`` is ignored.
        insert_ignore (bool): Use INSERT IGNORE to silently skip duplicates.

    Returns:
        str: The generated SQL statement.
    """
    keys = [f"`{key}`" for key in data.keys()]
    keys_str = list2str(keys).replace("'", "")

    # Render each value as a SQL literal.  None must become the SQL keyword
    # null *per value*; the previous implementation did a blanket
    # ``sql.replace("None", "null")`` on the finished statement, which
    # corrupted legitimate strings (and identifiers) containing the
    # substring "None".
    literals = []
    for value in data.values():
        formatted = format_sql_value(value)
        literals.append("null" if formatted is None else repr(formatted))
    values_str = f"({', '.join(literals)})"

    if update_columns:
        if not isinstance(update_columns, (tuple, list)):
            update_columns = (update_columns,)
        update_clause = ", ".join(f"`{key}`=VALUES(`{key}`)" for key in update_columns)
        ignore_flag = " IGNORE" if insert_ignore else ""
        sql = f"INSERT{ignore_flag} INTO `{table}` {keys_str} VALUES {values_str} ON DUPLICATE KEY UPDATE {update_clause}"

    elif auto_update:
        sql = f"REPLACE INTO `{table}` {keys_str} VALUES {values_str}"

    else:
        ignore_flag = " IGNORE" if insert_ignore else ""
        sql = f"INSERT{ignore_flag} INTO `{table}` {keys_str} VALUES {values_str}"

    return sql
57
+
58
+
59
def make_update_sql(
    table: str,
    data: Dict[str, Any],
    condition: str,
) -> str:
    """
    Build a MySQL UPDATE statement.

    Args:
        table (str): Table name.
        data (dict): Column -> new value mapping for the SET clause.
        condition (str): WHERE condition, e.g. "id = 1".

    Returns:
        str: The generated SQL statement.
    """
    def _render(column: str, value: Any) -> str:
        # Normalise the value first, then pick the literal form:
        # strings are repr-quoted, None becomes SQL null, numbers pass through.
        formatted = format_sql_value(value)
        if formatted is None:
            return f"`{column}`=null"
        if isinstance(formatted, str):
            return f"`{column}`={formatted!r}"
        return f"`{column}`={formatted}"

    assignments = ", ".join(_render(column, value) for column, value in data.items())
    return f"UPDATE `{table}` SET {assignments} WHERE {condition}"
88
+
89
+
90
def make_batch_sql(
    table: str,
    datas: List[Dict[str, Any]],
    auto_update: bool = False,
    update_columns: Tuple = (),
    update_columns_value: Tuple = (),
) -> Optional[Tuple[str, List[List[Any]]]]:
    """
    Build a batch INSERT statement and its matching value rows.

    Supports INSERT IGNORE, REPLACE INTO and ON DUPLICATE KEY UPDATE.

    Args:
        table (str): Table name.
        datas (list of dict): Row dicts, e.g. [{'col1': val1}, ...].
        auto_update (bool): Use REPLACE INTO instead of INSERT.
        update_columns (tuple or list): Columns to update on duplicate key.
        update_columns_value (tuple): Fixed expressions for those columns,
            e.g. ('%s',) or ('default',).

    Returns:
        tuple[str, list[list]] | None: (SQL statement, value rows);
        None when ``datas`` is empty.
    """
    if not datas:
        return None

    # Collect all distinct column names in first-seen order.  The previous
    # set comprehension yielded a different column order on every run,
    # making the generated SQL nondeterministic.
    keys: List[str] = []
    seen = set()
    for data in datas:
        if isinstance(data, dict):
            for key in data:
                if key not in seen:
                    seen.add(key)
                    keys.append(key)

    values_list = []
    for data in datas:
        if not isinstance(data, dict):
            continue  # skip non-dict rows

        row = []
        for key in keys:
            raw_value = data.get(key)
            try:
                row.append(format_sql_value(raw_value))
            except Exception as e:
                logger.error(f"{key}: {raw_value} (类型: {type(raw_value)}) -> {e}")
                # Keep the row the same length as the placeholder list.
                # Previously a failed value was dropped entirely, which
                # shifted every later value into the wrong column.
                row.append(None)
        values_list.append(row)

    keys_str = ", ".join(f"`{key}`" for key in keys)
    placeholders_str = ", ".join(["%s"] * len(keys))

    if update_columns:
        if not isinstance(update_columns, (tuple, list)):
            update_columns = (update_columns,)

        if update_columns_value:
            # Pair each column with its caller-supplied expression.
            update_pairs = [
                f"`{key}`={value}"
                for key, value in zip(update_columns, update_columns_value)
            ]
        else:
            update_pairs = [
                f"`{key}`=VALUES(`{key}`)" for key in update_columns
            ]
        update_clause = ", ".join(update_pairs)
        sql = f"INSERT INTO `{table}` ({keys_str}) VALUES ({placeholders_str}) ON DUPLICATE KEY UPDATE {update_clause}"

    elif auto_update:
        sql = f"REPLACE INTO `{table}` ({keys_str}) VALUES ({placeholders_str})"

    else:
        sql = f"INSERT IGNORE INTO `{table}` ({keys_str}) VALUES ({placeholders_str})"

    return sql, values_list
159
+
160
+
161
def format_sql_value(value: Any) -> Union[str, int, float, None]:
    """
    Normalise a Python value for use in a SQL statement.

    Strings are stripped, containers are JSON-encoded, booleans become
    0/1, numbers pass through unchanged, and date/time objects are
    stringified.  None maps to SQL NULL.

    Args:
        value (Any): The value to normalise.

    Returns:
        str | int | float | None: The normalised value; None means SQL NULL.

    Raises:
        ValueError: If a container cannot be serialised to JSON.
        TypeError: For any unsupported value type.
    """
    if value is None:
        return None

    if isinstance(value, str):
        return value.strip()

    if isinstance(value, (list, tuple, dict)):
        # default=str lets embedded datetimes etc. serialise instead of failing.
        try:
            return json.dumps(value, ensure_ascii=False, default=str)
        except Exception as e:
            raise ValueError(f"Failed to serialize container to JSON: {value}, error: {e}")

    # bool must be tested before int/float: bool is a subclass of int and
    # should be stored as 0/1.
    if isinstance(value, bool):
        return int(value)

    if isinstance(value, (int, float)):
        return value

    # datetime is a subclass of date, so this tuple covers all three types.
    if isinstance(value, (date, time, datetime)):
        return str(value)

    raise TypeError(f"Unsupported value type: {type(value)}, value: {value}")
196
+
197
+
198
def list2str(datas: List[Any]) -> str:
    """
    Render a list as a SQL tuple literal string.

    e.g. [1, 2] -> "(1, 2)"; a single element gets no trailing comma:
    [1] -> "(1)".

    Args:
        datas (list): Input list.

    Returns:
        str: The tuple-style string representation.
    """
    rendered = str(tuple(datas))
    # str(tuple) leaves a trailing comma for 1-tuples: "(1,)" -> "(1)".
    if rendered.endswith(",)"):
        rendered = rendered[:-2] + ")"
    return rendered
212
+
213
+
214
def get_info(
    html: Union[str, Any],
    regexps: Union[str, List[str]],
    allow_repeat: bool = True,
    fetch_one: bool = False,
    split: Optional[str] = None,
) -> Union[str, List[str], Tuple]:
    """
    Extract information from HTML text, trying several regex patterns in
    order until one matches.

    Args:
        html (str): HTML content (or anything convertible to str).
        regexps (str or list of str): Regex pattern(s), tried in order.
        allow_repeat (bool): Whether duplicate results are kept.
        fetch_one (bool): Only take the first match (returns its groups).
        split (str, optional): When given, join the results with this string.

    Returns:
        str | list | tuple: The matches, as a string, list or tuple
        depending on the arguments.
    """
    if isinstance(regexps, str):
        regexps = [regexps]

    text = str(html)
    infos: Union[List[str], Tuple] = []
    for regex in regexps:
        if not regex:
            continue

        # Compile once and cache; the same patterns are reused heavily.
        if regex not in _REGEXPS:
            _REGEXPS[regex] = re.compile(regex, re.S)

        if fetch_one:
            match = _REGEXPS[regex].search(text)
            if match:
                infos = match.groups()
                break
            # Bug fix: the loop used to break here even without a match,
            # so fallback patterns after the first were never tried in
            # fetch_one mode (unlike the findall branch below).
        else:
            found = _REGEXPS[regex].findall(text)
            if found:
                infos = found
                break

    if fetch_one:
        if not infos:
            infos = ("",)  # preserve the historical "no match" sentinel
        return infos[0] if len(infos) == 1 else infos

    if not allow_repeat:
        # Deduplicate while preserving first-occurrence order.
        infos = sorted(set(infos), key=infos.index)

    return split.join(infos) if split else infos
262
+
263
+
264
def get_json(json_str: Union[str, Any]) -> Dict:
    """
    Parse a JSON string defensively, tolerating common non-standard
    formats (single quotes, unquoted keys).

    Attempts to repair typical formatting errors before re-parsing.

    Args:
        json_str (str): The JSON text.

    Returns:
        dict: The parsed dict, or an empty dict on failure.
    """
    if not json_str:
        return {}

    try:
        return json.loads(json_str)
    except Exception as e1:
        # Pre-bind so the log line below can never hit an unbound local:
        # previously, if json_str had no .strip() (e.g. a list), the
        # except block raised NameError on `cleaned` instead of logging.
        cleaned = json_str
        try:
            cleaned = json_str.strip().replace("'", '"')
            # Quote bare keys such as  {key: 1}  ->  {"key": 1}.
            keys = get_info(cleaned, r'(\w+):')
            for key in keys:
                cleaned = cleaned.replace(f"{key}:", f'"{key}":')
            return json.loads(cleaned) if cleaned else {}
        except Exception as e2:
            logger.error(
                f"JSON 解析失败\n"
                f"原始内容: {json_str}\n"
                f"错误1: {e1}\n"
                f"修复后: {cleaned}\n"
                f"错误2: {e2}"
            )
            return {}
297
+
298
+
299
def dumps_json(
    data: Union[str, dict, list, Any],
    indent: int = 4,
    sort_keys: bool = False,
    ensure_ascii: bool = False,
    skip_keys: bool = True,
    default_repr: bool = False,
) -> str:
    """
    Pretty-print any object as a readable string: JSON first, falling
    back to pprint and finally repr().

    Non-serialisable values (datetime, ObjectId, ...) are stringified
    via ``default=str``.

    Args:
        data (Any): Input data — string, dict, list, etc.
        indent (int): JSON indentation width.
        sort_keys (bool): Sort dictionary keys.
        ensure_ascii (bool): Escape non-ASCII characters (False keeps
            e.g. CJK text readable).
        skip_keys (bool): Skip illegal keys instead of raising.
        default_repr (bool): Fall back to repr() as the last resort.

    Returns:
        str: A formatted string suitable for logging or printing.
    """
    try:
        if isinstance(data, str):
            if not data.strip():
                return '""'
            # Strings are parsed first so they render as structured JSON.
            data = get_json(data)

        options = dict(
            ensure_ascii=ensure_ascii,
            indent=indent,
            skipkeys=skip_keys,
            sort_keys=sort_keys,
            default=str,
        )
        return json.dumps(data, **options)

    except (UnicodeDecodeError, ValueError, TypeError, OverflowError):
        # JSON failed: degrade to pprint, then (optionally) repr.
        try:
            return pprint.pformat(data, indent=indent, width=80, compact=True)
        except Exception:
            if default_repr:
                return repr(data)
            return f"<无法序列化的对象: {type(data).__name__}>"
@@ -1,82 +1,82 @@
1
- # -*- coding: UTF-8 -*-
2
- from typing import Union, AsyncGenerator, Generator
3
- from inspect import isgenerator, isasyncgen
4
- from crawlo import Response, Request, Item
5
- from crawlo.exceptions import TransformTypeError
6
-
7
- T = Union[Request, Item]
8
-
9
-
10
- async def transform(
11
- func: Union[Generator[T, None, None], AsyncGenerator[T, None]],
12
- response: Response
13
- ) -> AsyncGenerator[Union[T, Exception], None]:
14
- """
15
- 转换回调函数的输出为统一异步生成器
16
-
17
- Args:
18
- func: 同步或异步生成器函数
19
- response: 当前响应对象
20
-
21
- Yields:
22
- Union[T, Exception]: 生成请求/Item或异常对象
23
-
24
- Raises:
25
- TransformTypeError: 当输入类型不符合要求时
26
- """
27
-
28
- def _set_meta(obj: T) -> T:
29
- """统一设置请求的depth元数据"""
30
- if isinstance(obj, Request):
31
- obj.meta.setdefault('depth', response.meta.get('depth', 0))
32
- return obj
33
-
34
- # 类型检查前置
35
- if not (isgenerator(func) or isasyncgen(func)):
36
- raise TransformTypeError(
37
- f'Callback must return generator or async generator, got {type(func).__name__}'
38
- )
39
-
40
- try:
41
- if isgenerator(func):
42
- # 同步生成器处理
43
- for item in func:
44
- yield _set_meta(item)
45
- else:
46
- # 异步生成器处理
47
- async for item in func:
48
- yield _set_meta(item)
49
-
50
- except Exception as e:
51
- yield e
52
-
53
- # #!/usr/bin/python
54
- # # -*- coding:UTF-8 -*-
55
- # from typing import Callable, Union
56
- # from inspect import isgenerator, isasyncgen
57
- # from crawlo import Response, Request, Item
58
- # from crawlo.exceptions import TransformTypeError
59
- #
60
- #
61
- # T = Union[Request, Item]
62
- #
63
- #
64
- # async def transform(func: Callable, response: Response):
65
- # def set_request(t: T) -> T:
66
- # if isinstance(t, Request):
67
- # t.meta['depth'] = response.meta['depth']
68
- # return t
69
- # try:
70
- # if isgenerator(func):
71
- # for f in func:
72
- # yield set_request(f)
73
- # elif isasyncgen(func):
74
- # async for f in func:
75
- # yield set_request(f)
76
- # else:
77
- # raise TransformTypeError(
78
- # f'callback return type error: {type(func)} must be `generator` or `async generator`'
79
- # )
80
- # except Exception as exp:
81
- # yield exp
82
-
1
+ # -*- coding: UTF-8 -*-
2
+ from typing import Union, AsyncGenerator, Generator
3
+ from inspect import isgenerator, isasyncgen
4
+ from crawlo import Response, Request, Item
5
+ from crawlo.exceptions import TransformTypeError
6
+
7
+ T = Union[Request, Item]
8
+
9
+
10
+ async def transform(
11
+ func: Union[Generator[T, None, None], AsyncGenerator[T, None]],
12
+ response: Response
13
+ ) -> AsyncGenerator[Union[T, Exception], None]:
14
+ """
15
+ 转换回调函数的输出为统一异步生成器
16
+
17
+ Args:
18
+ func: 同步或异步生成器函数
19
+ response: 当前响应对象
20
+
21
+ Yields:
22
+ Union[T, Exception]: 生成请求/Item或异常对象
23
+
24
+ Raises:
25
+ TransformTypeError: 当输入类型不符合要求时
26
+ """
27
+
28
+ def _set_meta(obj: T) -> T:
29
+ """统一设置请求的depth元数据"""
30
+ if isinstance(obj, Request):
31
+ obj.meta.setdefault('depth', response.meta.get('depth', 0))
32
+ return obj
33
+
34
+ # 类型检查前置
35
+ if not (isgenerator(func) or isasyncgen(func)):
36
+ raise TransformTypeError(
37
+ f'Callback must return generator or async generator, got {type(func).__name__}'
38
+ )
39
+
40
+ try:
41
+ if isgenerator(func):
42
+ # 同步生成器处理
43
+ for item in func:
44
+ yield _set_meta(item)
45
+ else:
46
+ # 异步生成器处理
47
+ async for item in func:
48
+ yield _set_meta(item)
49
+
50
+ except Exception as e:
51
+ yield e
52
+
53
+ # #!/usr/bin/python
54
+ # # -*- coding:UTF-8 -*-
55
+ # from typing import Callable, Union
56
+ # from inspect import isgenerator, isasyncgen
57
+ # from crawlo import Response, Request, Item
58
+ # from crawlo.exceptions import TransformTypeError
59
+ #
60
+ #
61
+ # T = Union[Request, Item]
62
+ #
63
+ #
64
+ # async def transform(func: Callable, response: Response):
65
+ # def set_request(t: T) -> T:
66
+ # if isinstance(t, Request):
67
+ # t.meta['depth'] = response.meta['depth']
68
+ # return t
69
+ # try:
70
+ # if isgenerator(func):
71
+ # for f in func:
72
+ # yield set_request(f)
73
+ # elif isasyncgen(func):
74
+ # async for f in func:
75
+ # yield set_request(f)
76
+ # else:
77
+ # raise TransformTypeError(
78
+ # f'callback return type error: {type(func)} must be `generator` or `async generator`'
79
+ # )
80
+ # except Exception as exp:
81
+ # yield exp
82
+
crawlo/utils/log.py CHANGED
@@ -1,39 +1,129 @@
1
- #!/usr/bin/python
2
- # -*- coding:UTF-8 -*-
3
- """
4
- # @Time : 2024-04-11 09:03
5
- # @Author : oscar
6
- # @Desc : None
7
- """
8
- from logging import Formatter, StreamHandler, Logger, INFO
9
-
10
- LOG_FORMAT = '%(asctime)s - [%(name)s] - %(levelname)s: %(message)s'
11
-
12
-
13
- class LoggerManager(object):
14
- logger_cache = {}
15
-
16
- def __init__(self):
17
- pass
18
-
19
- @classmethod
20
- def get_logger(cls, name: str = 'default', level=None, log_format: str = LOG_FORMAT):
21
- key = (name, level)
22
-
23
- def gen_logger():
24
- log_formatter = Formatter(log_format)
25
- handler = StreamHandler()
26
- handler.setFormatter(log_formatter)
27
- handler.setLevel(level or INFO)
28
-
29
- _logger = Logger(name=name)
30
- _logger.addHandler(handler)
31
- _logger.setLevel(level or INFO)
32
- cls.logger_cache[key] = _logger
33
- return _logger
34
-
35
- return cls.logger_cache.get(key, None) or gen_logger()
36
-
37
-
38
- get_logger = LoggerManager.get_logger
39
-
1
+ # -*- coding: UTF-8 -*-
2
+ """
3
+ 日志管理器:安全版本,使用字符串化 key 避免 unhashable 问题
4
+ """
5
+ import os
6
+ from logging import (
7
+ Formatter,
8
+ StreamHandler,
9
+ FileHandler,
10
+ Logger,
11
+ DEBUG,
12
+ INFO,
13
+ WARNING,
14
+ ERROR,
15
+ CRITICAL,
16
+ )
17
+
18
+ LOG_FORMAT = '%(asctime)s - [%(name)s] - %(levelname)s: %(message)s'
19
+
20
+
21
+ class LoggerManager:
22
+ logger_cache = {}
23
+ _default_filename = None
24
+ _default_level = INFO
25
+ _default_file_level = INFO
26
+ _default_console_level = INFO
27
+ _default_log_format = LOG_FORMAT
28
+ _default_encoding = 'utf-8'
29
+
30
+ _level_map = {
31
+ 'DEBUG': DEBUG,
32
+ 'INFO': INFO,
33
+ 'WARNING': WARNING,
34
+ 'ERROR': ERROR,
35
+ 'CRITICAL': CRITICAL,
36
+ }
37
+
38
+ @classmethod
39
+ def _to_level(cls, level):
40
+ """安全转换为日志级别 int"""
41
+ if level is None:
42
+ return INFO
43
+ if isinstance(level, int):
44
+ return level
45
+ if isinstance(level, str):
46
+ return cls._level_map.get(level.upper(), INFO)
47
+ if hasattr(level, 'get'): # 如 SettingManager 或 dict
48
+ lv = level.get('LOG_LEVEL')
49
+ if isinstance(lv, int):
50
+ return lv
51
+ if isinstance(lv, str):
52
+ return cls._level_map.get(lv.upper(), INFO)
53
+ return INFO
54
+
55
+ @classmethod
56
+ def configure(cls, settings=None, **kwargs):
57
+ """
58
+ 使用 settings 对象或关键字参数配置日志
59
+ """
60
+ # 优先使用 settings,否则用 kwargs
61
+ get_val = settings.get if hasattr(settings, 'get') else (lambda k, d=None: kwargs.get(k, d))
62
+
63
+ filename = get_val('LOG_FILE')
64
+ level = get_val('LOG_LEVEL', 'INFO')
65
+ file_level = get_val('LOG_FILE_LEVEL', level)
66
+ console_level = get_val('LOG_CONSOLE_LEVEL', level)
67
+ log_format = get_val('LOG_FORMAT', LOG_FORMAT)
68
+ encoding = get_val('LOG_ENCODING', 'utf-8')
69
+
70
+ cls._default_filename = filename
71
+ cls._default_level = cls._to_level(level)
72
+ cls._default_file_level = cls._to_level(file_level)
73
+ cls._default_console_level = cls._to_level(console_level)
74
+ cls._default_log_format = log_format
75
+ cls._default_encoding = encoding
76
+
77
+ @classmethod
78
+ def get_logger(cls, name='default', level=None, filename=None):
79
+ """
80
+ 简化接口,只暴露必要参数
81
+ """
82
+ # 确定最终参数
83
+ final_level = cls._to_level(level) if level is not None else cls._default_level
84
+ final_filename = filename if filename is not None else cls._default_filename
85
+
86
+ # ✅ 安全的字符串化 key,避免任何 unhashable 类型
87
+ key_parts = [
88
+ name,
89
+ str(final_level),
90
+ final_filename or 'no_file',
91
+ ]
92
+ key = '|'.join(key_parts) # 如 "my_spider|20|logs/app.log"
93
+
94
+ if key in cls.logger_cache:
95
+ return cls.logger_cache[key]
96
+
97
+ # 创建 logger
98
+ _logger = Logger(name=name)
99
+ _logger.setLevel(final_level)
100
+
101
+ formatter = Formatter(cls._default_log_format)
102
+
103
+ # 控制台
104
+ if cls._default_console_level is not False:
105
+ ch = StreamHandler()
106
+ ch.setFormatter(formatter)
107
+ ch.setLevel(cls._default_console_level)
108
+ _logger.addHandler(ch)
109
+
110
+ # 文件
111
+ if final_filename:
112
+ try:
113
+ log_dir = os.path.dirname(final_filename)
114
+ if log_dir and not os.path.exists(log_dir):
115
+ os.makedirs(log_dir, exist_ok=True)
116
+
117
+ fh = FileHandler(final_filename, encoding=cls._default_encoding)
118
+ fh.setFormatter(formatter)
119
+ fh.setLevel(cls._default_file_level)
120
+ _logger.addHandler(fh)
121
+ except Exception as e:
122
+ print(f"[Logger] 无法创建日志文件 {final_filename}: {e}")
123
+
124
+ cls.logger_cache[key] = _logger
125
+ return _logger
126
+
127
+
128
+ # 全局快捷函数
129
+ get_logger = LoggerManager.get_logger