crawlo 1.0.1__py3-none-any.whl → 1.0.3__py3-none-any.whl
This diff shows the changes between publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.
Potentially problematic release.
This version of crawlo might be problematic.
- crawlo/__init__.py +9 -6
- crawlo/__version__.py +1 -2
- crawlo/core/__init__.py +2 -2
- crawlo/core/engine.py +158 -158
- crawlo/core/processor.py +40 -40
- crawlo/core/scheduler.py +57 -59
- crawlo/crawler.py +242 -107
- crawlo/downloader/__init__.py +78 -78
- crawlo/downloader/aiohttp_downloader.py +259 -96
- crawlo/downloader/httpx_downloader.py +187 -48
- crawlo/downloader/playwright_downloader.py +160 -160
- crawlo/event.py +11 -11
- crawlo/exceptions.py +64 -64
- crawlo/extension/__init__.py +31 -31
- crawlo/extension/log_interval.py +49 -49
- crawlo/extension/log_stats.py +44 -44
- crawlo/filters/__init__.py +37 -37
- crawlo/filters/aioredis_filter.py +157 -129
- crawlo/filters/memory_filter.py +202 -203
- crawlo/filters/redis_filter.py +119 -119
- crawlo/items/__init__.py +62 -62
- crawlo/items/items.py +118 -118
- crawlo/middleware/__init__.py +21 -21
- crawlo/middleware/default_header.py +32 -32
- crawlo/middleware/download_delay.py +28 -28
- crawlo/middleware/middleware_manager.py +140 -140
- crawlo/middleware/request_ignore.py +30 -30
- crawlo/middleware/response_code.py +18 -18
- crawlo/middleware/response_filter.py +26 -26
- crawlo/middleware/retry.py +90 -89
- crawlo/network/__init__.py +7 -7
- crawlo/network/request.py +205 -155
- crawlo/network/response.py +166 -93
- crawlo/pipelines/__init__.py +13 -13
- crawlo/pipelines/console_pipeline.py +39 -39
- crawlo/pipelines/mongo_pipeline.py +116 -116
- crawlo/pipelines/mysql_batch_pipline.py +133 -133
- crawlo/pipelines/mysql_pipeline.py +195 -176
- crawlo/pipelines/pipeline_manager.py +56 -56
- crawlo/settings/__init__.py +7 -7
- crawlo/settings/default_settings.py +93 -89
- crawlo/settings/setting_manager.py +99 -99
- crawlo/spider/__init__.py +36 -36
- crawlo/stats_collector.py +59 -47
- crawlo/subscriber.py +106 -27
- crawlo/task_manager.py +27 -27
- crawlo/templates/item_template.tmpl +21 -21
- crawlo/templates/project_template/main.py +32 -32
- crawlo/templates/project_template/setting.py +189 -189
- crawlo/templates/spider_template.tmpl +30 -30
- crawlo/utils/__init__.py +7 -7
- crawlo/utils/concurrency_manager.py +125 -0
- crawlo/utils/date_tools.py +177 -177
- crawlo/utils/func_tools.py +82 -82
- crawlo/utils/log.py +39 -39
- crawlo/utils/pqueue.py +173 -173
- crawlo/utils/project.py +59 -59
- crawlo/utils/request.py +122 -85
- crawlo/utils/system.py +11 -11
- crawlo/utils/tools.py +303 -0
- crawlo/utils/url.py +39 -39
- {crawlo-1.0.1.dist-info → crawlo-1.0.3.dist-info}/METADATA +48 -36
- crawlo-1.0.3.dist-info/RECORD +80 -0
- {crawlo-1.0.1.dist-info → crawlo-1.0.3.dist-info}/top_level.txt +1 -0
- tests/__init__.py +7 -0
- tests/baidu_spider/__init__.py +7 -0
- tests/baidu_spider/demo.py +94 -0
- tests/baidu_spider/items.py +25 -0
- tests/baidu_spider/middleware.py +49 -0
- tests/baidu_spider/pipeline.py +55 -0
- tests/baidu_spider/request_fingerprints.txt +9 -0
- tests/baidu_spider/run.py +27 -0
- tests/baidu_spider/settings.py +78 -0
- tests/baidu_spider/spiders/__init__.py +7 -0
- tests/baidu_spider/spiders/bai_du.py +61 -0
- tests/baidu_spider/spiders/sina.py +79 -0
- crawlo-1.0.1.dist-info/RECORD +0 -67
- crawlo-1.0.1.dist-info/licenses/LICENSE +0 -23
- {crawlo-1.0.1.dist-info → crawlo-1.0.3.dist-info}/WHEEL +0 -0
- {crawlo-1.0.1.dist-info → crawlo-1.0.3.dist-info}/entry_points.txt +0 -0
crawlo/extension/log_stats.py
CHANGED

@@ -1,44 +1,44 @@

Every one of the 44 lines is removed and re-added with identical content, most likely a line-ending or encoding normalization. The file, shown once:

#!/usr/bin/python
# -*- coding:UTF-8 -*-
from crawlo import event
from crawlo.utils.date_tools import get_current_time, time_diff_seconds


class LogStats(object):

    def __init__(self, stats):
        self._stats = stats

    @classmethod
    def create_instance(cls, crawler):
        o = cls(crawler.stats)
        crawler.subscriber.subscribe(o.spider_opened, event=event.spider_opened)
        crawler.subscriber.subscribe(o.spider_closed, event=event.spider_closed)
        crawler.subscriber.subscribe(o.item_successful, event=event.item_successful)
        crawler.subscriber.subscribe(o.item_discard, event=event.item_discard)
        crawler.subscriber.subscribe(o.response_received, event=event.response_received)
        crawler.subscriber.subscribe(o.request_scheduled, event=event.request_scheduled)

        return o

    async def spider_opened(self):
        self._stats['start_time'] = get_current_time(fmt='%Y-%m-%d %H:%M:%S')

    async def spider_closed(self):
        self._stats['end_time'] = get_current_time(fmt='%Y-%m-%d %H:%M:%S')
        self._stats['cost_time(s)'] = time_diff_seconds(start_time=self._stats['start_time'], end_time=self._stats['end_time'])

    async def item_successful(self, _item, _spider):
        self._stats.inc_value('item_successful_count')

    async def item_discard(self, _item, exc, _spider):
        self._stats.inc_value('item_discard_count')
        reason = exc.msg
        if reason:
            self._stats.inc_value(f"item_discard/{reason}")

    async def response_received(self, _response, _spider):
        self._stats.inc_value('response_received_count')

    async def request_scheduled(self, _request, _spider):
        self._stats.inc_value('request_scheduler_count')
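LogStats is pure glue: create_instance() subscribes its coroutine methods to the crawler's event bus, and each handler bumps a counter or records a timestamp. For orientation, a minimal sketch of the two contracts it relies on, inferred from the calls above; crawlo's actual subscriber.py and stats_collector.py (both also touched in this release) may differ.

# Minimal sketch of the contracts LogStats depends on (illustrative only,
# inferred from the calls in log_stats.py; not crawlo's real implementation).
import asyncio
from collections import defaultdict


class Subscriber:
    """Maps an event name to a set of async handlers."""

    def __init__(self):
        self._handlers = defaultdict(set)

    def subscribe(self, handler, *, event):
        self._handlers[event].add(handler)

    async def notify(self, event, *args, **kwargs):
        # Run every handler registered for this event concurrently.
        await asyncio.gather(*(h(*args, **kwargs) for h in self._handlers[event]))


class StatsCollector(dict):
    """Dict-like stats store with the inc_value() helper LogStats calls."""

    def inc_value(self, key, count=1, start=0):
        self[key] = self.get(key, start) + count

When the engine fires, say, event.response_received, notify() awaits LogStats.response_received(), which increments 'response_received_count'.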
crawlo/filters/__init__.py
CHANGED

@@ -1,37 +1,37 @@

As with log_stats.py, all 37 lines are removed and re-added with identical content. The file, shown once:

#!/usr/bin/python
# -*- coding:UTF-8 -*-
from abc import ABC, abstractmethod

from crawlo import Request
from crawlo.utils.request import request_fingerprint


class BaseFilter(ABC):

    def __init__(self, logger, stats, debug: bool):
        self.logger = logger
        self.stats = stats
        self.debug = debug

    @classmethod
    def create_instance(cls, *args, **kwargs) -> 'BaseFilter':
        return cls(*args, **kwargs)

    def requested(self, request: Request):
        fp = request_fingerprint(request)
        if fp in self:
            return True
        self.add_fingerprint(fp)
        return False

    @abstractmethod
    def add_fingerprint(self, fp) -> None:
        pass

    def log_stats(self, request: Request) -> None:
        if self.debug:
            self.logger.debug(f'Filtered duplicate request: {request}')
        self.stats.inc_value(f'{self}/filtered_count')

    def __str__(self) -> str:
        return f'{self.__class__.__name__}'
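Note the membership test in requested(): "fp in self" delegates to the subclass, so a concrete filter must implement __contains__ alongside the abstract add_fingerprint. A minimal illustrative subclass (not the package's memory_filter.py, just the smallest thing that satisfies the contract):

# Smallest concrete BaseFilter: dedup within one process using a plain set.
from crawlo.filters import BaseFilter


class SetFilter(BaseFilter):

    def __init__(self, logger, stats, debug: bool):
        super().__init__(logger, stats, debug)
        self._fingerprints = set()

    def add_fingerprint(self, fp) -> None:
        self._fingerprints.add(fp)

    def __contains__(self, fp) -> bool:
        # Consulted by BaseFilter.requested() via "fp in self".
        return fp in self._fingerprints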
crawlo/filters/aioredis_filter.py
CHANGED

(The rendered page dropped the file header for this hunk; the +157/-129 line counts match crawlo/filters/aioredis_filter.py in the file list above.)

@@ -1,130 +1,158 @@

Removed (1.0.1, 129 lines): the diff viewer truncated this side, leaving only scattered fragments such as "import", "from crawlo import", "except", "self.logger.error(f\"", "async def", "try:", and "await self.redis.". The old implementation is not recoverable from this page.

Added (1.0.3, 157 lines; the final line of _close_redis is the hunk's single shared context line). Docstrings and comments, originally in Chinese, are translated to English here:

#!/usr/bin/python
# -*- coding:UTF-8 -*-
from typing import Optional
import aioredis
from crawlo import Request
from crawlo.filters import BaseFilter
from crawlo.utils.log import get_logger
from crawlo.utils.request import request_fingerprint


class AioRedisFilter(BaseFilter):
    """Async request-dedup filter backed by a Redis set (for distributed crawlers)."""

    def __init__(
        self,
        redis_key: str,
        client: aioredis.Redis,
        stats: dict,
        debug: bool,
        log_level: str,
        cleanup_fp: bool = False
    ):
        """Initialize the filter."""
        self.logger = get_logger(self.__class__.__name__, log_level)
        super().__init__(self.logger, stats, debug)

        self.redis_key = redis_key
        self.redis = client
        self.cleanup_fp = cleanup_fp

    @classmethod
    def create_instance(cls, crawler) -> 'BaseFilter':
        """Create a filter instance from the crawler settings."""
        redis_url = crawler.settings.get('REDIS_URL', 'redis://localhost:6379')
        decode_responses = crawler.settings.get_bool('DECODE_RESPONSES', False)  # key point: changed to False

        try:
            redis_client = aioredis.from_url(
                redis_url,
                decode_responses=decode_responses,
                max_connections=20,
                encoding='utf-8'
            )
        except Exception as e:
            raise RuntimeError(f"Redis connection failed {redis_url}: {str(e)}")

        return cls(
            redis_key=f"{crawler.settings.get('PROJECT_NAME', 'default')}:{crawler.settings.get('REDIS_KEY', 'request_fingerprints')}",
            client=redis_client,
            stats=crawler.stats,
            cleanup_fp=crawler.settings.get_bool('CLEANUP_FP', False),
            debug=crawler.settings.get_bool('FILTER_DEBUG', False),
            log_level=crawler.settings.get('LOG_LEVEL', 'INFO')
        )

    async def requested(self, request: Request) -> bool:
        """
        Check whether the request is a duplicate.
        """
        try:
            fp = request_fingerprint(request)
            self.logger.debug(f"Checking fingerprint: {fp}")

            # Make sure fp is a string.
            if not isinstance(fp, str):
                fp = str(fp)

            # Check the Redis connection state.
            if not self.redis:
                raise RuntimeError("Redis client is not initialized")

            # Check whether the fingerprint already exists.
            is_member = await self.redis.sismember(self.redis_key, fp)
            self.logger.debug(f"Fingerprint {fp} exists: {is_member}")

            if is_member:
                if self.debug:
                    self.logger.debug(f"Filtered duplicate request: {fp}")
                return True

            # Add the new fingerprint.
            result = await self.redis.sadd(self.redis_key, fp)

            if self.debug:
                if result == 1:
                    self.logger.debug(f"Added new fingerprint: {fp}")
                else:
                    self.logger.warning(f"Failed to add fingerprint: {fp}")

            return False

        except Exception as e:
            self.logger.error(f"Filter check failed for {getattr(request, 'url', 'unknown')}: {str(e)}")
            # Could either re-raise or return False (i.e. let the request through).
            raise

    async def add_fingerprint(self, fp: str) -> bool:
        """Add a new fingerprint to the Redis set."""
        try:
            if not isinstance(fp, str):
                fp = str(fp)

            result = await self.redis.sadd(self.redis_key, fp)
            if self.debug:
                self.logger.debug(f"Added fingerprint {fp}, result: {result}")
            return result == 1
        except Exception as e:
            self.logger.error(f"Failed to add fingerprint {fp}: {str(e)}")
            raise

    async def get_stats(self) -> dict:
        """Return current filter statistics."""
        try:
            count = await self.redis.scard(self.redis_key)
            return {
                'total_fingerprints': count,
                'redis_key': self.redis_key,
                **self.stats
            }
        except Exception as e:
            self.logger.error(f"Failed to get stats: {str(e)}")
            return self.stats

    async def clear_all(self) -> int:
        """Delete all fingerprint data."""
        try:
            deleted = await self.redis.delete(self.redis_key)
            self.logger.info(f"Cleared {deleted} keys")
            return deleted
        except Exception as e:
            self.logger.error(f"Failed to clear fingerprints: {str(e)}")
            raise

    async def closed(self, reason: Optional[str] = None) -> None:
        """Handle spider close."""
        try:
            if self.cleanup_fp:
                deleted = await self.redis.delete(self.redis_key)
                self.logger.info(
                    f"Cleaned {deleted} fingerprints from {self.redis_key} "
                    f"(reason: {reason or 'manual'})"
                )
            else:
                # Report what is being kept.
                count = await self.redis.scard(self.redis_key)
                self.logger.info(f"Total fingerprints preserved: {count}")
        except Exception as e:
            self.logger.warning(f"Close operation failed: {e}")
        finally:
            await self._close_redis()

    async def _close_redis(self) -> None:
        """Safely close the Redis connection."""
        try:
            if hasattr(self.redis, 'close'):
                await self.redis.close()
        except Exception as e:
            self.logger.warning(f"Redis close error: {e}")
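Two things stand out in the new code: requested() is now a coroutine (overriding the synchronous BaseFilter.requested()), so callers must await it, and the fingerprint set is namespaced as "PROJECT_NAME:REDIS_KEY". A hedged usage sketch, exercised standalone against a local Redis; the Request(url=...) constructor is an assumption, and inside a real crawl the scheduler drives requested() for you:

# Standalone smoke test for AioRedisFilter (assumes Redis on localhost:6379
# and that crawlo.Request accepts a url keyword; both are assumptions here).
import asyncio
import aioredis
from crawlo import Request
from crawlo.filters.aioredis_filter import AioRedisFilter


async def main():
    client = aioredis.from_url("redis://localhost:6379", decode_responses=False)
    flt = AioRedisFilter(
        redis_key="demo:request_fingerprints",
        client=client,
        stats={},
        debug=True,
        log_level="DEBUG",
        cleanup_fp=True,  # delete the fingerprint set when closed() runs
    )
    req = Request(url="https://example.com")
    print(await flt.requested(req))   # False: first sighting, fingerprint stored
    print(await flt.requested(req))   # True: duplicate, filtered
    await flt.closed(reason="demo")   # cleans up the set and closes the client


if __name__ == "__main__":
    asyncio.run(main())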