crawlo-1.1.3-py3-none-any.whl → crawlo-1.1.4-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of crawlo might be problematic.

Files changed (118)
  1. crawlo/__init__.py +34 -34
  2. crawlo/__version__.py +1 -1
  3. crawlo/cli.py +40 -40
  4. crawlo/commands/__init__.py +13 -13
  5. crawlo/commands/check.py +594 -594
  6. crawlo/commands/genspider.py +151 -151
  7. crawlo/commands/list.py +155 -155
  8. crawlo/commands/run.py +285 -285
  9. crawlo/commands/startproject.py +196 -196
  10. crawlo/commands/stats.py +188 -188
  11. crawlo/commands/utils.py +186 -186
  12. crawlo/config.py +279 -279
  13. crawlo/core/__init__.py +2 -2
  14. crawlo/core/engine.py +171 -171
  15. crawlo/core/enhanced_engine.py +189 -189
  16. crawlo/core/processor.py +40 -40
  17. crawlo/core/scheduler.py +165 -165
  18. crawlo/crawler.py +1027 -1027
  19. crawlo/downloader/__init__.py +242 -242
  20. crawlo/downloader/aiohttp_downloader.py +212 -212
  21. crawlo/downloader/cffi_downloader.py +251 -251
  22. crawlo/downloader/httpx_downloader.py +259 -259
  23. crawlo/event.py +11 -11
  24. crawlo/exceptions.py +81 -81
  25. crawlo/extension/__init__.py +38 -31
  26. crawlo/extension/health_check.py +142 -0
  27. crawlo/extension/log_interval.py +58 -49
  28. crawlo/extension/log_stats.py +82 -44
  29. crawlo/extension/logging_extension.py +44 -35
  30. crawlo/extension/memory_monitor.py +89 -0
  31. crawlo/extension/performance_profiler.py +118 -0
  32. crawlo/extension/request_recorder.py +108 -0
  33. crawlo/filters/__init__.py +154 -154
  34. crawlo/filters/aioredis_filter.py +241 -241
  35. crawlo/filters/memory_filter.py +269 -269
  36. crawlo/items/__init__.py +23 -23
  37. crawlo/items/base.py +21 -21
  38. crawlo/items/fields.py +53 -53
  39. crawlo/items/items.py +104 -104
  40. crawlo/middleware/__init__.py +21 -21
  41. crawlo/middleware/default_header.py +32 -32
  42. crawlo/middleware/download_delay.py +28 -28
  43. crawlo/middleware/middleware_manager.py +135 -135
  44. crawlo/middleware/proxy.py +248 -248
  45. crawlo/middleware/request_ignore.py +30 -30
  46. crawlo/middleware/response_code.py +18 -18
  47. crawlo/middleware/response_filter.py +26 -26
  48. crawlo/middleware/retry.py +124 -124
  49. crawlo/mode_manager.py +200 -200
  50. crawlo/network/__init__.py +21 -21
  51. crawlo/network/request.py +311 -311
  52. crawlo/network/response.py +271 -271
  53. crawlo/pipelines/__init__.py +21 -21
  54. crawlo/pipelines/bloom_dedup_pipeline.py +156 -156
  55. crawlo/pipelines/console_pipeline.py +39 -39
  56. crawlo/pipelines/csv_pipeline.py +316 -316
  57. crawlo/pipelines/database_dedup_pipeline.py +224 -224
  58. crawlo/pipelines/json_pipeline.py +218 -218
  59. crawlo/pipelines/memory_dedup_pipeline.py +115 -115
  60. crawlo/pipelines/mongo_pipeline.py +132 -117
  61. crawlo/pipelines/mysql_pipeline.py +317 -195
  62. crawlo/pipelines/pipeline_manager.py +56 -56
  63. crawlo/pipelines/redis_dedup_pipeline.py +162 -162
  64. crawlo/project.py +153 -153
  65. crawlo/queue/pqueue.py +37 -37
  66. crawlo/queue/queue_manager.py +307 -307
  67. crawlo/queue/redis_priority_queue.py +208 -208
  68. crawlo/settings/__init__.py +7 -7
  69. crawlo/settings/default_settings.py +278 -244
  70. crawlo/settings/setting_manager.py +99 -99
  71. crawlo/spider/__init__.py +639 -639
  72. crawlo/stats_collector.py +59 -59
  73. crawlo/subscriber.py +131 -106
  74. crawlo/task_manager.py +30 -30
  75. crawlo/templates/crawlo.cfg.tmpl +10 -10
  76. crawlo/templates/project/__init__.py.tmpl +3 -3
  77. crawlo/templates/project/items.py.tmpl +17 -17
  78. crawlo/templates/project/middlewares.py.tmpl +111 -87
  79. crawlo/templates/project/pipelines.py.tmpl +97 -341
  80. crawlo/templates/project/run.py.tmpl +251 -251
  81. crawlo/templates/project/settings.py.tmpl +279 -250
  82. crawlo/templates/project/spiders/__init__.py.tmpl +5 -5
  83. crawlo/templates/spider/spider.py.tmpl +142 -178
  84. crawlo/utils/__init__.py +7 -7
  85. crawlo/utils/controlled_spider_mixin.py +439 -439
  86. crawlo/utils/date_tools.py +233 -233
  87. crawlo/utils/db_helper.py +343 -343
  88. crawlo/utils/func_tools.py +82 -82
  89. crawlo/utils/large_scale_config.py +286 -286
  90. crawlo/utils/large_scale_helper.py +343 -343
  91. crawlo/utils/log.py +128 -128
  92. crawlo/utils/queue_helper.py +175 -175
  93. crawlo/utils/request.py +267 -267
  94. crawlo/utils/request_serializer.py +219 -219
  95. crawlo/utils/spider_loader.py +62 -62
  96. crawlo/utils/system.py +11 -11
  97. crawlo/utils/tools.py +4 -4
  98. crawlo/utils/url.py +39 -39
  99. crawlo-1.1.4.dist-info/METADATA +403 -0
  100. crawlo-1.1.4.dist-info/RECORD +117 -0
  101. examples/__init__.py +7 -7
  102. examples/controlled_spider_example.py +205 -205
  103. tests/__init__.py +7 -7
  104. tests/test_final_validation.py +153 -153
  105. tests/test_proxy_health_check.py +32 -32
  106. tests/test_proxy_middleware_integration.py +136 -136
  107. tests/test_proxy_providers.py +56 -56
  108. tests/test_proxy_stats.py +19 -19
  109. tests/test_proxy_strategies.py +59 -59
  110. tests/test_redis_config.py +28 -28
  111. tests/test_redis_queue.py +224 -224
  112. tests/test_request_serialization.py +70 -70
  113. tests/test_scheduler.py +241 -241
  114. crawlo-1.1.3.dist-info/METADATA +0 -635
  115. crawlo-1.1.3.dist-info/RECORD +0 -113
  116. {crawlo-1.1.3.dist-info → crawlo-1.1.4.dist-info}/WHEEL +0 -0
  117. {crawlo-1.1.3.dist-info → crawlo-1.1.4.dist-info}/entry_points.txt +0 -0
  118. {crawlo-1.1.3.dist-info → crawlo-1.1.4.dist-info}/top_level.txt +0 -0
crawlo/exceptions.py CHANGED
@@ -1,82 +1,82 @@
-#!/usr/bin/python
-# -*- coding:UTF-8 -*-
-class TransformTypeError(TypeError):
-    pass
-
-
-class OutputError(Exception):
-    pass
-
-
-class SpiderTypeError(TypeError):
-    pass
-
-
-class ItemInitError(Exception):
-    pass
-
-
-class ItemAttributeError(Exception):
-    pass
-
-
-class DecodeError(Exception):
-    pass
-
-
-class MiddlewareInitError(Exception):
-    pass
-
-
-class PipelineInitError(Exception):
-    pass
-
-
-class InvalidOutputError(Exception):
-    pass
-
-
-class RequestMethodError(Exception):
-    pass
-
-
-class IgnoreRequestError(Exception):
-    def __init__(self, msg):
-        self.msg = msg
-        super(IgnoreRequestError, self).__init__(msg)
-
-
-class ItemDiscard(Exception):
-    def __init__(self, msg):
-        self.msg = msg
-        super(ItemDiscard, self).__init__(msg)
-
-
-class NotConfigured(Exception):
-    pass
-
-
-class NotConfiguredError(Exception):
-    pass
-
-
-class ExtensionInitError(Exception):
-    pass
-
-
-class ReceiverTypeError(Exception):
-    pass
-
-
-class SpiderCreationError(Exception):
-    """Raised when a spider fails to be instantiated."""
-    pass
-
-
-class ItemValidationError(Exception):
-    """Raised when Item field validation fails."""
-    pass
-
-
-class DropItem(Exception):
+#!/usr/bin/python
+# -*- coding:UTF-8 -*-
+class TransformTypeError(TypeError):
+    pass
+
+
+class OutputError(Exception):
+    pass
+
+
+class SpiderTypeError(TypeError):
+    pass
+
+
+class ItemInitError(Exception):
+    pass
+
+
+class ItemAttributeError(Exception):
+    pass
+
+
+class DecodeError(Exception):
+    pass
+
+
+class MiddlewareInitError(Exception):
+    pass
+
+
+class PipelineInitError(Exception):
+    pass
+
+
+class InvalidOutputError(Exception):
+    pass
+
+
+class RequestMethodError(Exception):
+    pass
+
+
+class IgnoreRequestError(Exception):
+    def __init__(self, msg):
+        self.msg = msg
+        super(IgnoreRequestError, self).__init__(msg)
+
+
+class ItemDiscard(Exception):
+    def __init__(self, msg):
+        self.msg = msg
+        super(ItemDiscard, self).__init__(msg)
+
+
+class NotConfigured(Exception):
+    pass
+
+
+class NotConfiguredError(Exception):
+    pass
+
+
+class ExtensionInitError(Exception):
+    pass
+
+
+class ReceiverTypeError(Exception):
+    pass
+
+
+class SpiderCreationError(Exception):
+    """Raised when a spider fails to be instantiated."""
+    pass
+
+
+class ItemValidationError(Exception):
+    """Raised when Item field validation fails."""
+    pass
+
+
+class DropItem(Exception):
     pass
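
The exception classes above are rewritten byte-for-byte in 1.1.4 (the removed and added sides of the hunk are textually identical), so DropItem and ItemDiscard keep their 1.1.3 behaviour as the signals a pipeline raises to skip an item. A minimal, hypothetical sketch of that usage follows; the process_item(item, spider) hook name and dict-style item access are assumptions, not shown in this diff:

```python
# Hypothetical pipeline sketch (not part of the package): raising DropItem to
# discard an item missing a required field. The process_item() hook name and
# dict-style item access are assumptions.
from crawlo.exceptions import DropItem


class PriceRequiredPipeline:
    def process_item(self, item, spider):
        if not item.get("price"):
            raise DropItem("missing price field")  # signal that the item should be skipped
        return item
```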
crawlo/extension/__init__.py CHANGED
@@ -1,31 +1,38 @@
-#!/usr/bin/python
-# -*- coding:UTF-8 -*-
-from typing import List
-from pprint import pformat
-
-from crawlo.utils.log import get_logger
-from crawlo.project import load_class
-from crawlo.exceptions import ExtensionInitError
-
-
-class ExtensionManager(object):
-
-    def __init__(self, crawler):
-        self.crawler = crawler
-        self.extensions: List = []
-        extensions = self.crawler.settings.get_list('EXTENSIONS')
-        self.logger = get_logger(self.__class__.__name__, crawler.settings.get('LOG_LEVEL'))
-        self._add_extensions(extensions)
-
-    @classmethod
-    def create_instance(cls, *args, **kwargs):
-        return cls(*args, **kwargs)
-
-    def _add_extensions(self, extensions):
-        for extension in extensions:
-            extension_cls = load_class(extension)
-            if not hasattr(extension_cls, 'create_instance'):
-                raise ExtensionInitError(f"extension init failed, Must have method 'create_instance()")
-            self.extensions.append(extension_cls.create_instance(self.crawler))
-        if extensions:
-            self.logger.info(f"enabled extensions: \n {pformat(extensions)}")
+#!/usr/bin/python
+# -*- coding:UTF-8 -*-
+from typing import List, Any
+from pprint import pformat
+
+from crawlo.utils.log import get_logger
+from crawlo.project import load_class
+from crawlo.exceptions import ExtensionInitError
+
+
+class ExtensionManager(object):
+
+    def __init__(self, crawler: Any):
+        self.crawler = crawler
+        self.extensions: List = []
+        extensions = self.crawler.settings.get_list('EXTENSIONS')
+        self.logger = get_logger(self.__class__.__name__, crawler.settings.get('LOG_LEVEL'))
+        self._add_extensions(extensions)
+
+    @classmethod
+    def create_instance(cls, *args: Any, **kwargs: Any) -> 'ExtensionManager':
+        return cls(*args, **kwargs)
+
+    def _add_extensions(self, extensions: List[str]) -> None:
+        for extension_path in extensions:
+            try:
+                extension_cls = load_class(extension_path)
+                if not hasattr(extension_cls, 'create_instance'):
+                    raise ExtensionInitError(
+                        f"Extension '{extension_path}' init failed: Must have method 'create_instance()'"
+                    )
+                self.extensions.append(extension_cls.create_instance(self.crawler))
+            except Exception as e:
+                self.logger.error(f"Failed to load extension '{extension_path}': {e}")
+                raise ExtensionInitError(f"Failed to load extension '{extension_path}': {e}")
+
+        if extensions:
+            self.logger.info(f"Enabled extensions: \n{pformat(extensions)}")
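
The reworked _add_extensions() now wraps loading in try/except but keeps the same contract: every entry in the EXTENSIONS setting must resolve to a class exposing a create_instance() classmethod that receives the crawler. A minimal sketch of an extension satisfying that contract; the dotted path used in EXTENSIONS below is hypothetical:

```python
# Sketch of a user extension satisfying ExtensionManager's contract:
# a create_instance() classmethod that receives the crawler object.
class MyTimingExtension:
    def __init__(self, crawler):
        self.crawler = crawler

    @classmethod
    def create_instance(cls, crawler):
        return cls(crawler)


# settings.py (illustrative; the dotted path below is hypothetical)
EXTENSIONS = [
    "myproject.extensions.MyTimingExtension",
]
```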
crawlo/extension/health_check.py ADDED
@@ -0,0 +1,142 @@
+#!/usr/bin/python
+# -*- coding:UTF-8 -*-
+import asyncio
+from typing import Any, Optional, Dict
+from datetime import datetime, timedelta
+
+from crawlo.utils.log import get_logger
+from crawlo.event import spider_opened, spider_closed, response_received, request_scheduled
+
+
+class HealthCheckExtension:
+    """
+    Health-check extension.
+    Monitors the spider's health, including response-time and error-rate metrics.
+    """
+
+    def __init__(self, crawler: Any):
+        self.settings = crawler.settings
+        self.logger = get_logger(self.__class__.__name__, crawler.settings.get('LOG_LEVEL'))
+
+        # Read configuration parameters
+        self.enabled = self.settings.get_bool('HEALTH_CHECK_ENABLED', True)
+        self.check_interval = self.settings.get_int('HEALTH_CHECK_INTERVAL', 60)  # defaults to 60 seconds
+
+        # Health statistics
+        self.stats: Dict[str, Any] = {
+            'start_time': None,
+            'total_requests': 0,
+            'total_responses': 0,
+            'error_responses': 0,
+            'last_check_time': None,
+            'response_times': [],  # most recent response times
+        }
+
+        self.task: Optional[asyncio.Task] = None
+
+    @classmethod
+    def create_instance(cls, crawler: Any) -> 'HealthCheckExtension':
+        # Only create the instance when the feature is enabled
+        if not crawler.settings.get_bool('HEALTH_CHECK_ENABLED', True):
+            from crawlo.exceptions import NotConfigured
+            raise NotConfigured("HealthCheckExtension: HEALTH_CHECK_ENABLED is False")
+
+        o = cls(crawler)
+        if o.enabled:
+            crawler.subscriber.subscribe(o.spider_opened, event=spider_opened)
+            crawler.subscriber.subscribe(o.spider_closed, event=spider_closed)
+            crawler.subscriber.subscribe(o.response_received, event=response_received)
+            crawler.subscriber.subscribe(o.request_scheduled, event=request_scheduled)
+        return o
+
+    async def spider_opened(self) -> None:
+        """Start health checking when the spider opens."""
+        if not self.enabled:
+            return
+
+        self.stats['start_time'] = datetime.now()
+        self.task = asyncio.create_task(self._health_check_loop())
+        self.logger.info("Health check extension started.")
+
+    async def spider_closed(self) -> None:
+        """Stop health checking when the spider closes."""
+        if not self.enabled:
+            return
+
+        if self.task:
+            self.task.cancel()
+            try:
+                await self.task
+            except asyncio.CancelledError:
+                pass
+
+        # Emit a final health report
+        await self._check_health()
+        self.logger.info("Health check extension stopped.")
+
+    async def request_scheduled(self, request: Any, spider: Any) -> None:
+        """Count scheduled requests."""
+        if not self.enabled:
+            return
+        self.stats['total_requests'] += 1
+
+    async def response_received(self, response: Any, spider: Any) -> None:
+        """Count received responses."""
+        if not self.enabled:
+            return
+
+        self.stats['total_responses'] += 1
+
+        # Count error responses
+        if hasattr(response, 'status_code') and response.status_code >= 400:
+            self.stats['error_responses'] += 1
+
+    async def _health_check_loop(self) -> None:
+        """Periodic health-check loop."""
+        while True:
+            try:
+                await asyncio.sleep(self.check_interval)
+                await self._check_health()
+            except asyncio.CancelledError:
+                break
+            except Exception as e:
+                self.logger.error(f"Error in health check loop: {e}")
+
+    async def _check_health(self) -> None:
+        """Run a health check and emit a report."""
+        try:
+            now_time = datetime.now()
+            self.stats['last_check_time'] = now_time
+
+            # Basic statistics
+            runtime = (now_time - self.stats['start_time']).total_seconds() if self.stats['start_time'] else 0
+            requests_per_second = self.stats['total_requests'] / runtime if runtime > 0 else 0
+            responses_per_second = self.stats['total_responses'] / runtime if runtime > 0 else 0
+
+            # Error rate
+            error_rate = (
+                self.stats['error_responses'] / self.stats['total_responses']
+                if self.stats['total_responses'] > 0 else 0
+            )
+
+            # Health report
+            health_report = {
+                'runtime_seconds': round(runtime, 2),
+                'total_requests': self.stats['total_requests'],
+                'total_responses': self.stats['total_responses'],
+                'requests_per_second': round(requests_per_second, 2),
+                'responses_per_second': round(responses_per_second, 2),
+                'error_responses': self.stats['error_responses'],
+                'error_rate': f"{error_rate:.2%}",
+            }
+
+            # Choose the log level based on the error rate
+            if error_rate > 0.1:  # error rate above 10%
+                self.logger.warning(f"Health check report: {health_report}")
+            elif error_rate > 0.05:  # error rate above 5%
+                self.logger.info(f"Health check report: {health_report}")
+            else:
+                self.logger.debug(f"Health check report: {health_report}")
+
+        except Exception as e:
+            self.logger.error(f"Error in health check: {e}")
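
The new HealthCheckExtension is gated by two settings it reads in __init__ and create_instance(): HEALTH_CHECK_ENABLED (create_instance() raises NotConfigured when it is False) and HEALTH_CHECK_INTERVAL (seconds between reports, defaulting to 60). A hedged settings sketch for opting in; the dotted path in EXTENSIONS is inferred from the module location, and whether the extension is registered by default in default_settings.py is not visible in this hunk:

```python
# settings.py sketch: opting in to the new health-check extension.
# The EXTENSIONS entry is inferred from the module path and may need adjusting.
EXTENSIONS = [
    "crawlo.extension.health_check.HealthCheckExtension",
]
HEALTH_CHECK_ENABLED = True   # create_instance() raises NotConfigured when False
HEALTH_CHECK_INTERVAL = 30    # seconds between health reports (code default: 60)
```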
crawlo/extension/log_interval.py CHANGED
@@ -1,49 +1,58 @@
-#!/usr/bin/python
-# -*- coding:UTF-8 -*-
-import asyncio
-
-from crawlo.utils.log import get_logger
-from crawlo.event import spider_opened, spider_closed
-
-
-class LogIntervalExtension(object):
-
-    def __init__(self, crawler):
-        self.task = None
-        self.stats = crawler.stats
-        self.item_count = 0
-        self.response_count = 0
-        self.seconds = crawler.settings.get('INTERVAL')
-        self.interval = int(self.seconds / 60) if self.seconds % 60 == 0 else self.seconds
-        self.interval = "" if self.interval == 1 else self.interval
-        self.unit = 'min' if self.seconds % 60 == 0 else 's'
-
-        self.logger = get_logger(self.__class__.__name__, crawler.settings.get('LOG_LEVEL'))
-
-    @classmethod
-    def create_instance(cls, crawler):
-        o = cls(crawler)
-        crawler.subscriber.subscribe(o.spider_opened, event=spider_opened)
-        crawler.subscriber.subscribe(o.spider_closed, event=spider_closed)
-        return o
-
-    async def spider_opened(self):
-        self.task = asyncio.create_task(self.interval_log())
-        await self.task
-
-    async def spider_closed(self):
-        if self.task:
-            self.task.cancel()
-
-    async def interval_log(self):
-        while True:
-            last_item_count = self.stats.get_value('item_successful_count', default=0)
-            last_response_count = self.stats.get_value('response_received_count', default=0)
-            item_rate = last_item_count - self.item_count
-            response_rate = last_response_count - self.response_count
-            self.item_count, self.response_count = last_item_count, last_response_count
-            self.logger.info(
-                f'Crawled {last_response_count} pages (at {response_rate} pages/{self.interval}{self.unit}),'
-                f' Got {last_item_count} items (at {item_rate} items/{self.interval}{self.unit}).'
-            )
-            await asyncio.sleep(self.seconds)
+#!/usr/bin/python
+# -*- coding:UTF-8 -*-
+import asyncio
+from typing import Any, Optional
+
+from crawlo.utils.log import get_logger
+from crawlo.event import spider_opened, spider_closed
+
+
+class LogIntervalExtension(object):
+
+    def __init__(self, crawler: Any):
+        self.task: Optional[asyncio.Task] = None
+        self.stats = crawler.stats
+        self.item_count = 0
+        self.response_count = 0
+        self.seconds = crawler.settings.get('INTERVAL', 60)  # defaults to 60
+        self.interval = int(self.seconds / 60) if self.seconds % 60 == 0 else self.seconds
+        self.interval = "" if self.interval == 1 else self.interval
+        self.unit = 'min' if self.seconds % 60 == 0 else 's'
+
+        self.logger = get_logger(self.__class__.__name__, crawler.settings.get('LOG_LEVEL'))
+
+    @classmethod
+    def create_instance(cls, crawler: Any) -> 'LogIntervalExtension':
+        o = cls(crawler)
+        crawler.subscriber.subscribe(o.spider_opened, event=spider_opened)
+        crawler.subscriber.subscribe(o.spider_closed, event=spider_closed)
+        return o
+
+    async def spider_opened(self) -> None:
+        self.task = asyncio.create_task(self.interval_log())
+
+    async def spider_closed(self) -> None:
+        if self.task:
+            self.task.cancel()
+            try:
+                await self.task
+            except asyncio.CancelledError:
+                pass
+            self.task = None
+
+    async def interval_log(self) -> None:
+        while True:
+            try:
+                last_item_count = self.stats.get_value('item_successful_count', default=0)
+                last_response_count = self.stats.get_value('response_received_count', default=0)
+                item_rate = last_item_count - self.item_count
+                response_rate = last_response_count - self.response_count
+                self.item_count, self.response_count = last_item_count, last_response_count
+                self.logger.info(
+                    f'Crawled {last_response_count} pages (at {response_rate} pages/{self.interval}{self.unit}),'
+                    f' Got {last_item_count} items (at {item_rate} items/{self.interval}{self.unit}).'
+                )
+                await asyncio.sleep(self.seconds)
+            except Exception as e:
+                self.logger.error(f"Error in interval logging: {e}")
+                await asyncio.sleep(self.seconds)  # keep running even if an iteration fails
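
The INTERVAL handling in __init__ is unchanged: whole minutes collapse into a 'min' unit and the count is dropped when it equals one, which is what produces log lines like 'pages/min' or 'pages/2min'. A small standalone illustration of that arithmetic (not part of the package):

```python
def describe_interval(seconds: int) -> str:
    # Mirrors LogIntervalExtension's display logic: whole minutes are reported
    # as 'min', anything else as raw seconds with an 's' unit.
    interval = seconds // 60 if seconds % 60 == 0 else seconds
    label = "" if interval == 1 else interval
    unit = "min" if seconds % 60 == 0 else "s"
    return f"{label}{unit}"

assert describe_interval(60) == "min"     # logged as "pages/min"
assert describe_interval(120) == "2min"   # logged as "pages/2min"
assert describe_interval(45) == "45s"     # logged as "pages/45s"
```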
crawlo/extension/log_stats.py CHANGED
@@ -1,44 +1,82 @@
-#!/usr/bin/python
-# -*- coding:UTF-8 -*-
-from crawlo import event
-from crawlo.utils.date_tools import now, time_diff
-
-
-class LogStats(object):
-
-    def __init__(self, stats):
-        self._stats = stats
-
-    @classmethod
-    def create_instance(cls, crawler):
-        o = cls(crawler.stats)
-        crawler.subscriber.subscribe(o.spider_opened, event=event.spider_opened)
-        crawler.subscriber.subscribe(o.spider_closed, event=event.spider_closed)
-        crawler.subscriber.subscribe(o.item_successful, event=event.item_successful)
-        crawler.subscriber.subscribe(o.item_discard, event=event.item_discard)
-        crawler.subscriber.subscribe(o.response_received, event=event.response_received)
-        crawler.subscriber.subscribe(o.request_scheduled, event=event.request_scheduled)
-
-        return o
-
-    async def spider_opened(self):
-        self._stats['start_time'] = now(fmt='%Y-%m-%d %H:%M:%S')
-
-    async def spider_closed(self):
-        self._stats['end_time'] = now(fmt='%Y-%m-%d %H:%M:%S')
-        self._stats['cost_time(s)'] = time_diff(start=self._stats['start_time'], end=self._stats['end_time'])
-
-    async def item_successful(self, _item, _spider):
-        self._stats.inc_value('item_successful_count')
-
-    async def item_discard(self, _item, exc, _spider):
-        self._stats.inc_value('item_discard_count')
-        reason = exc.msg
-        if reason:
-            self._stats.inc_value(f"item_discard/{reason}")
-
-    async def response_received(self, _response, _spider):
-        self._stats.inc_value('response_received_count')
-
-    async def request_scheduled(self, _request, _spider):
-        self._stats.inc_value('request_scheduler_count')
+#!/usr/bin/python
+# -*- coding:UTF-8 -*-
+from typing import Any
+
+from crawlo import event
+from crawlo.utils.date_tools import now, time_diff
+
+
+class LogStats(object):
+
+    def __init__(self, stats: Any):
+        self._stats = stats
+
+    @classmethod
+    def create_instance(cls, crawler: Any) -> 'LogStats':
+        o = cls(crawler.stats)
+        # Subscribe to all required events
+        event_subscriptions = [
+            (o.spider_opened, event.spider_opened),
+            (o.spider_closed, event.spider_closed),
+            (o.item_successful, event.item_successful),
+            (o.item_discard, event.item_discard),
+            (o.response_received, event.response_received),
+            (o.request_scheduled, event.request_scheduled),
+        ]
+
+        for handler, evt in event_subscriptions:
+            try:
+                crawler.subscriber.subscribe(handler, event=evt)
+            except Exception as e:
+                # Get a logger and record the error
+                from crawlo.utils.log import get_logger
+                logger = get_logger(cls.__name__)
+                logger.error(f"Failed to subscribe to event {evt}: {e}")
+
+        return o
+
+    async def spider_opened(self) -> None:
+        try:
+            self._stats['start_time'] = now(fmt='%Y-%m-%d %H:%M:%S')
+        except Exception as e:
+            # Fail silently to avoid interrupting the crawl
+            pass
+
+    async def spider_closed(self) -> None:
+        try:
+            self._stats['end_time'] = now(fmt='%Y-%m-%d %H:%M:%S')
+            self._stats['cost_time(s)'] = time_diff(start=self._stats['start_time'], end=self._stats['end_time'])
+        except Exception as e:
+            # Fail silently to avoid interrupting the crawl
+            pass
+
+    async def item_successful(self, _item: Any, _spider: Any) -> None:
+        try:
+            self._stats.inc_value('item_successful_count')
+        except Exception as e:
+            # Fail silently to avoid interrupting the crawl
+            pass
+
+    async def item_discard(self, _item: Any, exc: Any, _spider: Any) -> None:
+        try:
+            self._stats.inc_value('item_discard_count')
+            reason = getattr(exc, 'msg', None)  # safer attribute access
+            if reason:
+                self._stats.inc_value(f"item_discard/{reason}")
+        except Exception as e:
+            # Fail silently to avoid interrupting the crawl
+            pass
+
+    async def response_received(self, _response: Any, _spider: Any) -> None:
+        try:
+            self._stats.inc_value('response_received_count')
+        except Exception as e:
+            # Fail silently to avoid interrupting the crawl
+            pass
+
+    async def request_scheduled(self, _request: Any, _spider: Any) -> None:
+        try:
+            self._stats.inc_value('request_scheduler_count')
+        except Exception as e:
+            # Fail silently to avoid interrupting the crawl
+            pass
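
LogStats now swallows per-handler errors so stats bookkeeping can never interrupt a crawl, but the counters it maintains are unchanged and are read back elsewhere through the same stats API (see LogIntervalExtension's get_value() calls above). A hedged sketch of reading those counters; the place you would call this from (for example a spider_closed handler) is an assumption:

```python
# Sketch: inspecting the counters LogStats maintains. get_value() is the
# accessor used by LogIntervalExtension in this same release; where this
# function is invoked from is an assumption, not shown in the diff.
def report_final_stats(crawler):
    stats = crawler.stats
    crawled = stats.get_value("response_received_count", default=0)
    items = stats.get_value("item_successful_count", default=0)
    dropped = stats.get_value("item_discard_count", default=0)
    print(f"responses={crawled} items={items} discarded={dropped}")
```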