crawlo-1.3.7-py3-none-any.whl → crawlo-1.3.9-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of crawlo might be problematic.

crawlo/__version__.py CHANGED
@@ -1 +1 @@
- __version__ = '1.3.7'
+ __version__ = '1.3.9'
crawlo/pipelines/bloom_dedup_pipeline.py CHANGED
@@ -39,7 +39,7 @@ except ImportError:
  from crawlo import Item
  from crawlo.spider import Spider
  from crawlo.utils.log import get_logger
- from crawlo.exceptions import DropItem
+ from crawlo.exceptions import DropItem, ItemDiscard
 
 
  class BloomDedupPipeline:
@@ -101,7 +101,7 @@ class BloomDedupPipeline:
  # If the fingerprint may already exist (the Bloom filter can give false positives), discard the item
  self.dropped_count += 1
  self.logger.debug(f"Possibly dropping duplicate item: {fingerprint[:20]}...")
- raise DropItem(f"可能重复的数据项: {fingerprint}")
+ raise ItemDiscard(f"可能重复的数据项: {fingerprint}")
  else:
  # Add the fingerprint to the Bloom filter
  self.bloom_filter.add(fingerprint)
crawlo/pipelines/database_dedup_pipeline.py CHANGED
@@ -15,7 +15,7 @@ import hashlib
  import aiomysql
 
  from crawlo import Item
- from crawlo.exceptions import DropItem
+ from crawlo.exceptions import DropItem, ItemDiscard
  from crawlo.spider import Spider
  from crawlo.utils.log import get_logger
 
@@ -133,7 +133,7 @@ class DatabaseDedupPipeline:
  # If the fingerprint already exists, discard the item
  self.dropped_count += 1
  self.logger.debug(f"Dropping duplicate item: {fingerprint[:20]}...")
- raise DropItem(f"Duplicate item: {fingerprint}")
+ raise ItemDiscard(f"Duplicate item: {fingerprint}")
  else:
  # Record the new item's fingerprint
  await self._insert_fingerprint(fingerprint)
@@ -176,7 +176,7 @@ class DatabaseDedupPipeline:
  except aiomysql.IntegrityError:
  # Fingerprint already exists (can happen under concurrency)
  await conn.rollback()
- raise DropItem(f"重复的数据项: {fingerprint}")
+ raise ItemDiscard(f"重复的数据项: {fingerprint}")
  except Exception:
  await conn.rollback()
  raise
@@ -197,27 +197,4 @@ class DatabaseDedupPipeline:
  # Fall back for Item implementations without a to_dict method
  item_dict = dict(item)
 
- # Sort the dict entries to ensure a consistent order
- sorted_items = sorted(item_dict.items())
-
- # Build the fingerprint string
- fingerprint_string = '|'.join([f"{k}={v}" for k, v in sorted_items if v is not None])
-
- # Use SHA256 to produce a fixed-length fingerprint
- return hashlib.sha256(fingerprint_string.encode('utf-8')).hexdigest()
-
- async def close_spider(self, spider: Spider) -> None:
- """
- Cleanup work when the spider closes
-
- :param spider: the spider instance
- """
- try:
- if self.pool:
- self.pool.close()
- await self.pool.wait_closed()
-
- self.logger.info(f"Spider {spider.name} closed:")
- self.logger.info(f" - Dropped duplicate items: {self.dropped_count}")
- except Exception as e:
- self.logger.error(f"Error closing spider: {e}")
+ # Sort the dict entries to ensure a consistent order
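For reference, the per-item fingerprinting removed from the tail of this file (the memory and Bloom pipelines expose a similar _generate_item_fingerprint helper, which the new test added in this release calls) reduces to the following standalone sketch; the function name is illustrative only:

import hashlib

def item_fingerprint(item_dict: dict) -> str:
    # Sort the fields for a stable order and drop None values before hashing.
    sorted_items = sorted(item_dict.items())
    fingerprint_string = '|'.join(f"{k}={v}" for k, v in sorted_items if v is not None)
    # SHA256 yields a fixed-length fingerprint.
    return hashlib.sha256(fingerprint_string.encode('utf-8')).hexdigest()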
crawlo/pipelines/memory_dedup_pipeline.py CHANGED
@@ -16,7 +16,7 @@ import hashlib
  from typing import Set
 
  from crawlo import Item
- from crawlo.exceptions import DropItem
+ from crawlo.exceptions import DropItem, ItemDiscard
  from crawlo.spider import Spider
  from crawlo.utils.log import get_logger
 
@@ -64,7 +64,7 @@ class MemoryDedupPipeline:
  # If the fingerprint already exists, discard the item
  self.dropped_count += 1
  self.logger.debug(f"Dropping duplicate item: {fingerprint[:20]}...")
- raise DropItem(f"重复的数据项: {fingerprint}")
+ raise ItemDiscard(f"重复的数据项: {fingerprint}")
  else:
  # Record the new item's fingerprint
  self.seen_items.add(fingerprint)
crawlo/pipelines/pipeline_manager.py CHANGED
@@ -8,7 +8,7 @@ from crawlo.utils.log import get_logger
  from crawlo.event import item_successful, item_discard
  from crawlo.utils.class_loader import load_class
  from crawlo.project import common_call
- from crawlo.exceptions import PipelineInitError, ItemDiscard, InvalidOutputError
+ from crawlo.exceptions import PipelineInitError, ItemDiscard, InvalidOutputError, DropItem
 
 
  class PipelineManager:
@@ -70,7 +70,7 @@ class PipelineManager:
  item = await common_call(method, item, self.crawler.spider)
  if item is None:
  raise InvalidOutputError(f"{method.__qualname__} return None is not supported.")
- except ItemDiscard as exc:
+ except (ItemDiscard, DropItem) as exc:  # Catch both exception types
  create_task(self.crawler.subscriber.notify(item_discard, item, exc, self.crawler.spider))
  else:
- create_task(self.crawler.subscriber.notify(item_successful, item, self.crawler.spider))
+ create_task(self.crawler.subscriber.notify(item_successful, item, self.crawler.spider))
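For orientation, here is a minimal sketch (not part of the diff) of what this change means for a project-defined pipeline. The DropItem and ItemDiscard exceptions and the process_item signature come from the code shown above; the class and its attributes are hypothetical:

from crawlo.exceptions import DropItem, ItemDiscard

class MyDedupPipeline:
    """Hypothetical user pipeline, used only to illustrate the 1.3.9 behaviour."""

    def __init__(self):
        self.seen = set()

    def process_item(self, item, spider):
        key = getattr(item, "url", None)
        if key in self.seen:
            # As of 1.3.9 the PipelineManager treats DropItem exactly like
            # ItemDiscard: the item_discard event fires and later pipelines
            # (for example a MySQL pipeline) never see the duplicate item.
            raise DropItem(f"Duplicate item: {key}")
        self.seen.add(key)
        return item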
crawlo/pipelines/redis_dedup_pipeline.py CHANGED
@@ -17,7 +17,7 @@ from typing import Optional
 
  from crawlo import Item
  from crawlo.spider import Spider
- from crawlo.exceptions import DropItem
+ from crawlo.exceptions import DropItem, ItemDiscard
  from crawlo.utils.log import get_logger
 
 
@@ -105,7 +105,7 @@ class RedisDedupPipeline:
  # If the fingerprint already exists, discard the item
  self.dropped_count += 1
  # self.logger.debug(f"Dropping duplicate item: {fingerprint[:20]}...")  # redundant log line, commented out
- raise DropItem(f"Duplicate item: {fingerprint}")
+ raise ItemDiscard(f"Duplicate item: {fingerprint}")
  else:
  # If this is a new item, continue processing
  # self.logger.debug(f"Processing new item: {fingerprint[:20]}...")  # redundant log line, commented out
crawlo-1.3.7.dist-info/METADATA → crawlo-1.3.9.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: crawlo
- Version: 1.3.7
+ Version: 1.3.9
  Summary: Crawlo 是一款基于异步IO的高性能Python爬虫框架,支持分布式抓取。
  Home-page: https://github.com/crawl-coder/Crawlo.git
  Author: crawl-coder
crawlo-1.3.7.dist-info/RECORD → crawlo-1.3.9.dist-info/RECORD CHANGED
@@ -1,5 +1,5 @@
  crawlo/__init__.py,sha256=rCeDq1OoX6mmcBxuK60eUpEp1cIg5T8Zgic3FUQAOkA,2318
- crawlo/__version__.py,sha256=ejHyy8zORCf0PfUyyfPDzlV1k5vn5lI98S7TxKzblZc,22
+ crawlo/__version__.py,sha256=w1HvwXrREPyUQwcUNaOv25LesFD0cwTBQjuG4ym_vww,22
  crawlo/cli.py,sha256=OXprmcTUbFK02ptw_Gq8Gk4-ZCU-WEMJgzU1ztgP6Bk,2327
  crawlo/config.py,sha256=dNoNyTkXLe2msQ7bZx3YTQItk1m49nIg5-g89FQDNwE,9486
  crawlo/config_validator.py,sha256=gsiLqf5swWd9ISDvoLqCdG7iSXr-ZdBPD4iT6ug1ua4,11239
@@ -78,16 +78,16 @@ crawlo/network/__init__.py,sha256=BLPERYPo22g1BXrW--wUnlolrdFUmOPjgOB8XQQJlck,39
  crawlo/network/request.py,sha256=9kV-gqb_d6aCsSBAwyzxnP9a70cAViwX8qvpyYV7Ym4,13799
  crawlo/network/response.py,sha256=EZiG4LjuIb7PxdGou4H-oSOQhec1ZdBRTkO-5fl8JTo,12701
  crawlo/pipelines/__init__.py,sha256=lrdVDjeHLNkA4_MAwI1auk_I9xfeU1SlBWXiammb6lc,616
- crawlo/pipelines/bloom_dedup_pipeline.py,sha256=omB_gHtoacbco0sn_c6HO6PHCh6xylSecK7UbJIeLq8,5661
+ crawlo/pipelines/bloom_dedup_pipeline.py,sha256=AIejBxWpRWg2SSYuMPmPy2VrG_FhFmjXYaAcRhuF7h0,5677
  crawlo/pipelines/console_pipeline.py,sha256=KABkR3J-rqO0Awox7lizxKR2XuHfVhWPiVRgIybwwu4,1248
  crawlo/pipelines/csv_pipeline.py,sha256=6FBT2AoU6iNU-5NfgWRq7-JpF9dK2nBokjxx-y4jIas,12174
- crawlo/pipelines/database_dedup_pipeline.py,sha256=Ao_5jvVPl5QikxXhPeIrcB7_3tinR9bPNRV5Fu5zfDU,7978
+ crawlo/pipelines/database_dedup_pipeline.py,sha256=qQ6w0eoGcUB26VO1lgI8GFhVjERG-GZhg5UCTYKyovg,7135
  crawlo/pipelines/json_pipeline.py,sha256=vlu1nqbD2mtqtExt9cL5nibx1CwJM1RNqd4WGjZRHAY,8367
- crawlo/pipelines/memory_dedup_pipeline.py,sha256=oIksbIrmSw9s9jMh6JJMfVbv6hzseVMV_g9S8UHQUP4,3837
+ crawlo/pipelines/memory_dedup_pipeline.py,sha256=9KuUA1S0uHWSB3qJntPdg9ifPdRXwc8ju4j9tWe8qTo,3853
  crawlo/pipelines/mongo_pipeline.py,sha256=k7gNqAO-g2MtIfArphC6z5ZzkKVRkBKcv-2ImziPFA0,5706
  crawlo/pipelines/mysql_pipeline.py,sha256=_oRfIvlEiOsTKkr4v-yPTcL8nG9O9coRmke2ZSkkKII,13871
- crawlo/pipelines/pipeline_manager.py,sha256=rtKZEgDc9oMDYaTrSSQYCc7rVJ-a65TQw4p3dWHF1SM,3116
- crawlo/pipelines/redis_dedup_pipeline.py,sha256=POYRiWAOp1pqDW9iTPJ8h3VcpLALeLrpw74MvJJqPiM,6342
+ crawlo/pipelines/pipeline_manager.py,sha256=AZPOjm7N1WcjyfIoyZpzVEchmAfZP0uFSZ_WicKL5co,3171
+ crawlo/pipelines/redis_dedup_pipeline.py,sha256=2Esl-Yh6nhNzYlUsrpvT0fV8Wx4cNNU9jpwIxqOrgCM,6358
  crawlo/queue/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  crawlo/queue/pqueue.py,sha256=j2ISmyays5t1tuI36xM6EcELwSpq2xIjAScSBWSRZms,1220
  crawlo/queue/queue_manager.py,sha256=JfkjtOD04e_OZZvEEvp3O_W3lfGXhHslZHrCgw90amY,20693
@@ -209,6 +209,7 @@ tests/test_config_validator.py,sha256=5ivB71KstHGNi2BPzcclf9hBukXEgt_B8N4l1HRjBF
  tests/test_controlled_spider_mixin.py,sha256=7t6VGWr6Hxw0xtIFyToLH8_deSagUtsdqSJpibXHMY8,2785
  tests/test_crawlo_proxy_integration.py,sha256=SvdBuZjS6N2vuvFkTnc59U5n3dHV3E4dmFayxtmjCm4,2625
  tests/test_date_tools.py,sha256=CQdAmIS6bpAdwQH9ETDH__06l2gGL7EHUQuh7mdTF-A,3930
+ tests/test_dedup_fix.py,sha256=9rFzzsDJKQbFaohzKJAlqo3Mm4wFy8-wAm3fWmw8jb4,8568
  tests/test_default_header_middleware.py,sha256=7kpONSsGMsmWgTX2pCpseme54_-82Baak0xVz6gclJk,5845
  tests/test_distributed.py,sha256=RQHUpDfRNG2x_1Cdr9DLk25IBcgapm_u0xSBMObE0Xc,1725
  tests/test_double_crawlo_fix.py,sha256=E5NxWHnQkwRTIrJGoag8G29fZqVMnsN6eCPuv17gGq0,7652
@@ -285,8 +286,8 @@ tests/verify_distributed.py,sha256=krnYYA5Qx9xXDMWc9YF5DxPSplGvawDg2n0l-3CAqoM,3
  tests/verify_log_fix.py,sha256=TD7M1R22NxLqQPufvgE-H33u9tUjyz-rSR2ayIXozRU,4225
  tests/scrapy_comparison/ofweek_scrapy.py,sha256=2Hvpi6DRTubUxBy6RyJApQxMQONPLc1zWjKTQO_i5U4,5652
  tests/scrapy_comparison/scrapy_test.py,sha256=5sw7jOHhaTmQ8bsUd1TiolAUTRQYQOe-f49HPfysqbI,5466
- crawlo-1.3.7.dist-info/METADATA,sha256=rHuHJj9-pUt4TJRUgQcNV48DM6LRk0Ag8Ek5pZZeqLk,33235
- crawlo-1.3.7.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- crawlo-1.3.7.dist-info/entry_points.txt,sha256=5HoVoTSPxI8SCa5B7pQYxLSrkOdiunyO9tqNsLMv52g,43
- crawlo-1.3.7.dist-info/top_level.txt,sha256=keG_67pbZ_wZL2dmDRA9RMaNHTaV_x_oxZ9DKNgwvR0,22
- crawlo-1.3.7.dist-info/RECORD,,
+ crawlo-1.3.9.dist-info/METADATA,sha256=a6FUc4WoaMyek0LyyW55pQIMjeAaeRgxyDnWJXvA62I,33235
+ crawlo-1.3.9.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ crawlo-1.3.9.dist-info/entry_points.txt,sha256=5HoVoTSPxI8SCa5B7pQYxLSrkOdiunyO9tqNsLMv52g,43
+ crawlo-1.3.9.dist-info/top_level.txt,sha256=keG_67pbZ_wZL2dmDRA9RMaNHTaV_x_oxZ9DKNgwvR0,22
+ crawlo-1.3.9.dist-info/RECORD,,
tests/test_dedup_fix.py ADDED
@@ -0,0 +1,221 @@
+ #!/usr/bin/env python3
+ # -*- coding: utf-8 -*-
+ """
+ Tests for the dedup pipeline exception-handling fix.
+ Verify that exceptions raised by the dedup pipelines are handled correctly, so duplicate data is not passed on to later pipelines.
+ """
+ import sys
+ import os
+ import asyncio
+ import unittest
+ from unittest.mock import Mock, patch, MagicMock
+ from collections import namedtuple
+
+ # Add the project root to the path
+ sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
+
+ # Import the modules under test
+ from crawlo.pipelines.redis_dedup_pipeline import RedisDedupPipeline
+ from crawlo.pipelines.memory_dedup_pipeline import MemoryDedupPipeline
+ from crawlo.pipelines.bloom_dedup_pipeline import BloomDedupPipeline
+ from crawlo.pipelines.pipeline_manager import PipelineManager
+ from crawlo.exceptions import ItemDiscard, DropItem
+
+
+ class TestDedupFix(unittest.TestCase):
+     """Tests for the dedup pipeline exception-handling fix"""
+
+     def setUp(self):
+         """Test setup"""
+         # Create a mock crawler object
+         self.mock_crawler = Mock()
+         self.mock_crawler.settings = Mock()
+         self.mock_crawler.settings.get = Mock(return_value="INFO")
+         self.mock_crawler.settings.get_int = Mock(return_value=0)
+         self.mock_crawler.settings.get_bool = Mock(return_value=False)
+         self.mock_crawler.subscriber = Mock()
+         self.mock_crawler.subscriber.subscribe = Mock()
+
+         # Create a simple test item (a namedtuple standing in for Item)
+         self.TestItem = namedtuple('TestItem', ['title', 'url', 'content'])
+         self.test_item = self.TestItem(
+             title="Test Title",
+             url="http://example.com",
+             content="Test content"
+         )
+
+     def test_redis_dedup_pipeline_exception_type(self):
+         """The Redis dedup pipeline raises the correct exception type"""
+         # Create a Redis dedup pipeline instance
+         with patch('redis.Redis') as mock_redis:
+             mock_redis_instance = Mock()
+             mock_redis_instance.sadd = Mock(return_value=0)  # Simulate an already-seen fingerprint
+             mock_redis.return_value = mock_redis_instance
+
+             pipeline = RedisDedupPipeline(
+                 redis_host='localhost',
+                 redis_port=6379,
+                 redis_db=0,
+                 redis_password=None,
+                 redis_key='test:key',
+                 log_level='INFO'
+             )
+
+             # Verify that ItemDiscard is raised rather than DropItem
+             with self.assertRaises(ItemDiscard) as context:
+                 pipeline.process_item(self.test_item, Mock())
+
+             # Verify the exception message
+             self.assertIn("Duplicate item:", str(context.exception))
+
+     def test_memory_dedup_pipeline_exception_type(self):
+         """The memory dedup pipeline raises the correct exception type"""
+         # Create a memory dedup pipeline instance
+         pipeline = MemoryDedupPipeline(log_level='INFO')
+
+         # Add a fingerprint to the seen set
+         fingerprint = pipeline._generate_item_fingerprint(self.test_item)
+         pipeline.seen_items.add(fingerprint)
+
+         # Verify that ItemDiscard is raised rather than DropItem
+         with self.assertRaises(ItemDiscard) as context:
+             pipeline.process_item(self.test_item, Mock())
+
+         # Verify the exception message
+         self.assertIn("重复的数据项:", str(context.exception))
+
+     def test_bloom_dedup_pipeline_exception_type(self):
+         """The Bloom dedup pipeline raises the correct exception type"""
+         # Create a Bloom dedup pipeline instance
+         pipeline = BloomDedupPipeline(log_level='INFO')
+
+         # Add a fingerprint to the Bloom filter
+         fingerprint = pipeline._generate_item_fingerprint(self.test_item)
+         pipeline.bloom_filter.add(fingerprint)
+
+         # Verify that ItemDiscard is raised rather than DropItem
+         with self.assertRaises(ItemDiscard) as context:
+             pipeline.process_item(self.test_item, Mock())
+
+         # Verify the exception message
+         self.assertIn("可能重复的数据项:", str(context.exception))
+
+     async def test_pipeline_manager_exception_handling(self):
+         """The pipeline manager handles both exception types correctly"""
+         # Create a pipeline manager instance
+         pipeline_manager = PipelineManager(self.mock_crawler)
+
+         # Create a test item
+         test_item = self.TestItem(
+             title="Test Title",
+             url="http://example.com",
+             content="Test content"
+         )
+
+         # Mock the list of pipeline methods
+         pipeline_manager.methods = []
+
+         # Create a mock dedup pipeline method (raises ItemDiscard)
+         mock_dedup_method = Mock()
+         mock_dedup_method.side_effect = ItemDiscard("测试ItemDiscard异常")
+
+         # Create a mock MySQL pipeline method
+         mock_mysql_method = Mock()
+         mock_mysql_method.return_value = test_item
+
+         pipeline_manager.methods = [mock_dedup_method, mock_mysql_method]
+
+         # Process the item
+         with patch('crawlo.pipelines.pipeline_manager.common_call') as mock_common_call, \
+              patch('crawlo.pipelines.pipeline_manager.create_task') as mock_create_task:
+
+             # Give common_call a side effect that simulates the exception
+             async def mock_common_call_func(method, *args, **kwargs):
+                 if method == mock_dedup_method:
+                     raise ItemDiscard("测试ItemDiscard异常")
+                 return test_item
+
+             mock_common_call.side_effect = mock_common_call_func
+
+             # Call the processing method
+             await pipeline_manager.process_item(test_item)
+
+             # Verify the ItemDiscard exception was handled correctly:
+             # create_task should have been called once (the item_discard event)
+             self.assertEqual(mock_create_task.call_count, 1)
+
+             # Verify the MySQL pipeline method was not called
+             mock_mysql_method.assert_not_called()
+
+     async def test_pipeline_manager_dropitem_exception_handling(self):
+         """The pipeline manager handles DropItem correctly"""
+         # Create a pipeline manager instance
+         pipeline_manager = PipelineManager(self.mock_crawler)
+
+         # Create a test item
+         test_item = self.TestItem(
+             title="Test Title",
+             url="http://example.com",
+             content="Test content"
+         )
+
+         # Mock the list of pipeline methods
+         pipeline_manager.methods = []
+
+         # Create a mock dedup pipeline method (raises DropItem)
+         mock_dedup_method = Mock()
+         mock_dedup_method.side_effect = DropItem("测试DropItem异常")
+
+         # Create a mock MySQL pipeline method
+         mock_mysql_method = Mock()
+         mock_mysql_method.return_value = test_item
+
+         pipeline_manager.methods = [mock_dedup_method, mock_mysql_method]
+
+         # Process the item
+         with patch('crawlo.pipelines.pipeline_manager.common_call') as mock_common_call, \
+              patch('crawlo.pipelines.pipeline_manager.create_task') as mock_create_task:
+
+             # Give common_call a side effect that simulates the exception
+             async def mock_common_call_func(method, *args, **kwargs):
+                 if method == mock_dedup_method:
+                     raise DropItem("测试DropItem异常")
+                 return test_item
+
+             mock_common_call.side_effect = mock_common_call_func
+
+             # Call the processing method
+             await pipeline_manager.process_item(test_item)
+
+             # Verify the DropItem exception was handled correctly:
+             # create_task should have been called once (the item_discard event)
+             self.assertEqual(mock_create_task.call_count, 1)
+
+             # Verify the MySQL pipeline method was not called
+             mock_mysql_method.assert_not_called()
+
+
+ async def main():
+     """Main test entry point"""
+     print("开始去重管道异常处理修复测试...")
+     print("=" * 50)
+
+     # Build the test suite
+     test_suite = unittest.TestLoader().loadTestsFromTestCase(TestDedupFix)
+
+     # Run the tests
+     runner = unittest.TextTestRunner(verbosity=2)
+     result = runner.run(test_suite)
+
+     print("=" * 50)
+     if result.wasSuccessful():
+         print("所有测试通过!去重管道异常处理修复验证成功")
+         return 0
+     else:
+         print("部分测试失败,请检查实现")
+         return 1
+
+
+ if __name__ == "__main__":
+     exit_code = asyncio.run(main())
+     sys.exit(exit_code)
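The new test module inserts the project root into sys.path and wires up its own __main__ entry point, so from a source checkout it should be runnable directly, for example with: python tests/test_dedup_fix.py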