crawlo 1.0.4__py3-none-any.whl → 1.0.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of crawlo has been flagged as possibly problematic.

Files changed (112)
  1. crawlo/__init__.py +25 -9
  2. crawlo/__version__.py +1 -1
  3. crawlo/cli.py +41 -0
  4. crawlo/commands/__init__.py +10 -0
  5. crawlo/commands/genspider.py +111 -0
  6. crawlo/commands/run.py +149 -0
  7. crawlo/commands/startproject.py +101 -0
  8. crawlo/core/__init__.py +2 -2
  9. crawlo/core/engine.py +158 -158
  10. crawlo/core/processor.py +40 -40
  11. crawlo/core/scheduler.py +57 -57
  12. crawlo/crawler.py +219 -242
  13. crawlo/downloader/__init__.py +78 -78
  14. crawlo/downloader/aiohttp_downloader.py +200 -259
  15. crawlo/downloader/cffi_downloader.py +277 -0
  16. crawlo/downloader/httpx_downloader.py +246 -187
  17. crawlo/event.py +11 -11
  18. crawlo/exceptions.py +78 -64
  19. crawlo/extension/__init__.py +31 -31
  20. crawlo/extension/log_interval.py +49 -49
  21. crawlo/extension/log_stats.py +44 -44
  22. crawlo/extension/logging_extension.py +35 -0
  23. crawlo/filters/__init__.py +37 -37
  24. crawlo/filters/aioredis_filter.py +150 -150
  25. crawlo/filters/memory_filter.py +202 -202
  26. crawlo/items/__init__.py +22 -62
  27. crawlo/items/base.py +31 -0
  28. crawlo/items/fields.py +54 -0
  29. crawlo/items/items.py +105 -119
  30. crawlo/middleware/__init__.py +21 -21
  31. crawlo/middleware/default_header.py +32 -32
  32. crawlo/middleware/download_delay.py +28 -28
  33. crawlo/middleware/middleware_manager.py +135 -140
  34. crawlo/middleware/proxy.py +246 -0
  35. crawlo/middleware/request_ignore.py +30 -30
  36. crawlo/middleware/response_code.py +18 -18
  37. crawlo/middleware/response_filter.py +26 -26
  38. crawlo/middleware/retry.py +90 -90
  39. crawlo/network/__init__.py +7 -7
  40. crawlo/network/request.py +203 -204
  41. crawlo/network/response.py +166 -166
  42. crawlo/pipelines/__init__.py +13 -13
  43. crawlo/pipelines/console_pipeline.py +39 -39
  44. crawlo/pipelines/mongo_pipeline.py +116 -116
  45. crawlo/pipelines/mysql_batch_pipline.py +273 -134
  46. crawlo/pipelines/mysql_pipeline.py +195 -195
  47. crawlo/pipelines/pipeline_manager.py +56 -56
  48. crawlo/settings/__init__.py +7 -7
  49. crawlo/settings/default_settings.py +169 -94
  50. crawlo/settings/setting_manager.py +99 -99
  51. crawlo/spider/__init__.py +41 -36
  52. crawlo/stats_collector.py +59 -59
  53. crawlo/subscriber.py +106 -106
  54. crawlo/task_manager.py +27 -27
  55. crawlo/templates/crawlo.cfg.tmpl +11 -0
  56. crawlo/templates/project/__init__.py.tmpl +4 -0
  57. crawlo/templates/project/items.py.tmpl +18 -0
  58. crawlo/templates/project/middlewares.py.tmpl +76 -0
  59. crawlo/templates/project/pipelines.py.tmpl +64 -0
  60. crawlo/templates/project/settings.py.tmpl +54 -0
  61. crawlo/templates/project/spiders/__init__.py.tmpl +6 -0
  62. crawlo/templates/spider/spider.py.tmpl +32 -0
  63. crawlo/utils/__init__.py +7 -7
  64. crawlo/utils/concurrency_manager.py +124 -124
  65. crawlo/utils/date_tools.py +233 -177
  66. crawlo/utils/db_helper.py +344 -0
  67. crawlo/utils/func_tools.py +82 -82
  68. crawlo/utils/log.py +129 -39
  69. crawlo/utils/pqueue.py +173 -173
  70. crawlo/utils/project.py +199 -59
  71. crawlo/utils/request.py +267 -122
  72. crawlo/utils/spider_loader.py +63 -0
  73. crawlo/utils/system.py +11 -11
  74. crawlo/utils/tools.py +5 -303
  75. crawlo/utils/url.py +39 -39
  76. {crawlo-1.0.4.dist-info → crawlo-1.0.6.dist-info}/METADATA +49 -48
  77. crawlo-1.0.6.dist-info/RECORD +94 -0
  78. crawlo-1.0.6.dist-info/entry_points.txt +2 -0
  79. {crawlo-1.0.4.dist-info → crawlo-1.0.6.dist-info}/top_level.txt +1 -0
  80. examples/gxb/items.py +36 -0
  81. examples/gxb/run.py +16 -0
  82. examples/gxb/settings.py +72 -0
  83. examples/gxb/spider/__init__.py +0 -0
  84. examples/gxb/spider/miit_spider.py +180 -0
  85. examples/gxb/spider/telecom_device.py +129 -0
  86. tests/__init__.py +7 -7
  87. tests/test_proxy_health_check.py +33 -0
  88. tests/test_proxy_middleware_integration.py +137 -0
  89. tests/test_proxy_providers.py +57 -0
  90. tests/test_proxy_stats.py +20 -0
  91. tests/test_proxy_strategies.py +60 -0
  92. crawlo/downloader/playwright_downloader.py +0 -161
  93. crawlo/templates/item_template.tmpl +0 -22
  94. crawlo/templates/project_template/main.py +0 -33
  95. crawlo/templates/project_template/setting.py +0 -190
  96. crawlo/templates/spider_template.tmpl +0 -31
  97. crawlo-1.0.4.dist-info/RECORD +0 -79
  98. crawlo-1.0.4.dist-info/entry_points.txt +0 -2
  99. tests/baidu_spider/__init__.py +0 -7
  100. tests/baidu_spider/demo.py +0 -94
  101. tests/baidu_spider/items.py +0 -25
  102. tests/baidu_spider/middleware.py +0 -49
  103. tests/baidu_spider/pipeline.py +0 -55
  104. tests/baidu_spider/request_fingerprints.txt +0 -9
  105. tests/baidu_spider/run.py +0 -27
  106. tests/baidu_spider/settings.py +0 -80
  107. tests/baidu_spider/spiders/__init__.py +0 -7
  108. tests/baidu_spider/spiders/bai_du.py +0 -61
  109. tests/baidu_spider/spiders/sina.py +0 -79
  110. {crawlo-1.0.4.dist-info → crawlo-1.0.6.dist-info}/WHEEL +0 -0
  111. {crawlo/templates/project_template/items → examples}/__init__.py +0 -0
  112. {crawlo/templates/project_template/spiders → examples/gxb}/__init__.py +0 -0
@@ -1,259 +1,200 @@
- #!/usr/bin/python
- # -*- coding: UTF-8 -*-
- from typing import Optional
- from aiohttp import (
-     ClientSession,
-     TCPConnector,
-     ClientTimeout,
-     TraceConfig,
-     ClientResponse, ClientError,
- )
-
- from crawlo import Response
- from crawlo.downloader import DownloaderBase
-
-
- class AioHttpDownloader(DownloaderBase):
-     """
-     High-performance asynchronous downloader
-     - Based on a persistent ClientSession
-     - Automatically recognizes high-level Request semantics (json_body/form_data)
-     - Supports GET/POST/PUT/DELETE and other methods
-     - Memory-safety protection
-     """
-
-     def __init__(self, crawler):
-         super().__init__(crawler)
-         self.session: Optional[ClientSession] = None
-         self.max_download_size: int = 0
-
-     def open(self):
-         super().open()
-         self.logger.info("Opening AioHttpDownloader")
-
-         # Read configuration
-         timeout_secs = self.crawler.settings.get_int("DOWNLOAD_TIMEOUT", 30)
-         verify_ssl = self.crawler.settings.get_bool("VERIFY_SSL", True)
-         pool_limit = self.crawler.settings.get_int("CONNECTION_POOL_LIMIT", 100)
-         pool_per_host = self.crawler.settings.get_int("CONNECTION_POOL_LIMIT_PER_HOST", 20)
-         self.max_download_size = self.crawler.settings.get_int("DOWNLOAD_MAXSIZE", 10 * 1024 * 1024)  # 10MB
-
-         # Create the connector
-         connector = TCPConnector(
-             verify_ssl=verify_ssl,
-             limit=pool_limit,
-             limit_per_host=pool_per_host,
-             ttl_dns_cache=300,
-             keepalive_timeout=15,
-             force_close=False,
-         )
-
-         # Timeout control
-         timeout = ClientTimeout(total=timeout_secs)
-
-         # Request tracing
-         trace_config = TraceConfig()
-         trace_config.on_request_start.append(self._on_request_start)
-         trace_config.on_request_end.append(self._on_request_end)
-         trace_config.on_request_exception.append(self._on_request_exception)
-
-         # Create the global session
-         self.session = ClientSession(
-             connector=connector,
-             timeout=timeout,
-             trace_configs=[trace_config],
-             auto_decompress=True,
-         )
-
-         self.logger.debug("AioHttpDownloader initialized.")
-
-     async def download(self, request) -> Optional[Response]:
-         if not self.session or self.session.closed:
-             raise RuntimeError("AioHttpDownloader session is not open.")
-
-         try:
-             # Use the generic send logic (supports all HTTP methods)
-             async with await self._send_request(self.session, request) as resp:
-                 # Safety check: prevent large response bodies from causing OOM
-                 content_length = resp.headers.get("Content-Length")
-                 if content_length and int(content_length) > self.max_download_size:
-                     raise OverflowError(f"Response too large: {content_length} > {self.max_download_size}")
-
-                 body = await resp.read()
-                 return self._structure_response(request, resp, body)
-
-         except ClientError as e:
-             self.logger.error(f"Client error for {request.url}: {e}")
-             raise
-         except Exception as e:
-             self.logger.critical(f"Unexpected error for {request.url}: {e}", exc_info=True)
-             raise
-
-     @staticmethod
-     async def _send_request(session: ClientSession, request) -> ClientResponse:
-         """
-         Send the request based on its method and high-level semantics.
-         Uses aiohttp's built-in methods (.get/.post, etc.) to avoid duplicated code.
-         """
-         method = request.method.lower()
-         if not hasattr(session, method):
-             raise ValueError(f"Unsupported HTTP method: {request.method}")
-
-         method_func = getattr(session, method)
-
-         # Build keyword arguments
-         kwargs = {
-             "headers": request.headers,
-             "cookies": request.cookies,
-             "proxy": request.proxy,
-             "allow_redirects": request.allow_redirects,
-         }
-
-         # Key optimization: if the original request used json_body, pass it via the json= parameter
-         if hasattr(request, "_json_body") and request._json_body is not None:
-             kwargs["json"] = request._json_body  # let aiohttp handle serialization and Content-Type
-         elif isinstance(request.body, (dict, list)):
-             # Backwards compatible with the old style of passing body=dict directly
-             kwargs["json"] = request.body
-         else:
-             # Everything else (form data, bytes, str) goes through data=
-             if request.body is not None:
-                 kwargs["data"] = request.body
-
-         return await method_func(request.url, **kwargs)
-
-     @staticmethod
-     def _structure_response(request, resp: ClientResponse, body: bytes) -> Response:
-         """Build the Response object required by the framework."""
-         return Response(
-             url=str(resp.url),
-             headers=dict(resp.headers),
-             status_code=resp.status,
-             body=body,
-             request=request,
-         )
-
-     # --- Request tracing logs ---
-     async def _on_request_start(self, session, trace_config_ctx, params):
-         """Callback invoked when a request starts."""
-         self.logger.debug(f"Requesting: {params.method} {params.url}")
-
-     async def _on_request_end(self, session, trace_config_ctx, params):
-         """Callback invoked when a request finishes successfully."""
-         # Correct approach: get the response object directly from params
-         response = params.response
-         self.logger.debug(
-             f"Finished: {params.method} {params.url} with status {response.status}"
-         )
-
-     async def _on_request_exception(self, session, trace_config_ctx, params):
-         """Callback invoked when a request raises an exception."""
-         # Correct approach: get the exception via the .exception attribute
-         exc = trace_config_ctx.exception
-         self.logger.warning(
-             f"Failed: {params.method} {params.url} with exception {type(exc).__name__}: {exc}"
-         )
-
-     async def close(self) -> None:
-         """Close session resources."""
-         if self.session and not self.session.closed:
-             self.logger.info("Closing AioHttpDownloader session...")
-             await self.session.close()
-             self.logger.debug("AioHttpDownloader closed.")
-
- # #!/usr/bin/python
- # # -*- coding:UTF-8 -*-
- # from typing import Optional
- # from aiohttp import ClientSession, TCPConnector, BaseConnector, ClientTimeout, ClientResponse, TraceConfig
- #
- # from crawlo import Response
- # from crawlo.downloader import DownloaderBase
- #
- #
- # class AioHttpDownloader(DownloaderBase):
- #     def __init__(self, crawler):
- #         super().__init__(crawler)
- #         self.session: Optional[ClientSession] = None
- #         self.connector: Optional[BaseConnector] = None
- #         self._verify_ssl: Optional[bool] = None
- #         self._timeout: Optional[ClientTimeout] = None
- #         self._use_session: Optional[bool] = None
- #         self.trace_config: Optional[TraceConfig] = None
- #
- #         self.request_method = {
- #             "get": self._get,
- #             "post": self._post
- #         }
- #
- #     def open(self):
- #         super().open()
- #         self._timeout = ClientTimeout(total=self.crawler.settings.get_int("DOWNLOAD_TIMEOUT"))
- #         self._verify_ssl = self.crawler.settings.get_bool("VERIFY_SSL")
- #         self._use_session = self.crawler.settings.get_bool("USE_SESSION")
- #         self.trace_config = TraceConfig()
- #         self.trace_config.on_request_start.append(self.request_start)
- #         if self._use_session:
- #             self.connector = TCPConnector(verify_ssl=self._verify_ssl)
- #             self.session = ClientSession(
- #                 connector=self.connector, timeout=self._timeout, trace_configs=[self.trace_config]
- #             )
- #
- #     async def download(self, request) -> Optional[Response]:
- #         try:
- #             if self._use_session:
- #                 response = await self.send_request(self.session, request)
- #                 body = await response.content.read()
- #             else:
- #                 connector = TCPConnector(verify_ssl=self._verify_ssl)
- #                 async with ClientSession(
- #                     connector=connector, timeout=self._timeout, trace_configs=[self.trace_config]
- #                 ) as session:
- #                     response = await self.send_request(session, request)
- #                     body = await response.content.read()
- #         except Exception as exp:
- #             self.logger.error(f"Error downloading {request}: {exp}")
- #             raise exp
- #
- #         return self.structure_response(request=request, response=response, body=body)
- #
- #     @staticmethod
- #     def structure_response(request, response, body):
- #         return Response(
- #             url=response.url,
- #             headers=dict(response.headers),
- #             status_code=response.status,
- #             body=body,
- #             request=request
- #         )
- #
- #     async def send_request(self, session, request) -> ClientResponse:
- #         return await self.request_method[request.method.lower()](session, request)
- #
- #     @staticmethod
- #     async def _get(session, request) -> ClientResponse:
- #         response = await session.get(
- #             request.url,
- #             headers=request.headers,
- #             cookies=request.cookies
- #         )
- #         return response
- #
- #     @staticmethod
- #     async def _post(session, request) -> ClientResponse:
- #         response = await session.post(
- #             request.url,
- #             data=request.body,
- #             headers=request.headers,
- #             cookies=request.cookies,
- #             proxy=request.proxy,
- #         )
- #         return response
- #
- #     async def request_start(self, _session, _trace_config_ctx, params):
- #         self.logger.debug(f"Request start: {params.url}, method:{params.method}")
- #
- #     async def close(self) -> None:
- #         if self.connector:
- #             await self.connector.close()
- #         if self.session:
- #             await self.session.close()
+ #!/usr/bin/python
+ # -*- coding: UTF-8 -*-
+ from yarl import URL
+ from typing import Optional
+ from aiohttp import (
+     ClientSession,
+     TCPConnector,
+     ClientTimeout,
+     TraceConfig,
+     ClientResponse,
+     ClientError,
+     BasicAuth,
+ )
+
+ from crawlo import Response
+ from crawlo.utils.log import get_logger
+ from crawlo.downloader import DownloaderBase
+
+
+ class AioHttpDownloader(DownloaderBase):
+     """
+     High-performance asynchronous downloader
+     - Based on a persistent ClientSession
+     - Automatically recognizes high-level Request semantics (json_body/form_data)
+     - Supports GET/POST/PUT/DELETE and other methods
+     - Supports IP proxies set by middleware (HTTP/HTTPS)
+     - Memory-safety protection
+     """
+
+     def __init__(self, crawler):
+         super().__init__(crawler)
+         self.session: Optional[ClientSession] = None
+         self.max_download_size: int = 0
+         self.logger = get_logger(self.__class__.__name__, crawler.settings.get("LOG_LEVEL"))
+
+     def open(self):
+         super().open()
+         self.logger.info("Opening AioHttpDownloader")
+
+         # Read configuration
+         timeout_secs = self.crawler.settings.get_int("DOWNLOAD_TIMEOUT", 30)
+         verify_ssl = self.crawler.settings.get_bool("VERIFY_SSL", True)
+         pool_limit = self.crawler.settings.get_int("CONNECTION_POOL_LIMIT", 100)
+         pool_per_host = self.crawler.settings.get_int("CONNECTION_POOL_LIMIT_PER_HOST", 20)
+         self.max_download_size = self.crawler.settings.get_int("DOWNLOAD_MAXSIZE", 10 * 1024 * 1024)  # 10MB
+
+         # Create the connector
+         connector = TCPConnector(
+             verify_ssl=verify_ssl,
+             limit=pool_limit,
+             limit_per_host=pool_per_host,
+             ttl_dns_cache=300,
+             keepalive_timeout=15,
+             force_close=False,
+         )
+
+         # Timeout control
+         timeout = ClientTimeout(total=timeout_secs)
+
+         # Request tracing
+         trace_config = TraceConfig()
+         trace_config.on_request_start.append(self._on_request_start)
+         trace_config.on_request_end.append(self._on_request_end)
+         trace_config.on_request_exception.append(self._on_request_exception)
+
+         # Create the global session
+         self.session = ClientSession(
+             connector=connector,
+             timeout=timeout,
+             trace_configs=[trace_config],
+             auto_decompress=True,
+         )
+
+         self.logger.debug("AioHttpDownloader initialized.")
+
+     async def download(self, request) -> Optional[Response]:
+         if not self.session or self.session.closed:
+             raise RuntimeError("AioHttpDownloader session is not open.")
+
+         try:
+             # Use the generic send logic (supports all HTTP methods)
+             async with await self._send_request(self.session, request) as resp:
+                 # Safety check: prevent large response bodies from causing OOM
+                 content_length = resp.headers.get("Content-Length")
+                 if content_length and int(content_length) > self.max_download_size:
+                     raise OverflowError(f"Response too large: {content_length} > {self.max_download_size}")
+
+                 body = await resp.read()
+                 return self._structure_response(request, resp, body)
+
+         except ClientError as e:
+             self.logger.error(f"Client error for {request.url}: {e}")
+             raise
+         except Exception as e:
+             self.logger.critical(f"Unexpected error for {request.url}: {e}", exc_info=True)
+             raise
+
+     @staticmethod
+     async def _send_request(session: ClientSession, request) -> ClientResponse:
+         """
+         Send the request based on its method and high-level semantics.
+         Supports a proxy set by middleware, in either of the following formats:
+         - str: "http://user:pass@host:port"
+         - dict: {"http": "...", "https": "..."} (automatically picks the http or https field)
+         """
+         method = request.method.lower()
+         if not hasattr(session, method):
+             raise ValueError(f"Unsupported HTTP method: {request.method}")
+
+         method_func = getattr(session, method)
+
+         # Build keyword arguments
+         kwargs = {
+             "headers": request.headers,
+             "cookies": request.cookies,
+             "allow_redirects": request.allow_redirects,
+         }
+
+         # === Handle the proxy ===
+         proxy = getattr(request, "proxy", None)
+         proxy_auth = None
+
+         if proxy:
+             # Support the dict format: {"http": "http://...", "https": "http://..."}
+             if isinstance(proxy, dict):
+                 # Prefer https, fall back to http
+                 proxy = proxy.get("https") or proxy.get("http")
+
+             if not isinstance(proxy, (str, URL)):
+                 raise ValueError(f"proxy must be str or URL, got {type(proxy)}")
+
+             try:
+                 proxy_url = URL(proxy)
+                 if proxy_url.scheme not in ("http", "https"):
+                     raise ValueError(f"Unsupported proxy scheme: {proxy_url.scheme}, only HTTP/HTTPS supported.")
+
+                 # Extract authentication credentials
+                 if proxy_url.user and proxy_url.password:
+                     proxy_auth = BasicAuth(proxy_url.user, proxy_url.password)
+                     # URL with the username/password stripped
+                     proxy = str(proxy_url.with_user(None))
+                 else:
+                     proxy = str(proxy_url)
+
+                 kwargs["proxy"] = proxy
+                 if proxy_auth:
+                     kwargs["proxy_auth"] = proxy_auth
+
+             except Exception as e:
+                 raise ValueError(f"Invalid proxy URL: {proxy}") from e
+
+         # === Handle the request body ===
+         if hasattr(request, "_json_body") and request._json_body is not None:
+             kwargs["json"] = request._json_body
+         elif isinstance(request.body, (dict, list)):
+             kwargs["json"] = request.body
+         else:
+             if request.body is not None:
+                 kwargs["data"] = request.body
+
+         return await method_func(request.url, **kwargs)
+
+     @staticmethod
+     def _structure_response(request, resp: ClientResponse, body: bytes) -> Response:
+         """Build the Response object required by the framework."""
+         return Response(
+             url=str(resp.url),
+             headers=dict(resp.headers),
+             status_code=resp.status,
+             body=body,
+             request=request,
+         )
+
+     # --- Request tracing logs ---
+     async def _on_request_start(self, session, trace_config_ctx, params):
+         """Callback invoked when a request starts."""
+         proxy = getattr(params, "proxy", None)
+         proxy_info = f" via {proxy}" if proxy else ""
+         self.logger.debug(f"Requesting: {params.method} {params.url}{proxy_info}")
+
+     async def _on_request_end(self, session, trace_config_ctx, params):
+         """Callback invoked when a request finishes successfully."""
+         response = params.response
+         self.logger.debug(
+             f"Finished: {params.method} {params.url} with status {response.status}"
+         )
+
+     async def _on_request_exception(self, session, trace_config_ctx, params):
+         """Callback invoked when a request raises an exception."""
+         exc = params.exception
+         self.logger.warning(
+             f"Failed: {params.method} {params.url} with exception {type(exc).__name__}: {exc}"
+         )
+
+     async def close(self) -> None:
+         """Close session resources."""
+         if self.session and not self.session.closed:
+             self.logger.info("Closing AioHttpDownloader session...")
+             await self.session.close()
+             self.logger.debug("AioHttpDownloader closed.")
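
The main behavioral change in this file is the proxy handling added to _send_request in 1.0.6: a middleware-supplied proxy value (plain string, or a dict with http/https keys, optionally carrying credentials) is normalized into the proxy and proxy_auth arguments that aiohttp expects. The following is a minimal, self-contained sketch of that normalization step; it is not part of the crawlo package, and the helper name normalize_proxy and the example proxy values are illustrative only.

#!/usr/bin/python
# Sketch only: mirrors the 1.0.6 proxy-normalization logic shown in the diff above.
from typing import Optional, Tuple, Union

from aiohttp import BasicAuth
from yarl import URL


def normalize_proxy(proxy: Union[str, dict, URL, None]) -> Tuple[Optional[str], Optional[BasicAuth]]:
    """Return (proxy_url, proxy_auth); both are None when no proxy is configured."""
    if not proxy:
        return None, None
    if isinstance(proxy, dict):
        # Prefer the https entry, then fall back to http (same order as the downloader)
        proxy = proxy.get("https") or proxy.get("http")
    if not isinstance(proxy, (str, URL)):
        raise ValueError(f"proxy must be str or URL, got {type(proxy)}")
    proxy_url = URL(proxy)
    if proxy_url.scheme not in ("http", "https"):
        raise ValueError(f"Unsupported proxy scheme: {proxy_url.scheme}")
    if proxy_url.user and proxy_url.password:
        # Credentials move into BasicAuth; the proxy URL is passed without them
        return str(proxy_url.with_user(None)), BasicAuth(proxy_url.user, proxy_url.password)
    return str(proxy_url), None


if __name__ == "__main__":
    # Illustrative values only: a dict-style proxy with embedded credentials
    url, auth = normalize_proxy({"https": "http://user:pass@127.0.0.1:8888"})
    print(url)   # http://127.0.0.1:8888
    print(auth)  # BasicAuth with login='user' and password='pass'

Splitting credentials out of the URL matters because aiohttp takes proxy authentication through the separate proxy_auth argument; passing the raw user:pass@host form through unchanged is not reliable across proxy types.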