aio-scrapy 2.1.4__py3-none-any.whl → 2.1.7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (112)
  1. {aio_scrapy-2.1.4.dist-info → aio_scrapy-2.1.7.dist-info}/LICENSE +1 -1
  2. aio_scrapy-2.1.7.dist-info/METADATA +147 -0
  3. aio_scrapy-2.1.7.dist-info/RECORD +134 -0
  4. {aio_scrapy-2.1.4.dist-info → aio_scrapy-2.1.7.dist-info}/WHEEL +1 -1
  5. aioscrapy/VERSION +1 -1
  6. aioscrapy/cmdline.py +438 -5
  7. aioscrapy/core/downloader/__init__.py +522 -17
  8. aioscrapy/core/downloader/handlers/__init__.py +187 -5
  9. aioscrapy/core/downloader/handlers/aiohttp.py +190 -6
  10. aioscrapy/core/downloader/handlers/curl_cffi.py +126 -5
  11. aioscrapy/core/downloader/handlers/httpx.py +135 -5
  12. aioscrapy/core/downloader/handlers/pyhttpx.py +137 -5
  13. aioscrapy/core/downloader/handlers/requests.py +120 -2
  14. aioscrapy/core/downloader/handlers/webdriver/__init__.py +2 -0
  15. aioscrapy/core/downloader/handlers/webdriver/drissionpage.py +493 -0
  16. aioscrapy/core/downloader/handlers/webdriver/driverpool.py +234 -0
  17. aioscrapy/core/downloader/handlers/webdriver/playwright.py +498 -0
  18. aioscrapy/core/engine.py +381 -20
  19. aioscrapy/core/scheduler.py +350 -36
  20. aioscrapy/core/scraper.py +509 -33
  21. aioscrapy/crawler.py +392 -10
  22. aioscrapy/db/__init__.py +149 -0
  23. aioscrapy/db/absmanager.py +212 -6
  24. aioscrapy/db/aiomongo.py +292 -10
  25. aioscrapy/db/aiomysql.py +363 -10
  26. aioscrapy/db/aiopg.py +299 -2
  27. aioscrapy/db/aiorabbitmq.py +444 -4
  28. aioscrapy/db/aioredis.py +260 -11
  29. aioscrapy/dupefilters/__init__.py +110 -5
  30. aioscrapy/dupefilters/disk.py +124 -2
  31. aioscrapy/dupefilters/redis.py +598 -32
  32. aioscrapy/exceptions.py +151 -13
  33. aioscrapy/http/__init__.py +1 -1
  34. aioscrapy/http/headers.py +237 -3
  35. aioscrapy/http/request/__init__.py +257 -11
  36. aioscrapy/http/request/form.py +83 -3
  37. aioscrapy/http/request/json_request.py +121 -9
  38. aioscrapy/http/response/__init__.py +306 -33
  39. aioscrapy/http/response/html.py +42 -3
  40. aioscrapy/http/response/text.py +496 -49
  41. aioscrapy/http/response/web_driver.py +144 -0
  42. aioscrapy/http/response/xml.py +45 -3
  43. aioscrapy/libs/downloader/defaultheaders.py +66 -2
  44. aioscrapy/libs/downloader/downloadtimeout.py +91 -2
  45. aioscrapy/libs/downloader/ja3fingerprint.py +95 -2
  46. aioscrapy/libs/downloader/retry.py +192 -6
  47. aioscrapy/libs/downloader/stats.py +142 -0
  48. aioscrapy/libs/downloader/useragent.py +93 -2
  49. aioscrapy/libs/extensions/closespider.py +166 -4
  50. aioscrapy/libs/extensions/corestats.py +151 -1
  51. aioscrapy/libs/extensions/logstats.py +145 -1
  52. aioscrapy/libs/extensions/metric.py +370 -1
  53. aioscrapy/libs/extensions/throttle.py +235 -1
  54. aioscrapy/libs/pipelines/__init__.py +345 -2
  55. aioscrapy/libs/pipelines/csv.py +242 -0
  56. aioscrapy/libs/pipelines/excel.py +545 -0
  57. aioscrapy/libs/pipelines/mongo.py +132 -0
  58. aioscrapy/libs/pipelines/mysql.py +67 -0
  59. aioscrapy/libs/pipelines/pg.py +67 -0
  60. aioscrapy/libs/spider/depth.py +141 -3
  61. aioscrapy/libs/spider/httperror.py +144 -4
  62. aioscrapy/libs/spider/offsite.py +202 -2
  63. aioscrapy/libs/spider/referer.py +396 -21
  64. aioscrapy/libs/spider/urllength.py +97 -1
  65. aioscrapy/link.py +115 -8
  66. aioscrapy/logformatter.py +199 -8
  67. aioscrapy/middleware/absmanager.py +328 -2
  68. aioscrapy/middleware/downloader.py +218 -0
  69. aioscrapy/middleware/extension.py +50 -1
  70. aioscrapy/middleware/itempipeline.py +96 -0
  71. aioscrapy/middleware/spider.py +360 -7
  72. aioscrapy/process.py +200 -0
  73. aioscrapy/proxy/__init__.py +142 -3
  74. aioscrapy/proxy/redis.py +136 -2
  75. aioscrapy/queue/__init__.py +168 -16
  76. aioscrapy/scrapyd/runner.py +124 -3
  77. aioscrapy/serializer.py +182 -2
  78. aioscrapy/settings/__init__.py +610 -128
  79. aioscrapy/settings/default_settings.py +314 -14
  80. aioscrapy/signalmanager.py +151 -20
  81. aioscrapy/signals.py +183 -1
  82. aioscrapy/spiderloader.py +165 -12
  83. aioscrapy/spiders/__init__.py +233 -6
  84. aioscrapy/statscollectors.py +312 -1
  85. aioscrapy/utils/conf.py +345 -17
  86. aioscrapy/utils/curl.py +168 -16
  87. aioscrapy/utils/decorators.py +76 -6
  88. aioscrapy/utils/deprecate.py +212 -19
  89. aioscrapy/utils/httpobj.py +55 -3
  90. aioscrapy/utils/log.py +79 -0
  91. aioscrapy/utils/misc.py +189 -21
  92. aioscrapy/utils/ossignal.py +67 -5
  93. aioscrapy/utils/project.py +165 -3
  94. aioscrapy/utils/python.py +254 -44
  95. aioscrapy/utils/reqser.py +75 -1
  96. aioscrapy/utils/request.py +173 -12
  97. aioscrapy/utils/response.py +91 -6
  98. aioscrapy/utils/signal.py +196 -14
  99. aioscrapy/utils/spider.py +51 -4
  100. aioscrapy/utils/template.py +93 -6
  101. aioscrapy/utils/tools.py +191 -17
  102. aioscrapy/utils/trackref.py +198 -12
  103. aioscrapy/utils/url.py +341 -36
  104. aio_scrapy-2.1.4.dist-info/METADATA +0 -239
  105. aio_scrapy-2.1.4.dist-info/RECORD +0 -133
  106. aioscrapy/core/downloader/handlers/playwright/__init__.py +0 -115
  107. aioscrapy/core/downloader/handlers/playwright/driverpool.py +0 -59
  108. aioscrapy/core/downloader/handlers/playwright/webdriver.py +0 -96
  109. aioscrapy/http/response/playwright.py +0 -36
  110. aioscrapy/libs/pipelines/execl.py +0 -169
  111. {aio_scrapy-2.1.4.dist-info → aio_scrapy-2.1.7.dist-info}/entry_points.txt +0 -0
  112. {aio_scrapy-2.1.4.dist-info → aio_scrapy-2.1.7.dist-info}/top_level.txt +0 -0
aioscrapy/exceptions.py CHANGED
@@ -1,8 +1,6 @@
  """
  aioscrapy core exceptions
-
- These exceptions are documented in docs/topics/exceptions.rst. Please don't add
- new exceptions here without documenting them there.
+ aioscrapy核心异常
  """


@@ -10,14 +8,23 @@ new exceptions here without documenting them there.


  class NotConfigured(Exception):
- """Indicates a missing configuration situation"""
+ """
+ Indicates a missing configuration situation.
+ 表示缺少配置的情况。
+
+ This exception is raised when a component or extension is not configured properly.
+ 当组件或扩展未正确配置时,会引发此异常。
+ """
  pass


  class _InvalidOutput(TypeError):
  """
  Indicates an invalid value has been returned by a middleware's processing method.
+ 表示中间件的处理方法返回了无效值。
+
  Internal and undocumented, it should not be raised or caught by user code.
+ 内部使用且未记录,不应由用户代码引发或捕获。
  """
  pass

@@ -26,18 +33,47 @@ class _InvalidOutput(TypeError):


  class IgnoreRequest(Exception):
- """Indicates a decision was made not to process a request"""
+ """
+ Indicates a decision was made not to process a request.
+ 表示已决定不处理请求。
+
+ This exception can be raised by downloader middlewares to indicate that a request
+ should be ignored and not processed further.
+ 下载器中间件可以引发此异常,以指示应忽略请求且不进一步处理。
+ """


  class DontCloseSpider(Exception):
- """Request the spider not to be closed yet"""
+ """
+ Request the spider not to be closed yet.
+ 请求不要立即关闭爬虫。
+
+ This exception can be raised in the spider_idle signal handler to prevent
+ the spider from being closed when it becomes idle.
+ 可以在spider_idle信号处理程序中引发此异常,以防止爬虫在变为空闲状态时被关闭。
+ """
  pass


  class CloseSpider(Exception):
- """Raise this from callbacks to request the spider to be closed"""
+ """
+ Raise this from callbacks to request the spider to be closed.
+ 从回调中引发此异常以请求关闭爬虫。
+
+ This exception can be raised from a spider callback to request the spider to be
+ closed with a specific reason.
+ 可以从爬虫回调中引发此异常,以请求以特定原因关闭爬虫。
+ """

  def __init__(self, reason='cancelled'):
+ """
+ Initialize the exception with a reason.
+ 使用原因初始化异常。
+
+ Args:
+ reason: The reason for closing the spider. Default is 'cancelled'.
+ 关闭爬虫的原因。默认为'cancelled'。
+ """
  super().__init__()
  self.reason = reason

@@ -45,11 +81,30 @@ class CloseSpider(Exception):
  class StopDownload(Exception):
  """
  Stop the download of the body for a given response.
+ 停止给定响应的正文下载。
+
  The 'fail' boolean parameter indicates whether or not the resulting partial response
  should be handled by the request errback. Note that 'fail' is a keyword-only argument.
+ 'fail'布尔参数指示是否应由请求的errback处理结果部分响应。请注意,'fail'是仅关键字参数。
+
+ This exception can be raised during the download process to stop downloading
+ the response body, for example when only the headers are needed.
+ 可以在下载过程中引发此异常以停止下载响应正文,例如当只需要头信息时。
  """

  def __init__(self, *, fail=True):
+ """
+ Initialize the exception.
+ 初始化异常。
+
+ Args:
+ fail: Whether the partial response should be treated as a failure.
+ 部分响应是否应被视为失败。
+ If True, the request's errback will be called.
+ 如果为True,将调用请求的errback。
+ If False, the request's callback will be called with the partial response.
+ 如果为False,将使用部分响应调用请求的callback。
+ """
  super().__init__()
  self.fail = fail

@@ -58,12 +113,26 @@ class StopDownload(Exception):


  class DropItem(Exception):
- """Drop item from the item pipeline"""
+ """
+ Drop item from the item pipeline.
+ 从项目管道中丢弃项目。
+
+ This exception can be raised by item pipeline components to indicate that an item
+ should not be processed further and should be dropped.
+ 项目管道组件可以引发此异常,以指示不应进一步处理项目并应将其丢弃。
+ """
  pass


  class NotSupported(Exception):
- """Indicates a feature or method is not supported"""
+ """
+ Indicates a feature or method is not supported.
+ 表示不支持某个功能或方法。
+
+ This exception is raised when attempting to use a feature or method that is
+ not supported by the current implementation or configuration.
+ 当尝试使用当前实现或配置不支持的功能或方法时,会引发此异常。
+ """
  pass


@@ -71,37 +140,106 @@ class NotSupported(Exception):


  class UsageError(Exception):
- """To indicate a command-line usage error"""
+ """
+ To indicate a command-line usage error.
+ 表示命令行使用错误。
+
+ This exception is raised when a command-line tool is used incorrectly,
+ with invalid arguments or options.
+ 当命令行工具使用不正确、带有无效参数或选项时,会引发此异常。
+ """

  def __init__(self, *a, **kw):
+ """
+ Initialize the exception.
+ 初始化异常。
+
+ Args:
+ *a: Positional arguments for the exception message.
+ 异常消息的位置参数。
+ **kw: Keyword arguments. Special keyword 'print_help' controls whether
+ to print help information when the exception is caught.
+ 关键字参数。特殊关键字'print_help'控制在捕获异常时是否打印帮助信息。
+ """
  self.print_help = kw.pop('print_help', True)
  super().__init__(*a, **kw)


  class AioScrapyDeprecationWarning(Warning):
- """Warning category for deprecated features, since the default
+ """
+ Warning category for deprecated features, since the default
  DeprecationWarning is silenced on Python 2.7+
+ 已弃用功能的警告类别,因为默认的DeprecationWarning在Python 2.7+上被静默。
+
+ This warning is used to indicate that a feature or API is deprecated and will be
+ removed in a future version of aioscrapy.
+ 此警告用于指示某个功能或API已弃用,并将在aioscrapy的未来版本中删除。
  """
  pass


  class ContractFail(AssertionError):
- """Error raised in case of a failing contract"""
+ """
+ Error raised in case of a failing contract.
+ 在合约失败的情况下引发的错误。
+
+ This exception is raised when a spider contract fails during testing.
+ 当爬虫合约在测试期间失败时,会引发此异常。
+ Spider contracts are used to test the behavior of spiders.
+ 爬虫合约用于测试爬虫的行为。
+ """
  pass


  class ProxyException(Exception):
+ """
+ Exception related to proxy usage.
+ 与代理使用相关的异常。
+
+ This exception is raised when there is an issue with proxy configuration,
+ connection, or authentication.
+ 当代理配置、连接或认证出现问题时,会引发此异常。
+ """
  pass


  class DownloadError(Exception):
- """下载页面时发生的错误"""
+ """
+ Error that occurs when downloading a page.
+ 下载页面时发生的错误。
+
+ This exception wraps the original error that occurred during the download process,
+ providing additional context and formatting.
+ 此异常包装了下载过程中发生的原始错误,提供额外的上下文和格式化。
+ """

  def __init__(self, *args, real_error=None):
+ """
+ Initialize the exception.
+ 初始化异常。
+
+ Args:
+ *args: Positional arguments for the exception message.
+ 异常消息的位置参数。
+ real_error: The original error that caused the download to fail.
+ 导致下载失败的原始错误。
+ """
  self.real_error = real_error
  super().__init__(*args)

  def __str__(self):
+ """
+ Return a string representation of the exception.
+ 返回异常的字符串表示。
+
+ If there is a real error, returns a string in the format:
+ 如果有真实错误,则返回格式为:
+ "module.ErrorClass: error message"
+
+ Returns:
+ String representation of the exception.
+ 异常的字符串表示。
+ """
  if not self.real_error:
  return "DownloadError"
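The new docstrings above describe when each exception is meant to be raised: CloseSpider from a spider callback, DropItem from an item pipeline component, DontCloseSpider from a spider_idle signal handler. A minimal usage sketch follows, assuming aioscrapy keeps Scrapy-style spider and pipeline interfaces with async callbacks; the spider and pipeline classes (and the `aioscrapy.spiders` import path) are illustrative, not taken from this diff:

```python
# Illustrative only: assumes Scrapy-style spider/pipeline interfaces with async
# callbacks (aioscrapy mirrors Scrapy's API). Class names here are hypothetical.
from aioscrapy.exceptions import CloseSpider, DropItem
from aioscrapy.spiders import Spider


class ExampleSpider(Spider):
    name = "example"
    start_urls = ["https://example.com"]

    async def parse(self, response):
        if response.status == 403:
            # CloseSpider: raised from a callback to shut the spider down
            # with a specific reason (kept on the exception as `reason`).
            raise CloseSpider(reason="blocked")
        yield {"url": response.url, "status": response.status}


class RequireStatusPipeline:
    async def process_item(self, item, spider):
        if item.get("status") != 200:
            # DropItem: raised by a pipeline component to discard the item.
            raise DropItem(f"unwanted status: {item.get('status')}")
        return item
```

DontCloseSpider would similarly be raised from a spider_idle signal handler, as its docstring above notes.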
aioscrapy/http/__init__.py CHANGED
@@ -10,6 +10,6 @@ from aioscrapy.http.request.form import FormRequest
  from aioscrapy.http.request.json_request import JsonRequest
  from aioscrapy.http.response import Response
  from aioscrapy.http.response.html import HtmlResponse
- from aioscrapy.http.response.playwright import PlaywrightResponse
+ from aioscrapy.http.response.web_driver import WebDriverResponse
  from aioscrapy.http.response.text import TextResponse
  from aioscrapy.http.response.xml import XmlResponse
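Together with entries 41 and 109 in the file list (aioscrapy/http/response/web_driver.py added, aioscrapy/http/response/playwright.py removed), this hunk shows 2.1.7 replacing the Playwright-specific response class with a generic WebDriverResponse re-export. Downstream code that imported the old name would presumably need an update along these lines (sketch, not part of the diff):

```python
# aio-scrapy 2.1.4 re-exported the Playwright response via aioscrapy.http:
#     from aioscrapy.http import PlaywrightResponse
# In 2.1.7 the same re-export points at the renamed class:
from aioscrapy.http import WebDriverResponse
```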
aioscrapy/http/headers.py CHANGED
@@ -1,57 +1,291 @@
+ """
+ HTTP headers implementation for aioscrapy.
+ aioscrapy的HTTP头部实现。
+
+ This module provides the Headers class, which is a case-insensitive dictionary
+ specifically designed for handling HTTP headers. It normalizes header names
+ to title case for consistent access regardless of the original casing.
+ 此模块提供了Headers类,这是一个专门为处理HTTP头部设计的大小写不敏感的字典。
+ 它将头部名称规范化为标题大小写,以便无论原始大小写如何都能一致地访问。
+ """
+
  from collections.abc import Mapping


  class Headers(dict):
- """Case insensitive http headers dictionary"""
+ """
+ Case insensitive HTTP headers dictionary.
+ 大小写不敏感的HTTP头部字典。
+
+ This class extends the built-in dict to provide a case-insensitive
+ dictionary specifically for HTTP headers. Header names are normalized
+ to title case (e.g., 'content-type' becomes 'Content-Type') for
+ consistent access regardless of the original casing.
+ 此类扩展了内置的dict,为HTTP头部提供了一个大小写不敏感的字典。
+ 头部名称被规范化为标题大小写(例如,'content-type'变为'Content-Type'),
+ 以便无论原始大小写如何都能一致地访问。

+ Example:
+ ```python
+ headers = Headers({'content-type': 'text/html'})
+ assert headers['Content-Type'] == 'text/html'
+ assert headers['CONTENT-TYPE'] == 'text/html'
+ assert headers['content-type'] == 'text/html'
+ ```
+ """
+
+ # Use __slots__ to optimize memory usage
+ # 使用__slots__优化内存使用
  __slots__ = ()

  def __init__(self, seq=None):
+ """
+ Initialize a Headers dictionary.
+ 初始化Headers字典。
+
+ Args:
+ seq: An optional sequence of key-value pairs or a mapping to initialize the dictionary.
+ 一个可选的键值对序列或映射,用于初始化字典。
+ """
  super().__init__()
  if seq:
  self.update(seq)

  def __getitem__(self, key):
+ """
+ Get a header value by key.
+ 通过键获取头部值。
+
+ The key is normalized to title case before lookup.
+ 在查找之前,键会被规范化为标题大小写。
+
+ Args:
+ key: The header name.
+ 头部名称。
+
+ Returns:
+ The header value.
+ 头部值。
+
+ Raises:
+ KeyError: If the header is not found.
+ 如果未找到头部。
+ """
  return dict.__getitem__(self, self.normkey(key))

  def __setitem__(self, key, value):
+ """
+ Set a header value.
+ 设置头部值。
+
+ The key is normalized to title case and the value is normalized
+ before being stored.
+ 在存储之前,键会被规范化为标题大小写,值也会被规范化。
+
+ Args:
+ key: The header name.
+ 头部名称。
+ value: The header value.
+ 头部值。
+ """
  dict.__setitem__(self, self.normkey(key), self.normvalue(value))

  def __delitem__(self, key):
+ """
+ Delete a header.
+ 删除头部。
+
+ The key is normalized to title case before lookup.
+ 在查找之前,键会被规范化为标题大小写。
+
+ Args:
+ key: The header name.
+ 头部名称。
+
+ Raises:
+ KeyError: If the header is not found.
+ 如果未找到头部。
+ """
  dict.__delitem__(self, self.normkey(key))

  def __contains__(self, key):
+ """
+ Check if a header exists.
+ 检查头部是否存在。
+
+ The key is normalized to title case before lookup.
+ 在查找之前,键会被规范化为标题大小写。
+
+ Args:
+ key: The header name.
+ 头部名称。
+
+ Returns:
+ bool: True if the header exists, False otherwise.
+ 如果头部存在则为True,否则为False。
+ """
  return dict.__contains__(self, self.normkey(key))

+ # Alias for backward compatibility
+ # 为了向后兼容的别名
  has_key = __contains__

  def __copy__(self):
+ """
+ Create a copy of the Headers dictionary.
+ 创建Headers字典的副本。
+
+ Returns:
+ Headers: A new Headers instance with the same contents.
+ 具有相同内容的新Headers实例。
+ """
  return self.__class__(self)

+ # Alias for standard dict interface
+ # 标准dict接口的别名
  copy = __copy__

  def normkey(self, key):
- """Method to normalize dictionary key access"""
+ """
+ Normalize a dictionary key for case-insensitive access.
+ 规范化字典键以进行大小写不敏感的访问。
+
+ This method converts the key to title case (e.g., 'content-type' becomes 'Content-Type').
+ 此方法将键转换为标题大小写(例如,'content-type'变为'Content-Type')。
+
+ Args:
+ key: The header name to normalize.
+ 要规范化的头部名称。
+
+ Returns:
+ str: The normalized header name.
+ 规范化的头部名称。
+ """
  return key.title()

  def normvalue(self, value):
- """Method to normalize values prior to be setted"""
+ """
+ Normalize a value before setting it in the dictionary.
+ 在将值设置到字典中之前对其进行规范化。
+
+ This method is a hook for subclasses to override. The base implementation
+ returns the value unchanged.
+ 此方法是供子类重写的钩子。基本实现返回未更改的值。
+
+ Args:
+ value: The header value to normalize.
+ 要规范化的头部值。
+
+ Returns:
+ The normalized header value.
+ 规范化的头部值。
+ """
  return value

  def get(self, key, def_val=None):
+ """
+ Get a header value by key, with a default value if not found.
+ 通过键获取头部值,如果未找到则返回默认值。
+
+ The key is normalized to title case and the default value is normalized
+ before lookup.
+ 在查找之前,键会被规范化为标题大小写,默认值也会被规范化。
+
+ Args:
+ key: The header name.
+ 头部名称。
+ def_val: The default value to return if the header is not found.
+ 如果未找到头部,则返回的默认值。
+
+ Returns:
+ The header value if found, otherwise the default value.
+ 如果找到头部值则返回它,否则返回默认值。
+ """
  return dict.get(self, self.normkey(key), self.normvalue(def_val))

  def setdefault(self, key, def_val=None):
+ """
+ Get a header value by key, or set it to a default value if not found.
+ 通过键获取头部值,如果未找到则将其设置为默认值。
+
+ The key is normalized to title case and the default value is normalized
+ before lookup or insertion.
+ 在查找或插入之前,键会被规范化为标题大小写,默认值也会被规范化。
+
+ Args:
+ key: The header name.
+ 头部名称。
+ def_val: The default value to set and return if the header is not found.
+ 如果未找到头部,则设置并返回的默认值。
+
+ Returns:
+ The header value if found, otherwise the default value.
+ 如果找到头部值则返回它,否则返回默认值。
+ """
  return dict.setdefault(self, self.normkey(key), self.normvalue(def_val))

  def update(self, seq):
+ """
+ Update the dictionary with new headers.
+ 使用新头部更新字典。
+
+ The keys and values are normalized before insertion.
+ 在插入之前,键和值会被规范化。
+
+ Args:
+ seq: A sequence of key-value pairs or a mapping to update the dictionary with.
+ 用于更新字典的键值对序列或映射。
+ """
+ # Convert mapping to items() if necessary
+ # 如果需要,将映射转换为items()
  seq = seq.items() if isinstance(seq, Mapping) else seq
+
+ # Normalize keys and values
+ # 规范化键和值
  iseq = ((self.normkey(k), self.normvalue(v)) for k, v in seq)
+
+ # Update the dictionary
+ # 更新字典
  super().update(iseq)

  @classmethod
  def fromkeys(cls, keys, value=None):
+ """
+ Create a new Headers dictionary with the specified keys and value.
+ 使用指定的键和值创建一个新的Headers字典。
+
+ Args:
+ keys: An iterable of keys.
+ 键的可迭代对象。
+ value: The value to set for all keys.
+ 为所有键设置的值。
+
+ Returns:
+ Headers: A new Headers instance with the specified keys and value.
+ 具有指定键和值的新Headers实例。
+ """
  return cls((k, value) for k in keys)

  def pop(self, key, *args):
+ """
+ Remove a header and return its value.
+ 移除头部并返回其值。
+
+ The key is normalized to title case before lookup.
+ 在查找之前,键会被规范化为标题大小写。
+
+ Args:
+ key: The header name.
+ 头部名称。
+ *args: Optional default value to return if the header is not found.
+ 如果未找到头部,则返回的可选默认值。
+
+ Returns:
+ The header value if found, otherwise the default value.
+ 如果找到头部值则返回它,否则返回默认值。
+
+ Raises:
+ KeyError: If the header is not found and no default value is provided.
+ 如果未找到头部且未提供默认值。
+ """
  return dict.pop(self, self.normkey(key), *args)
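Based solely on the methods shown in this diff, the normalization behaviour of Headers can be summarised in a short sketch (the header names below are arbitrary examples):

```python
from aioscrapy.http.headers import Headers

headers = Headers({"content-type": "text/html"})

# normkey() applies str.title(), so lookups are case-insensitive.
assert headers["CONTENT-TYPE"] == "text/html"
assert "content-type" in headers

# get(), setdefault() and pop() normalize the key the same way.
headers.setdefault("x-request-id", "abc123")
assert headers.get("X-Request-Id") == "abc123"

# update() accepts a mapping or an iterable of pairs; keys are normalized on insert.
headers.update({"ACCEPT": "application/json"})
assert headers.pop("accept") == "application/json"
```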