aio-scrapy 2.1.4__py3-none-any.whl → 2.1.6__py3-none-any.whl
This diff represents the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.
- {aio_scrapy-2.1.4.dist-info → aio_scrapy-2.1.6.dist-info}/LICENSE +1 -1
- {aio_scrapy-2.1.4.dist-info → aio_scrapy-2.1.6.dist-info}/METADATA +53 -41
- aio_scrapy-2.1.6.dist-info/RECORD +134 -0
- {aio_scrapy-2.1.4.dist-info → aio_scrapy-2.1.6.dist-info}/WHEEL +1 -1
- aioscrapy/VERSION +1 -1
- aioscrapy/cmdline.py +438 -5
- aioscrapy/core/downloader/__init__.py +522 -17
- aioscrapy/core/downloader/handlers/__init__.py +187 -5
- aioscrapy/core/downloader/handlers/aiohttp.py +187 -3
- aioscrapy/core/downloader/handlers/curl_cffi.py +124 -3
- aioscrapy/core/downloader/handlers/httpx.py +133 -3
- aioscrapy/core/downloader/handlers/pyhttpx.py +132 -3
- aioscrapy/core/downloader/handlers/requests.py +120 -2
- aioscrapy/core/downloader/handlers/webdriver/__init__.py +2 -0
- aioscrapy/core/downloader/handlers/webdriver/drissionpage.py +493 -0
- aioscrapy/core/downloader/handlers/webdriver/driverpool.py +234 -0
- aioscrapy/core/downloader/handlers/webdriver/playwright.py +498 -0
- aioscrapy/core/engine.py +381 -20
- aioscrapy/core/scheduler.py +350 -36
- aioscrapy/core/scraper.py +509 -33
- aioscrapy/crawler.py +392 -10
- aioscrapy/db/__init__.py +149 -0
- aioscrapy/db/absmanager.py +212 -6
- aioscrapy/db/aiomongo.py +292 -10
- aioscrapy/db/aiomysql.py +363 -10
- aioscrapy/db/aiopg.py +299 -2
- aioscrapy/db/aiorabbitmq.py +444 -4
- aioscrapy/db/aioredis.py +260 -11
- aioscrapy/dupefilters/__init__.py +110 -5
- aioscrapy/dupefilters/disk.py +124 -2
- aioscrapy/dupefilters/redis.py +598 -32
- aioscrapy/exceptions.py +151 -13
- aioscrapy/http/__init__.py +1 -1
- aioscrapy/http/headers.py +237 -3
- aioscrapy/http/request/__init__.py +257 -11
- aioscrapy/http/request/form.py +83 -3
- aioscrapy/http/request/json_request.py +121 -9
- aioscrapy/http/response/__init__.py +306 -33
- aioscrapy/http/response/html.py +42 -3
- aioscrapy/http/response/text.py +496 -49
- aioscrapy/http/response/web_driver.py +144 -0
- aioscrapy/http/response/xml.py +45 -3
- aioscrapy/libs/downloader/defaultheaders.py +66 -2
- aioscrapy/libs/downloader/downloadtimeout.py +91 -2
- aioscrapy/libs/downloader/ja3fingerprint.py +95 -2
- aioscrapy/libs/downloader/retry.py +192 -6
- aioscrapy/libs/downloader/stats.py +142 -0
- aioscrapy/libs/downloader/useragent.py +93 -2
- aioscrapy/libs/extensions/closespider.py +166 -4
- aioscrapy/libs/extensions/corestats.py +151 -1
- aioscrapy/libs/extensions/logstats.py +145 -1
- aioscrapy/libs/extensions/metric.py +370 -1
- aioscrapy/libs/extensions/throttle.py +235 -1
- aioscrapy/libs/pipelines/__init__.py +345 -2
- aioscrapy/libs/pipelines/csv.py +242 -0
- aioscrapy/libs/pipelines/excel.py +545 -0
- aioscrapy/libs/pipelines/mongo.py +132 -0
- aioscrapy/libs/pipelines/mysql.py +67 -0
- aioscrapy/libs/pipelines/pg.py +67 -0
- aioscrapy/libs/spider/depth.py +141 -3
- aioscrapy/libs/spider/httperror.py +144 -4
- aioscrapy/libs/spider/offsite.py +202 -2
- aioscrapy/libs/spider/referer.py +396 -21
- aioscrapy/libs/spider/urllength.py +97 -1
- aioscrapy/link.py +115 -8
- aioscrapy/logformatter.py +199 -8
- aioscrapy/middleware/absmanager.py +328 -2
- aioscrapy/middleware/downloader.py +218 -0
- aioscrapy/middleware/extension.py +50 -1
- aioscrapy/middleware/itempipeline.py +96 -0
- aioscrapy/middleware/spider.py +360 -7
- aioscrapy/process.py +200 -0
- aioscrapy/proxy/__init__.py +142 -3
- aioscrapy/proxy/redis.py +136 -2
- aioscrapy/queue/__init__.py +168 -16
- aioscrapy/scrapyd/runner.py +124 -3
- aioscrapy/serializer.py +182 -2
- aioscrapy/settings/__init__.py +610 -128
- aioscrapy/settings/default_settings.py +313 -13
- aioscrapy/signalmanager.py +151 -20
- aioscrapy/signals.py +183 -1
- aioscrapy/spiderloader.py +165 -12
- aioscrapy/spiders/__init__.py +233 -6
- aioscrapy/statscollectors.py +312 -1
- aioscrapy/utils/conf.py +345 -17
- aioscrapy/utils/curl.py +168 -16
- aioscrapy/utils/decorators.py +76 -6
- aioscrapy/utils/deprecate.py +212 -19
- aioscrapy/utils/httpobj.py +55 -3
- aioscrapy/utils/log.py +79 -0
- aioscrapy/utils/misc.py +189 -21
- aioscrapy/utils/ossignal.py +67 -5
- aioscrapy/utils/project.py +165 -3
- aioscrapy/utils/python.py +254 -44
- aioscrapy/utils/reqser.py +75 -1
- aioscrapy/utils/request.py +173 -12
- aioscrapy/utils/response.py +91 -6
- aioscrapy/utils/signal.py +196 -14
- aioscrapy/utils/spider.py +51 -4
- aioscrapy/utils/template.py +93 -6
- aioscrapy/utils/tools.py +191 -17
- aioscrapy/utils/trackref.py +198 -12
- aioscrapy/utils/url.py +341 -36
- aio_scrapy-2.1.4.dist-info/RECORD +0 -133
- aioscrapy/core/downloader/handlers/playwright/__init__.py +0 -115
- aioscrapy/core/downloader/handlers/playwright/driverpool.py +0 -59
- aioscrapy/core/downloader/handlers/playwright/webdriver.py +0 -96
- aioscrapy/http/response/playwright.py +0 -36
- aioscrapy/libs/pipelines/execl.py +0 -169
- {aio_scrapy-2.1.4.dist-info → aio_scrapy-2.1.6.dist-info}/entry_points.txt +0 -0
- {aio_scrapy-2.1.4.dist-info → aio_scrapy-2.1.6.dist-info}/top_level.txt +0 -0
aioscrapy/statscollectors.py
CHANGED
@@ -1,5 +1,23 @@
 """
-
+AioScrapy Statistics Collection System
+AioScrapy统计数据收集系统
+
+This module provides classes for collecting and managing statistics during the
+scraping process. Statistics can include counters for items scraped, pages downloaded,
+processing times, etc. These statistics are useful for monitoring the performance
+and behavior of spiders.
+此模块提供了用于在抓取过程中收集和管理统计数据的类。
+统计数据可以包括已抓取项目的计数器、已下载页面、处理时间等。
+这些统计数据对于监控爬虫的性能和行为很有用。
+
+The module includes:
+模块包括:
+- StatsCollector: Base class for all statistics collectors
+  所有统计收集器的基类
+- MemoryStatsCollector: Stores stats in memory
+  在内存中存储统计数据
+- DummyStatsCollector: No-op collector that doesn't store anything
+  不存储任何内容的空操作收集器
 """
 import pprint
 
@@ -7,74 +25,367 @@ from aioscrapy.utils.log import logger
 
 
 class StatsCollector:
+    """
+    Base class for all statistics collectors in AioScrapy.
+    AioScrapy中所有统计收集器的基类。
+
+    This class provides methods to store, retrieve, and manipulate statistics
+    during the scraping process. It's designed to be extended by specific
+    implementations that determine how statistics are stored and persisted.
+    此类提供了在抓取过程中存储、检索和操作统计数据的方法。
+    它被设计为可由确定统计数据如何存储和持久化的特定实现扩展。
+    """
 
     def __init__(self, crawler):
+        """
+        Initialize the stats collector.
+        初始化统计收集器。
+
+        Args:
+            crawler: The crawler instance that uses this stats collector.
+                使用此统计收集器的爬虫实例。
+        """
+        # Whether to dump stats when the spider closes
+        # 爬虫关闭时是否转储统计数据
         self._dump = crawler.settings.getbool('STATS_DUMP')
+        # Dictionary to store the stats
+        # 用于存储统计数据的字典
         self._stats = {}
 
     def get_value(self, key, default=None, spider=None):
+        """
+        Get the value for a given stats key.
+        获取给定统计键的值。
+
+        Args:
+            key: The stats key to get the value for.
+                要获取值的统计键。
+            default: The default value to return if the key is not found.
+                如果未找到键,则返回的默认值。
+            spider: The spider instance (optional, not used in the base implementation).
+                爬虫实例(可选,在基本实现中未使用)。
+
+        Returns:
+            The value for the given stats key, or the default value if the key is not found.
+            给定统计键的值,如果未找到键,则为默认值。
+        """
         return self._stats.get(key, default)
 
     def get_stats(self, spider=None):
+        """
+        Get all stats.
+        获取所有统计数据。
+
+        Args:
+            spider: The spider instance (optional, not used in the base implementation).
+                爬虫实例(可选,在基本实现中未使用)。
+
+        Returns:
+            dict: A dictionary containing all stats.
+            包含所有统计数据的字典。
+        """
         return self._stats
 
     def set_value(self, key, value, spider=None):
+        """
+        Set the value for a given stats key.
+        设置给定统计键的值。
+
+        Args:
+            key: The stats key to set the value for.
+                要设置值的统计键。
+            value: The value to set.
+                要设置的值。
+            spider: The spider instance (optional, not used in the base implementation).
+                爬虫实例(可选,在基本实现中未使用)。
+        """
         self._stats[key] = value
 
     def set_stats(self, stats, spider=None):
+        """
+        Set all stats at once.
+        一次设置所有统计数据。
+
+        Args:
+            stats: A dictionary of stats to set.
+                要设置的统计数据字典。
+            spider: The spider instance (optional, not used in the base implementation).
+                爬虫实例(可选,在基本实现中未使用)。
+        """
         self._stats = stats
 
     def inc_value(self, key, count=1, start=0, spider=None):
+        """
+        Increment the value for a given stats key.
+        增加给定统计键的值。
+
+        If the key does not exist, it is set to the start value plus the count.
+        如果键不存在,则将其设置为起始值加上计数。
+
+        Args:
+            key: The stats key to increment.
+                要增加的统计键。
+            count: The amount to increment by. Defaults to 1.
+                要增加的数量。默认为1。
+            start: The starting value if the key does not exist. Defaults to 0.
+                如果键不存在,则为起始值。默认为0。
+            spider: The spider instance (optional, not used in the base implementation).
+                爬虫实例(可选,在基本实现中未使用)。
+        """
         d = self._stats
         d[key] = d.setdefault(key, start) + count
 
     def max_value(self, key, value, spider=None):
+        """
+        Set the maximum value for a given stats key.
+        设置给定统计键的最大值。
+
+        If the key does not exist, it is set to the given value.
+        If it exists, it is set to the maximum of the current value and the given value.
+        如果键不存在,则将其设置为给定值。
+        如果存在,则将其设置为当前值和给定值的最大值。
+
+        Args:
+            key: The stats key to set the maximum value for.
+                要设置最大值的统计键。
+            value: The value to compare with the current value.
+                要与当前值比较的值。
+            spider: The spider instance (optional, not used in the base implementation).
+                爬虫实例(可选,在基本实现中未使用)。
+        """
         self._stats[key] = max(self._stats.setdefault(key, value), value)
 
     def min_value(self, key, value, spider=None):
+        """
+        Set the minimum value for a given stats key.
+        设置给定统计键的最小值。
+
+        If the key does not exist, it is set to the given value.
+        If it exists, it is set to the minimum of the current value and the given value.
+        如果键不存在,则将其设置为给定值。
+        如果存在,则将其设置为当前值和给定值的最小值。
+
+        Args:
+            key: The stats key to set the minimum value for.
+                要设置最小值的统计键。
+            value: The value to compare with the current value.
+                要与当前值比较的值。
+            spider: The spider instance (optional, not used in the base implementation).
+                爬虫实例(可选,在基本实现中未使用)。
+        """
         self._stats[key] = min(self._stats.setdefault(key, value), value)
 
     def clear_stats(self, spider=None):
+        """
+        Clear all stats.
+        清除所有统计数据。
+
+        Args:
+            spider: The spider instance (optional, not used in the base implementation).
+                爬虫实例(可选,在基本实现中未使用)。
+        """
         self._stats.clear()
 
     def open_spider(self, spider):
+        """
+        Called when a spider is opened.
+        当爬虫打开时调用。
+
+        This method can be overridden by subclasses to perform initialization
+        when a spider is opened.
+        此方法可由子类覆盖,以在爬虫打开时执行初始化。
+
+        Args:
+            spider: The spider instance that was opened.
+                已打开的爬虫实例。
+        """
         pass
 
     def close_spider(self, spider, reason):
+        """
+        Called when a spider is closed.
+        当爬虫关闭时调用。
+
+        If STATS_DUMP setting is True, this method dumps the stats to the log.
+        It also calls _persist_stats to allow subclasses to persist the stats.
+        如果STATS_DUMP设置为True,此方法会将统计数据转储到日志。
+        它还调用_persist_stats以允许子类持久化统计数据。
+
+        Args:
+            spider: The spider instance that was closed.
+                已关闭的爬虫实例。
+            reason: A string describing the reason why the spider was closed.
+                描述爬虫关闭原因的字符串。
+        """
         if self._dump:
             logger.info("Dumping aioscrapy stats:\n" + pprint.pformat(self._stats))
         self._persist_stats(self._stats, spider)
 
     def _persist_stats(self, stats, spider):
+        """
+        Persist the given stats.
+        持久化给定的统计数据。
+
+        This method is called by close_spider and can be overridden by subclasses
+        to persist the stats in a custom way.
+        此方法由close_spider调用,可由子类覆盖以自定义方式持久化统计数据。
+
+        Args:
+            stats: The stats to persist.
+                要持久化的统计数据。
+            spider: The spider instance that the stats belong to.
+                统计数据所属的爬虫实例。
+        """
         pass
 
 
 class MemoryStatsCollector(StatsCollector):
+    """
+    Stats collector that keeps stats in memory.
+    将统计数据保存在内存中的统计收集器。
+
+    This collector stores stats in memory and persists them in a dictionary
+    keyed by spider name. This allows retrieving stats for a spider even
+    after it has been closed.
+    此收集器将统计数据存储在内存中,并将其持久化在以爬虫名称为键的字典中。
+    这允许即使在爬虫关闭后也能检索爬虫的统计数据。
+    """
 
     def __init__(self, crawler):
+        """
+        Initialize the memory stats collector.
+        初始化内存统计收集器。
+
+        Args:
+            crawler: The crawler instance that uses this stats collector.
+                使用此统计收集器的爬虫实例。
+        """
         super().__init__(crawler)
+        # Dictionary to store stats for each spider by name
+        # 用于按名称存储每个爬虫的统计数据的字典
         self.spider_stats = {}
 
     def _persist_stats(self, stats, spider):
+        """
+        Persist stats in memory.
+        在内存中持久化统计数据。
+
+        This method stores the stats in the spider_stats dictionary,
+        using the spider's name as the key.
+        此方法将统计数据存储在spider_stats字典中,
+        使用爬虫的名称作为键。
+
+        Args:
+            stats: The stats to persist.
+                要持久化的统计数据。
+            spider: The spider instance that the stats belong to.
+                统计数据所属的爬虫实例。
+        """
         self.spider_stats[spider.name] = stats
 
 
 class DummyStatsCollector(StatsCollector):
+    """
+    Stats collector that does nothing.
+    不执行任何操作的统计收集器。
+
+    This collector is a no-op implementation that doesn't actually store any stats.
+    It's useful when stats collection is not needed or should be disabled for
+    performance reasons.
+    此收集器是一个无操作实现,实际上不存储任何统计数据。
+    当不需要统计数据收集或出于性能原因应禁用统计数据收集时,它很有用。
+    """
 
     def get_value(self, key, default=None, spider=None):
+        """
+        Always returns the default value.
+        始终返回默认值。
+
+        Args:
+            key: The stats key (ignored).
+                统计键(被忽略)。
+            default: The default value to return.
+                要返回的默认值。
+            spider: The spider instance (ignored).
+                爬虫实例(被忽略)。
+
+        Returns:
+            The default value.
+            默认值。
+        """
         return default
 
     def set_value(self, key, value, spider=None):
+        """
+        Does nothing.
+        不执行任何操作。
+
+        Args:
+            key: The stats key (ignored).
+                统计键(被忽略)。
+            value: The value to set (ignored).
+                要设置的值(被忽略)。
+            spider: The spider instance (ignored).
+                爬虫实例(被忽略)。
+        """
         pass
 
     def set_stats(self, stats, spider=None):
+        """
+        Does nothing.
+        不执行任何操作。
+
+        Args:
+            stats: The stats to set (ignored).
+                要设置的统计数据(被忽略)。
+            spider: The spider instance (ignored).
+                爬虫实例(被忽略)。
+        """
         pass
 
     def inc_value(self, key, count=1, start=0, spider=None):
+        """
+        Does nothing.
+        不执行任何操作。
+
+        Args:
+            key: The stats key (ignored).
+                统计键(被忽略)。
+            count: The amount to increment by (ignored).
+                要增加的数量(被忽略)。
+            start: The starting value (ignored).
+                起始值(被忽略)。
+            spider: The spider instance (ignored).
+                爬虫实例(被忽略)。
+        """
         pass
 
     def max_value(self, key, value, spider=None):
+        """
+        Does nothing.
+        不执行任何操作。
+
+        Args:
+            key: The stats key (ignored).
+                统计键(被忽略)。
+            value: The value to compare (ignored).
+                要比较的值(被忽略)。
+            spider: The spider instance (ignored).
+                爬虫实例(被忽略)。
+        """
         pass
 
     def min_value(self, key, value, spider=None):
+        """
+        Does nothing.
+        不执行任何操作。
+
+        Args:
+            key: The stats key (ignored).
+                统计键(被忽略)。
+            value: The value to compare (ignored).
+                要比较的值(被忽略)。
+            spider: The spider instance (ignored).
+                爬虫实例(被忽略)。
+        """
         pass