crawlo-1.0.2-py3-none-any.whl → crawlo-1.0.4-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of crawlo might be problematic.
- crawlo/__init__.py +9 -6
- crawlo/__version__.py +1 -2
- crawlo/core/__init__.py +2 -2
- crawlo/core/engine.py +158 -158
- crawlo/core/processor.py +40 -40
- crawlo/core/scheduler.py +57 -59
- crawlo/crawler.py +242 -222
- crawlo/downloader/__init__.py +78 -78
- crawlo/downloader/aiohttp_downloader.py +259 -96
- crawlo/downloader/httpx_downloader.py +187 -48
- crawlo/downloader/playwright_downloader.py +160 -160
- crawlo/event.py +11 -11
- crawlo/exceptions.py +64 -64
- crawlo/extension/__init__.py +31 -31
- crawlo/extension/log_interval.py +49 -49
- crawlo/extension/log_stats.py +44 -44
- crawlo/filters/__init__.py +37 -37
- crawlo/filters/aioredis_filter.py +150 -130
- crawlo/filters/memory_filter.py +202 -203
- crawlo/items/__init__.py +62 -62
- crawlo/items/items.py +118 -118
- crawlo/middleware/__init__.py +21 -21
- crawlo/middleware/default_header.py +32 -32
- crawlo/middleware/download_delay.py +28 -28
- crawlo/middleware/middleware_manager.py +140 -140
- crawlo/middleware/request_ignore.py +30 -30
- crawlo/middleware/response_code.py +18 -18
- crawlo/middleware/response_filter.py +26 -26
- crawlo/middleware/retry.py +90 -90
- crawlo/network/__init__.py +7 -7
- crawlo/network/request.py +204 -233
- crawlo/network/response.py +166 -162
- crawlo/pipelines/__init__.py +13 -13
- crawlo/pipelines/console_pipeline.py +39 -39
- crawlo/pipelines/mongo_pipeline.py +116 -116
- crawlo/pipelines/mysql_batch_pipline.py +133 -133
- crawlo/pipelines/mysql_pipeline.py +195 -195
- crawlo/pipelines/pipeline_manager.py +56 -56
- crawlo/settings/__init__.py +7 -7
- crawlo/settings/default_settings.py +94 -89
- crawlo/settings/setting_manager.py +99 -99
- crawlo/spider/__init__.py +36 -36
- crawlo/stats_collector.py +59 -47
- crawlo/subscriber.py +106 -106
- crawlo/task_manager.py +27 -27
- crawlo/templates/item_template.tmpl +21 -21
- crawlo/templates/project_template/main.py +32 -32
- crawlo/templates/project_template/setting.py +189 -189
- crawlo/templates/spider_template.tmpl +30 -30
- crawlo/utils/__init__.py +7 -7
- crawlo/utils/concurrency_manager.py +124 -124
- crawlo/utils/date_tools.py +177 -177
- crawlo/utils/func_tools.py +82 -82
- crawlo/utils/log.py +39 -39
- crawlo/utils/pqueue.py +173 -173
- crawlo/utils/project.py +59 -59
- crawlo/utils/request.py +122 -85
- crawlo/utils/system.py +11 -11
- crawlo/utils/tools.py +302 -302
- crawlo/utils/url.py +39 -39
- {crawlo-1.0.2.dist-info → crawlo-1.0.4.dist-info}/METADATA +48 -48
- crawlo-1.0.4.dist-info/RECORD +79 -0
- {crawlo-1.0.2.dist-info → crawlo-1.0.4.dist-info}/top_level.txt +1 -0
- tests/__init__.py +7 -0
- tests/baidu_spider/__init__.py +7 -0
- tests/baidu_spider/demo.py +94 -0
- tests/baidu_spider/items.py +25 -0
- tests/baidu_spider/middleware.py +49 -0
- tests/baidu_spider/pipeline.py +55 -0
- tests/baidu_spider/request_fingerprints.txt +9 -0
- tests/baidu_spider/run.py +27 -0
- tests/baidu_spider/settings.py +80 -0
- tests/baidu_spider/spiders/__init__.py +7 -0
- tests/baidu_spider/spiders/bai_du.py +61 -0
- tests/baidu_spider/spiders/sina.py +79 -0
- crawlo/filters/redis_filter.py +0 -120
- crawlo-1.0.2.dist-info/RECORD +0 -68
- {crawlo-1.0.2.dist-info → crawlo-1.0.4.dist-info}/WHEEL +0 -0
- {crawlo-1.0.2.dist-info → crawlo-1.0.4.dist-info}/entry_points.txt +0 -0
crawlo/settings/default_settings.py
CHANGED
@@ -1,89 +1,94 @@
 #!/usr/bin/python
 # -*- coding:UTF-8 -*-
 # Default project name
 import os

 PROJECT_NAME = 'crawlo'
 VERSION = 1.0
 # Concurrency
 CONCURRENCY = 8

 # Download timeout
 DOWNLOAD_TIMEOUT = 60

 INTERVAL = 5

 # --------------------------------------------------- delay ------------------------------------------------------------
 # Download delay, disabled by default
 DOWNLOAD_DELAY = 0
 # Download delay range
 RANDOM_RANGE = (0.75, 1.25)
 # Whether to randomize the delay
 RANDOMNESS = True

 # --------------------------------------------------- retry ------------------------------------------------------------
 MAX_RETRY_TIMES = 2
 IGNORE_HTTP_CODES = [403, 404]
 RETRY_HTTP_CODES = [408, 429, 500, 502, 503, 504, 522, 524]
 # Status codes allowed through
 ALLOWED_CODES = []
 # Request priority settings
 RETRY_PRIORITY = -1
 #
 DEPTH_PRIORITY = 1

 STATS_DUMP = True
 # SSL verification
 VERIFY_SSL = True
 # Whether to reuse a single session
 USE_SESSION = True
 # Log level
 LOG_LEVEL = 'DEBUG'
 # Downloader selection
 DOWNLOADER = "crawlo.downloader.aiohttp_downloader.AioHttpDownloader"  # HttpXDownloader

(lines 45-89 of 1.0.2, blank apart from two bare "#" comments, are removed; 1.0.4 appends the following settings)

+# --------------------------------------------------- Shared MySQL configuration -----------------------------------------------------
+MYSQL_HOST = '127.0.0.1'
+MYSQL_PORT = 3306
+MYSQL_USER = 'scrapy_user'
+MYSQL_PASSWORD = 'your_password'
+MYSQL_DB = 'scrapy_data'
+MYSQL_TABLE = 'crawled_data'
+
+# asyncmy-specific configuration
+MYSQL_POOL_MIN = 5   # minimum pool connections
+MYSQL_POOL_MAX = 20  # maximum pool connections
+MYSQL_ECHO = False
+
+# Batch insert size
+MYSQL_BATCH_SIZE = 100
+
+# --------------------------------------------------- Basic MongoDB configuration -----------------------------------------------------
+MONGO_URI = 'mongodb://user:password@host:27017'
+MONGO_DATABASE = 'scrapy_data'
+MONGO_COLLECTION = 'crawled_items'  # optional, defaults to the spider name
+
+# Connection pool tuning (only needed for option 2)
+MONGO_MAX_POOL_SIZE = 200  # maximum connections
+MONGO_MIN_POOL_SIZE = 20   # minimum connections to keep open
+
+# Enabled pipelines
+PIPELINES = [
+    'crawlo.pipelines.console_pipeline.ConsolePipeline',
+]
+
+
+EXTENSIONS = [
+    'crawlo.extension.log_interval.LogIntervalExtension',
+    'crawlo.extension.log_stats.LogStats'
+]
+
+# filter
+REQUEST_DIR = '.'
+FILTER_DEBUG = True
+FILTER_CLASS = 'crawlo.filters.memory_filter.MemoryFilter'
+
+# redis filter
+REDIS_TTL = 0
+CLEANUP_FP = 0
+DECODE_RESPONSES = True
+REDIS_KEY = 'request_fingerprint'
+REDIS_HOST = os.getenv('REDIS_HOST', '127.0.0.1')
+REDIS_PASSWORD = os.getenv('REDIS_PASSWORD', 'oscar&0503')
+REDIS_PORT = os.getenv('REDIS_PORT', 6379)
+REDIS_URL = f'redis://:{REDIS_PASSWORD or ""}@{REDIS_HOST}:{REDIS_PORT}/0'
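
The new database and Redis settings are plain module-level constants, so a project would normally override them in its own settings module rather than edit the package defaults. Below is a minimal sketch of such an override file; only the constant names come from the diff above, while the project name, values, and environment-variable handling are illustrative assumptions:

# my_project/settings.py -- hypothetical project overrides (not part of the package).
import os

PROJECT_NAME = 'my_project'
CONCURRENCY = 16
LOG_LEVEL = 'INFO'

# Point the MySQL pipelines at a real database instead of the shipped placeholders.
MYSQL_HOST = os.getenv('MYSQL_HOST', '127.0.0.1')
MYSQL_USER = os.getenv('MYSQL_USER', 'crawler')
MYSQL_PASSWORD = os.getenv('MYSQL_PASSWORD', '')
MYSQL_DB = 'crawl_results'
MYSQL_BATCH_SIZE = 500

# Keep Redis credentials out of source control; note that the shipped default above
# hard-codes a REDIS_PASSWORD value.
REDIS_HOST = os.getenv('REDIS_HOST', '127.0.0.1')
REDIS_PASSWORD = os.getenv('REDIS_PASSWORD', '')
REDIS_PORT = int(os.getenv('REDIS_PORT', 6379))
REDIS_URL = f'redis://:{REDIS_PASSWORD}@{REDIS_HOST}:{REDIS_PORT}/0'
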
crawlo/settings/setting_manager.py
CHANGED
@@ -1,100 +1,100 @@

Lines 1-99 are removed and re-added with identical text (line 100 is the only unchanged line), so the hunk amounts to a whole-file rewrite with no visible content change. The file reads:

#!/usr/bin/python
# -*- coding: UTF-8 -*-
import json
from copy import deepcopy
from importlib import import_module
from collections.abc import MutableMapping

from crawlo.settings import default_settings


class SettingManager(MutableMapping):

    def __init__(self, values=None):
        self.attributes = {}
        self.set_settings(default_settings)
        self.update_attributes(values)

    def get(self, key, default=None):
        """Safely fetch a value without triggering recursion."""
        value = self.attributes.get(key, default)
        return value if value is not None else default

    def get_int(self, key, default=0):
        return int(self.get(key, default=default))

    def get_float(self, key, default=0.0):
        return float(self.get(key, default=default))

    def get_bool(self, key, default=False):
        got = self.get(key, default=default)
        if isinstance(got, bool):
            return got
        if isinstance(got, (int, float)):
            return bool(got)
        got_lower = str(got).strip().lower()
        if got_lower in ('1', 'true'):
            return True
        if got_lower in ('0', 'false'):
            return False
        raise ValueError(
            f"Unsupported value for boolean setting: {got}. "
            "Supported values are: 0/1, True/False, '0'/'1', 'True'/'False' (case-insensitive)."
        )

    def get_list(self, key, default=None):
        values = self.get(key, default or [])
        if isinstance(values, str):
            return [v.strip() for v in values.split(',') if v.strip()]
        try:
            return list(values)
        except TypeError:
            return [values]

    def get_dict(self, key, default=None):
        value = self.get(key, default or {})
        if isinstance(value, str):
            value = json.loads(value)
        try:
            return dict(value)
        except TypeError:
            return value

    def set(self, key, value):
        self.attributes[key] = value

    def set_settings(self, module):
        if isinstance(module, str):
            module = import_module(module)
        for key in dir(module):
            if key.isupper():
                self.set(key, getattr(module, key))

    # Methods required by MutableMapping
    def __getitem__(self, item):
        return self.attributes[item]

    def __setitem__(self, key, value):
        self.set(key, value)

    def __delitem__(self, key):
        del self.attributes[key]

    def __iter__(self):
        return iter(self.attributes)

    def __len__(self):
        return len(self.attributes)

    def __str__(self):
        return f'<Settings: {self.attributes}>'

    __repr__ = __str__

    def update_attributes(self, attributes):
        if attributes is not None:
            for key, value in attributes.items():
                self.set(key, value)

    def copy(self):
        return deepcopy(self)
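
Because setting_manager.py ships unchanged, the coercion helpers above still define how every setting is read. A short usage sketch (the override keys and values are illustrative, not crawlo defaults):

from crawlo.settings.setting_manager import SettingManager

settings = SettingManager({'CONCURRENCY': '16', 'PIPELINES': 'pkg.PipelineA,pkg.PipelineB'})

settings.get_int('CONCURRENCY')    # 16 -- coerced from the string '16'
settings.get_bool('VERIFY_SSL')    # True -- falls through to default_settings
settings.get_list('PIPELINES')     # ['pkg.PipelineA', 'pkg.PipelineB'] -- comma string split
settings.get_dict('HEADERS', {})   # {} -- a missing key returns the supplied default

# MutableMapping support: item access, iteration and len() all work.
settings['LOG_LEVEL'] = 'INFO'
assert 'LOG_LEVEL' in settings and len(settings) > 0
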
crawlo/spider/__init__.py
CHANGED
@@ -1,36 +1,36 @@

All 36 lines are removed and re-added with identical text, so there is no visible content change. The file reads:

#!/usr/bin/python
# -*- coding:UTF-8 -*-
from crawlo import Request


class Spider(object):
    def __init__(self):
        if not hasattr(self, 'start_urls'):
            self.start_urls = []
        self.crawler = None

    @classmethod
    def create_instance(cls, crawler):
        o = cls()
        o.crawler = crawler
        return o

    def start_requests(self):
        if self.start_urls:
            for url in self.start_urls:
                yield Request(url=url, dont_filter=True)
        else:
            if hasattr(self, 'start_url') and isinstance(getattr(self, 'start_url'), str):
                yield Request(getattr(self, 'start_url'), dont_filter=True)

    def parse(self, response):
        raise NotImplementedError

    async def spider_opened(self):
        pass

    async def spider_closed(self):
        pass

    def __str__(self):
        return self.__class__.__name__
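
For reference, a minimal subclass of the unchanged Spider base class might look like the sketch below. The class name and URLs are illustrative, and yielding follow-up Requests from parse() is assumed from the Scrapy-style flow rather than stated anywhere in this diff:

from crawlo import Request
from crawlo.spider import Spider


class ExampleSpider(Spider):
    # start_requests() above turns each entry into a Request with dont_filter=True.
    start_urls = ['https://example.com/']

    def parse(self, response):
        # parse() is the only method a subclass must implement; yielding further
        # Requests here is an assumption about how the engine consumes its output.
        yield Request(url='https://example.com/page/2')
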
crawlo/stats_collector.py
CHANGED
@@ -1,47 +1,59 @@
 #!/usr/bin/python
 # -*- coding:UTF-8 -*-
 """
 # @Time   : 2025-05-17 09:57
 # @Author : crawl-coder
 # @Desc   : Stats collector
 """
 from pprint import pformat
 from crawlo.utils.log import get_logger


 class StatsCollector(object):

     def __init__(self, crawler):
         self.crawler = crawler
         self._dump = self.crawler.settings.get_bool('STATS_DUMP')
         self._stats = {}
         self.logger = get_logger(self.__class__.__name__, "INFO")

     def inc_value(self, key, count=1, start=0):
         self._stats[key] = self._stats.setdefault(key, start) + count

     def get_value(self, key, default=None):
         return self._stats.get(key, default)

     def get_stats(self):
         return self._stats

     def set_stats(self, stats):
         self._stats = stats

     def clear_stats(self):
         self._stats.clear()

-    def close_spider(self,
-        self._stats['reason'] = reason
(the old close_spider signature is shown truncated in the diff view; old trailing lines 37-47 are blank and removed)
+    def close_spider(self, spider, reason):
+        self._stats['reason'] = reason
+
+        # Prefer spider.name,
+        # then fall back to the instance's class name,
+        # and finally use a completely unknown placeholder
+        spider_name = (
+            getattr(spider, 'name', None) or
+            spider.__class__.__name__ or
+            '<Unknown>'
+        )
+
+        self._stats['spider_name'] = spider_name
+
+        if self._dump:
+            self.logger.info(f'{spider_name} stats: \n{pformat(self._stats)}')
+
+    def __getitem__(self, item):
+        return self._stats[item]
+
+    def __setitem__(self, key, value):
+        self._stats[key] = value
+
+    def __delitem__(self, key):
+        del self._stats[key]