crawlo-1.1.1-py3-none-any.whl → crawlo-1.1.3-py3-none-any.whl
This diff shows the changes between publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.
Potentially problematic release: this version of crawlo might be problematic.
- crawlo/__init__.py +34 -33
- crawlo/__version__.py +1 -1
- crawlo/cli.py +40 -40
- crawlo/commands/__init__.py +13 -13
- crawlo/commands/check.py +594 -594
- crawlo/commands/genspider.py +152 -126
- crawlo/commands/list.py +156 -147
- crawlo/commands/run.py +285 -285
- crawlo/commands/startproject.py +196 -111
- crawlo/commands/stats.py +188 -188
- crawlo/commands/utils.py +187 -0
- crawlo/config.py +280 -0
- crawlo/core/__init__.py +2 -2
- crawlo/core/engine.py +171 -158
- crawlo/core/enhanced_engine.py +190 -0
- crawlo/core/processor.py +40 -40
- crawlo/core/scheduler.py +166 -57
- crawlo/crawler.py +1028 -495
- crawlo/downloader/__init__.py +242 -78
- crawlo/downloader/aiohttp_downloader.py +212 -199
- crawlo/downloader/cffi_downloader.py +251 -241
- crawlo/downloader/httpx_downloader.py +259 -246
- crawlo/event.py +11 -11
- crawlo/exceptions.py +82 -78
- crawlo/extension/__init__.py +31 -31
- crawlo/extension/log_interval.py +49 -49
- crawlo/extension/log_stats.py +44 -44
- crawlo/extension/logging_extension.py +34 -34
- crawlo/filters/__init__.py +154 -37
- crawlo/filters/aioredis_filter.py +242 -150
- crawlo/filters/memory_filter.py +269 -202
- crawlo/items/__init__.py +23 -23
- crawlo/items/base.py +21 -21
- crawlo/items/fields.py +53 -53
- crawlo/items/items.py +104 -104
- crawlo/middleware/__init__.py +21 -21
- crawlo/middleware/default_header.py +32 -32
- crawlo/middleware/download_delay.py +28 -28
- crawlo/middleware/middleware_manager.py +135 -135
- crawlo/middleware/proxy.py +248 -245
- crawlo/middleware/request_ignore.py +30 -30
- crawlo/middleware/response_code.py +18 -18
- crawlo/middleware/response_filter.py +26 -26
- crawlo/middleware/retry.py +125 -90
- crawlo/mode_manager.py +201 -0
- crawlo/network/__init__.py +21 -7
- crawlo/network/request.py +311 -203
- crawlo/network/response.py +271 -166
- crawlo/pipelines/__init__.py +22 -13
- crawlo/pipelines/bloom_dedup_pipeline.py +157 -0
- crawlo/pipelines/console_pipeline.py +39 -39
- crawlo/pipelines/csv_pipeline.py +317 -0
- crawlo/pipelines/database_dedup_pipeline.py +225 -0
- crawlo/pipelines/json_pipeline.py +219 -0
- crawlo/pipelines/memory_dedup_pipeline.py +116 -0
- crawlo/pipelines/mongo_pipeline.py +116 -116
- crawlo/pipelines/mysql_pipeline.py +195 -195
- crawlo/pipelines/pipeline_manager.py +56 -56
- crawlo/pipelines/redis_dedup_pipeline.py +163 -0
- crawlo/project.py +153 -153
- crawlo/queue/__init__.py +0 -0
- crawlo/queue/pqueue.py +37 -0
- crawlo/queue/queue_manager.py +308 -0
- crawlo/queue/redis_priority_queue.py +209 -0
- crawlo/settings/__init__.py +7 -7
- crawlo/settings/default_settings.py +245 -167
- crawlo/settings/setting_manager.py +99 -99
- crawlo/spider/__init__.py +639 -129
- crawlo/stats_collector.py +59 -59
- crawlo/subscriber.py +106 -106
- crawlo/task_manager.py +30 -27
- crawlo/templates/crawlo.cfg.tmpl +10 -10
- crawlo/templates/project/__init__.py.tmpl +3 -3
- crawlo/templates/project/items.py.tmpl +17 -17
- crawlo/templates/project/middlewares.py.tmpl +87 -76
- crawlo/templates/project/pipelines.py.tmpl +342 -64
- crawlo/templates/project/run.py.tmpl +252 -0
- crawlo/templates/project/settings.py.tmpl +251 -54
- crawlo/templates/project/spiders/__init__.py.tmpl +5 -5
- crawlo/templates/spider/spider.py.tmpl +178 -32
- crawlo/utils/__init__.py +7 -7
- crawlo/utils/controlled_spider_mixin.py +440 -0
- crawlo/utils/date_tools.py +233 -233
- crawlo/utils/db_helper.py +343 -343
- crawlo/utils/func_tools.py +82 -82
- crawlo/utils/large_scale_config.py +287 -0
- crawlo/utils/large_scale_helper.py +344 -0
- crawlo/utils/log.py +128 -128
- crawlo/utils/queue_helper.py +176 -0
- crawlo/utils/request.py +267 -267
- crawlo/utils/request_serializer.py +220 -0
- crawlo/utils/spider_loader.py +62 -62
- crawlo/utils/system.py +11 -11
- crawlo/utils/tools.py +4 -4
- crawlo/utils/url.py +39 -39
- crawlo-1.1.3.dist-info/METADATA +635 -0
- crawlo-1.1.3.dist-info/RECORD +113 -0
- examples/__init__.py +7 -7
- examples/controlled_spider_example.py +205 -0
- tests/__init__.py +7 -7
- tests/test_final_validation.py +154 -0
- tests/test_proxy_health_check.py +32 -32
- tests/test_proxy_middleware_integration.py +136 -136
- tests/test_proxy_providers.py +56 -56
- tests/test_proxy_stats.py +19 -19
- tests/test_proxy_strategies.py +59 -59
- tests/test_redis_config.py +29 -0
- tests/test_redis_queue.py +225 -0
- tests/test_request_serialization.py +71 -0
- tests/test_scheduler.py +242 -0
- crawlo/pipelines/mysql_batch_pipline.py +0 -273
- crawlo/utils/pqueue.py +0 -174
- crawlo-1.1.1.dist-info/METADATA +0 -220
- crawlo-1.1.1.dist-info/RECORD +0 -100
- examples/baidu_spider/__init__.py +0 -7
- examples/baidu_spider/demo.py +0 -94
- examples/baidu_spider/items.py +0 -46
- examples/baidu_spider/middleware.py +0 -49
- examples/baidu_spider/pipeline.py +0 -55
- examples/baidu_spider/run.py +0 -27
- examples/baidu_spider/settings.py +0 -121
- examples/baidu_spider/spiders/__init__.py +0 -7
- examples/baidu_spider/spiders/bai_du.py +0 -61
- examples/baidu_spider/spiders/miit.py +0 -159
- examples/baidu_spider/spiders/sina.py +0 -79
- {crawlo-1.1.1.dist-info → crawlo-1.1.3.dist-info}/WHEEL +0 -0
- {crawlo-1.1.1.dist-info → crawlo-1.1.3.dist-info}/entry_points.txt +0 -0
- {crawlo-1.1.1.dist-info → crawlo-1.1.3.dist-info}/top_level.txt +0 -0
crawlo/__init__.py
CHANGED
@@ -1,34 +1,35 @@
-#!/usr/bin/python
-# -*- coding: UTF-8 -*-
-"""
-Crawlo - an asynchronous crawler framework
-"""
-from crawlo.spider import Spider
-from crawlo.items
-from crawlo.network.request import Request
-from crawlo.network.response import Response
-from crawlo.downloader import DownloaderBase
-from crawlo.middleware import BaseMiddleware
-
-
-# Version number: read from package metadata first
-try:
-    from importlib.metadata import version
-    __version__ = version("crawlo")
-except Exception:
-    # May not be installed in development mode; fall back to __version__.py or "dev"
-    try:
-        from crawlo.__version__ import __version__
-    except ImportError:
-        __version__ = "dev"
-
-# Define the public API
-__all__ = [
-    'Spider',
-    'Item',
-    '
-    '
-    '
-    '
-    '
+#!/usr/bin/python
+# -*- coding: UTF-8 -*-
+"""
+Crawlo - an asynchronous crawler framework
+"""
+from crawlo.spider import Spider
+from crawlo.items import Item, Field
+from crawlo.network.request import Request
+from crawlo.network.response import Response
+from crawlo.downloader import DownloaderBase
+from crawlo.middleware import BaseMiddleware
+
+
+# Version number: read from package metadata first
+try:
+    from importlib.metadata import version
+    __version__ = version("crawlo")
+except Exception:
+    # May not be installed in development mode; fall back to __version__.py or "dev"
+    try:
+        from crawlo.__version__ import __version__
+    except ImportError:
+        __version__ = "dev"
+
+# Define the public API
+__all__ = [
+    'Spider',
+    'Item',
+    'Field',
+    'Request',
+    'Response',
+    'DownloaderBase',
+    'BaseMiddleware',
+    '__version__',
 ]
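The widened __all__ above means the framework's core classes are now importable directly from the package root, with __version__ resolved from installed metadata, then crawlo/__version__.py, then "dev". A minimal sketch using only the names exported in the new __init__.py (nothing beyond these imports is implied by the diff):

# Uses only the 1.1.3 top-level exports shown in the hunk above.
import crawlo
from crawlo import Spider, Item, Field, Request, Response

print(crawlo.__version__)   # "1.1.3" when installed; "dev" is the final fallback
print(crawlo.__all__)       # now includes 'Field', 'Request', 'Response', 'DownloaderBase', 'BaseMiddleware'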
crawlo/__version__.py
CHANGED
@@ -1 +1 @@
-__version__ = "1.1.
+__version__ = "1.1.3"
crawlo/cli.py
CHANGED
@@ -1,41 +1,41 @@
-# crawlo/cli.py
-# !/usr/bin/python
-# -*- coding: UTF-8 -*-
-import sys
-import argparse
-from crawlo.commands import get_commands
-
-
-def main():
-    # Get all available commands
-    commands = get_commands()
-
-    parser = argparse.ArgumentParser(
-        description="Crawlo: A lightweight web crawler framework.",
-        usage="crawlo <command> [options]"
-    )
-    parser.add_argument('command', help='Available commands: ' + ', '.join(commands.keys()))
-    # Note: no command-specific arguments are added here; each subcommand parses its own
-
-    # Parse only the command itself
-    args, unknown = parser.parse_known_args()
-
-    if args.command not in commands:
-        print(f"Unknown command: {args.command}")
-        print(f"Available commands: {', '.join(commands.keys())}")
-        sys.exit(1)
-
-    # Dynamically import and execute the command
-    try:
-        module = __import__(commands[args.command], fromlist=['main'])
-        sys.exit(module.main(unknown))
-    except ImportError as e:
-        print(f"Failed to load command '{args.command}': {e}")
-        sys.exit(1)
-    except Exception as e:
-        print(f"Command '{args.command}' failed: {e}")
-        sys.exit(1)
-
-
-if __name__ == '__main__':
+# crawlo/cli.py
+# !/usr/bin/python
+# -*- coding: UTF-8 -*-
+import sys
+import argparse
+from crawlo.commands import get_commands
+
+
+def main():
+    # Get all available commands
+    commands = get_commands()
+
+    parser = argparse.ArgumentParser(
+        description="Crawlo: A lightweight web crawler framework.",
+        usage="crawlo <command> [options]"
+    )
+    parser.add_argument('command', help='Available commands: ' + ', '.join(commands.keys()))
+    # Note: no command-specific arguments are added here; each subcommand parses its own
+
+    # Parse only the command itself
+    args, unknown = parser.parse_known_args()
+
+    if args.command not in commands:
+        print(f"Unknown command: {args.command}")
+        print(f"Available commands: {', '.join(commands.keys())}")
+        sys.exit(1)
+
+    # Dynamically import and execute the command
+    try:
+        module = __import__(commands[args.command], fromlist=['main'])
+        sys.exit(module.main(unknown))
+    except ImportError as e:
+        print(f"Failed to load command '{args.command}': {e}")
+        sys.exit(1)
+    except Exception as e:
+        print(f"Command '{args.command}' failed: {e}")
+        sys.exit(1)
+
+
+if __name__ == '__main__':
     main()
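The dispatcher above imposes a single contract on every entry returned by get_commands(): the named module must expose a main(argv) callable that receives the arguments left over from parse_known_args() and whose return value is handed to sys.exit(). A hypothetical command module following that contract (the module name and --name option are illustrative, not part of crawlo):

# Hypothetical crawlo/commands/hello.py -- illustrative only, not shipped with crawlo.
import argparse


def main(argv):
    # argv is the unparsed remainder that cli.main() collects via parse_known_args()
    parser = argparse.ArgumentParser(prog="crawlo hello")
    parser.add_argument("--name", default="world")
    args = parser.parse_args(argv)
    print(f"Hello, {args.name}")
    return 0  # becomes the process exit code via sys.exit(module.main(unknown))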
crawlo/commands/__init__.py
CHANGED
@@ -1,14 +1,14 @@
-#!/usr/bin/python
-# -*- coding:UTF-8 -*-
-
-_commands = {
-    'startproject': 'crawlo.commands.startproject',
-    'genspider': 'crawlo.commands.genspider',
-    'run': 'crawlo.commands.run',
-    'check': 'crawlo.commands.check',
-    'list': 'crawlo.commands.list',
-    'stats': 'crawlo.commands.stats'
-}
-
-def get_commands():
+#!/usr/bin/python
+# -*- coding:UTF-8 -*-
+
+_commands = {
+    'startproject': 'crawlo.commands.startproject',
+    'genspider': 'crawlo.commands.genspider',
+    'run': 'crawlo.commands.run',
+    'check': 'crawlo.commands.check',
+    'list': 'crawlo.commands.list',
+    'stats': 'crawlo.commands.stats'
+}
+
+def get_commands():
     return _commands
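Wiring a new subcommand therefore takes two steps: add an entry mapping its name to a module path in _commands, and have that module expose main(argv). The lookup that cli.main() performs can be reproduced in isolation; the 'hello' entry below continues the hypothetical example from the cli.py section:

# Assumes the hypothetical entry 'hello': 'crawlo.commands.hello' was added to _commands.
import importlib
from crawlo.commands import get_commands

commands = get_commands()                        # {'run': 'crawlo.commands.run', ...}
module = importlib.import_module(commands['hello'])
exit_code = module.main(['--name', 'crawlo'])    # same main(argv) contract cli.main() relies on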