crawlo-1.0.9-py3-none-any.whl → crawlo-1.1.1-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of crawlo might be problematic. See the registry's advisory page for more details.

Files changed (111)
  1. crawlo/__init__.py +33 -24
  2. crawlo/__version__.py +1 -1
  3. crawlo/cli.py +40 -40
  4. crawlo/commands/__init__.py +13 -13
  5. crawlo/commands/check.py +594 -106
  6. crawlo/commands/genspider.py +125 -110
  7. crawlo/commands/list.py +147 -92
  8. crawlo/commands/run.py +286 -181
  9. crawlo/commands/startproject.py +111 -101
  10. crawlo/commands/stats.py +188 -59
  11. crawlo/core/__init__.py +2 -2
  12. crawlo/core/engine.py +158 -158
  13. crawlo/core/processor.py +40 -40
  14. crawlo/core/scheduler.py +57 -57
  15. crawlo/crawler.py +494 -492
  16. crawlo/downloader/__init__.py +78 -78
  17. crawlo/downloader/aiohttp_downloader.py +199 -199
  18. crawlo/downloader/cffi_downloader.py +242 -277
  19. crawlo/downloader/httpx_downloader.py +246 -246
  20. crawlo/event.py +11 -11
  21. crawlo/exceptions.py +78 -78
  22. crawlo/extension/__init__.py +31 -31
  23. crawlo/extension/log_interval.py +49 -49
  24. crawlo/extension/log_stats.py +44 -44
  25. crawlo/extension/logging_extension.py +34 -34
  26. crawlo/filters/__init__.py +37 -37
  27. crawlo/filters/aioredis_filter.py +150 -150
  28. crawlo/filters/memory_filter.py +202 -202
  29. crawlo/items/__init__.py +23 -23
  30. crawlo/items/base.py +21 -21
  31. crawlo/items/fields.py +53 -53
  32. crawlo/items/items.py +104 -104
  33. crawlo/middleware/__init__.py +21 -21
  34. crawlo/middleware/default_header.py +32 -32
  35. crawlo/middleware/download_delay.py +28 -28
  36. crawlo/middleware/middleware_manager.py +135 -135
  37. crawlo/middleware/proxy.py +245 -245
  38. crawlo/middleware/request_ignore.py +30 -30
  39. crawlo/middleware/response_code.py +18 -18
  40. crawlo/middleware/response_filter.py +26 -26
  41. crawlo/middleware/retry.py +90 -90
  42. crawlo/network/__init__.py +7 -7
  43. crawlo/network/request.py +203 -203
  44. crawlo/network/response.py +166 -166
  45. crawlo/pipelines/__init__.py +13 -13
  46. crawlo/pipelines/console_pipeline.py +39 -39
  47. crawlo/pipelines/mongo_pipeline.py +116 -116
  48. crawlo/pipelines/mysql_batch_pipline.py +272 -272
  49. crawlo/pipelines/mysql_pipeline.py +195 -195
  50. crawlo/pipelines/pipeline_manager.py +56 -56
  51. crawlo/project.py +153 -0
  52. crawlo/settings/__init__.py +7 -7
  53. crawlo/settings/default_settings.py +166 -168
  54. crawlo/settings/setting_manager.py +99 -99
  55. crawlo/spider/__init__.py +129 -129
  56. crawlo/stats_collector.py +59 -59
  57. crawlo/subscriber.py +106 -106
  58. crawlo/task_manager.py +27 -27
  59. crawlo/templates/crawlo.cfg.tmpl +10 -10
  60. crawlo/templates/project/__init__.py.tmpl +3 -3
  61. crawlo/templates/project/items.py.tmpl +17 -17
  62. crawlo/templates/project/middlewares.py.tmpl +75 -75
  63. crawlo/templates/project/pipelines.py.tmpl +63 -63
  64. crawlo/templates/project/settings.py.tmpl +54 -54
  65. crawlo/templates/project/spiders/__init__.py.tmpl +5 -5
  66. crawlo/templates/spider/spider.py.tmpl +31 -31
  67. crawlo/utils/__init__.py +7 -7
  68. crawlo/utils/date_tools.py +233 -233
  69. crawlo/utils/db_helper.py +343 -343
  70. crawlo/utils/func_tools.py +82 -82
  71. crawlo/utils/log.py +128 -128
  72. crawlo/utils/pqueue.py +173 -173
  73. crawlo/utils/request.py +267 -267
  74. crawlo/utils/spider_loader.py +62 -62
  75. crawlo/utils/system.py +11 -11
  76. crawlo/utils/tools.py +4 -4
  77. crawlo/utils/url.py +39 -39
  78. crawlo-1.1.1.dist-info/METADATA +220 -0
  79. crawlo-1.1.1.dist-info/RECORD +100 -0
  80. examples/__init__.py +7 -0
  81. examples/baidu_spider/__init__.py +7 -0
  82. examples/baidu_spider/demo.py +94 -0
  83. examples/baidu_spider/items.py +46 -0
  84. examples/baidu_spider/middleware.py +49 -0
  85. examples/baidu_spider/pipeline.py +55 -0
  86. examples/baidu_spider/run.py +27 -0
  87. examples/baidu_spider/settings.py +121 -0
  88. examples/baidu_spider/spiders/__init__.py +7 -0
  89. examples/baidu_spider/spiders/bai_du.py +61 -0
  90. examples/baidu_spider/spiders/miit.py +159 -0
  91. examples/baidu_spider/spiders/sina.py +79 -0
  92. tests/__init__.py +7 -7
  93. tests/test_proxy_health_check.py +32 -32
  94. tests/test_proxy_middleware_integration.py +136 -136
  95. tests/test_proxy_providers.py +56 -56
  96. tests/test_proxy_stats.py +19 -19
  97. tests/test_proxy_strategies.py +59 -59
  98. crawlo/utils/concurrency_manager.py +0 -125
  99. crawlo/utils/project.py +0 -197
  100. crawlo-1.0.9.dist-info/METADATA +0 -49
  101. crawlo-1.0.9.dist-info/RECORD +0 -97
  102. examples/gxb/__init__.py +0 -0
  103. examples/gxb/items.py +0 -36
  104. examples/gxb/run.py +0 -16
  105. examples/gxb/settings.py +0 -72
  106. examples/gxb/spider/__init__.py +0 -0
  107. examples/gxb/spider/miit_spider.py +0 -180
  108. examples/gxb/spider/telecom_device.py +0 -129
  109. {crawlo-1.0.9.dist-info → crawlo-1.1.1.dist-info}/WHEEL +0 -0
  110. {crawlo-1.0.9.dist-info → crawlo-1.1.1.dist-info}/entry_points.txt +0 -0
  111. {crawlo-1.0.9.dist-info → crawlo-1.1.1.dist-info}/top_level.txt +0 -0
crawlo/__init__.py CHANGED
@@ -1,25 +1,34 @@
1
- #!/usr/bin/python
2
- # -*- coding: UTF-8 -*-
3
- """
4
- Crawlo - 一个异步爬虫框架
5
- """
6
- from crawlo.spider import Spider
7
- from crawlo.items.items import Item
8
- from crawlo.network.request import Request
9
- from crawlo.network.response import Response
10
- from crawlo.downloader import DownloaderBase
11
- from crawlo.middleware import BaseMiddleware
12
-
13
- # 版本号
14
- from crawlo.__version__ import __version__
15
-
16
- # 可选:定义对外暴露的接口
17
- __all__ = [
18
- 'Spider',
19
- 'Item',
20
- 'Request',
21
- 'Response',
22
- 'DownloaderBase',
23
- 'BaseMiddleware',
24
- '__version__',
1
+ #!/usr/bin/python
2
+ # -*- coding: UTF-8 -*-
3
+ """
4
+ Crawlo - 一个异步爬虫框架
5
+ """
6
+ from crawlo.spider import Spider
7
+ from crawlo.items.items import Item
8
+ from crawlo.network.request import Request
9
+ from crawlo.network.response import Response
10
+ from crawlo.downloader import DownloaderBase
11
+ from crawlo.middleware import BaseMiddleware
12
+
13
+
14
+ # 版本号:优先从元数据读取
15
+ try:
16
+ from importlib.metadata import version
17
+ __version__ = version("crawlo")
18
+ except Exception:
19
+ # 开发模式下可能未安装,回退到 __version__.py 或 dev
20
+ try:
21
+ from crawlo.__version__ import __version__
22
+ except ImportError:
23
+ __version__ = "dev"
24
+
25
+ # 定义对外 API
26
+ __all__ = [
27
+ 'Spider',
28
+ 'Item',
29
+ 'Request',
30
+ 'Response',
31
+ 'DownloaderBase',
32
+ 'BaseMiddleware',
33
+ '__version__',
25
34
  ]
crawlo/__version__.py CHANGED
@@ -1 +1 @@
1
- __version__ = "1.0.9"
1
+ __version__ = "1.1.1"
crawlo/cli.py CHANGED
@@ -1,41 +1,41 @@
1
- # crawlo/cli.py
2
- # !/usr/bin/python
3
- # -*- coding: UTF-8 -*-
4
- import sys
5
- import argparse
6
- from crawlo.commands import get_commands
7
-
8
-
9
- def main():
10
- # 获取所有可用命令
11
- commands = get_commands()
12
-
13
- parser = argparse.ArgumentParser(
14
- description="Crawlo: A lightweight web crawler framework.",
15
- usage="crawlo <command> [options]"
16
- )
17
- parser.add_argument('command', help='Available commands: ' + ', '.join(commands.keys()))
18
- # 注意:这里不添加具体参数,由子命令解析
19
-
20
- # 只解析命令
21
- args, unknown = parser.parse_known_args()
22
-
23
- if args.command not in commands:
24
- print(f"Unknown command: {args.command}")
25
- print(f"Available commands: {', '.join(commands.keys())}")
26
- sys.exit(1)
27
-
28
- # 动态导入并执行命令
29
- try:
30
- module = __import__(commands[args.command], fromlist=['main'])
31
- sys.exit(module.main(unknown))
32
- except ImportError as e:
33
- print(f"Failed to load command '{args.command}': {e}")
34
- sys.exit(1)
35
- except Exception as e:
36
- print(f"Command '{args.command}' failed: {e}")
37
- sys.exit(1)
38
-
39
-
40
- if __name__ == '__main__':
1
+ # crawlo/cli.py
2
+ # !/usr/bin/python
3
+ # -*- coding: UTF-8 -*-
4
+ import sys
5
+ import argparse
6
+ from crawlo.commands import get_commands
7
+
8
+
9
+ def main():
10
+ # 获取所有可用命令
11
+ commands = get_commands()
12
+
13
+ parser = argparse.ArgumentParser(
14
+ description="Crawlo: A lightweight web crawler framework.",
15
+ usage="crawlo <command> [options]"
16
+ )
17
+ parser.add_argument('command', help='Available commands: ' + ', '.join(commands.keys()))
18
+ # 注意:这里不添加具体参数,由子命令解析
19
+
20
+ # 只解析命令
21
+ args, unknown = parser.parse_known_args()
22
+
23
+ if args.command not in commands:
24
+ print(f"Unknown command: {args.command}")
25
+ print(f"Available commands: {', '.join(commands.keys())}")
26
+ sys.exit(1)
27
+
28
+ # 动态导入并执行命令
29
+ try:
30
+ module = __import__(commands[args.command], fromlist=['main'])
31
+ sys.exit(module.main(unknown))
32
+ except ImportError as e:
33
+ print(f"Failed to load command '{args.command}': {e}")
34
+ sys.exit(1)
35
+ except Exception as e:
36
+ print(f"Command '{args.command}' failed: {e}")
37
+ sys.exit(1)
38
+
39
+
40
+ if __name__ == '__main__':
41
41
  main()
@@ -1,14 +1,14 @@
1
- #!/usr/bin/python
2
- # -*- coding:UTF-8 -*-
3
-
4
- _commands = {
5
- 'startproject': 'crawlo.commands.startproject',
6
- 'genspider': 'crawlo.commands.genspider',
7
- 'run': 'crawlo.commands.run',
8
- 'check': 'crawlo.commands.check',
9
- 'list': 'crawlo.commands.list',
10
- 'stats': 'crawlo.commands.stats'
11
- }
12
-
13
- def get_commands():
1
+ #!/usr/bin/python
2
+ # -*- coding:UTF-8 -*-
3
+
4
+ _commands = {
5
+ 'startproject': 'crawlo.commands.startproject',
6
+ 'genspider': 'crawlo.commands.genspider',
7
+ 'run': 'crawlo.commands.run',
8
+ 'check': 'crawlo.commands.check',
9
+ 'list': 'crawlo.commands.list',
10
+ 'stats': 'crawlo.commands.stats'
11
+ }
12
+
13
+ def get_commands():
14
14
  return _commands