crawlo 1.1.1__py3-none-any.whl → 1.1.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of crawlo might be problematic; see the details below for the specific changes flagged in this release.

Files changed (128)
  1. crawlo/__init__.py +34 -33
  2. crawlo/__version__.py +1 -1
  3. crawlo/cli.py +40 -40
  4. crawlo/commands/__init__.py +13 -13
  5. crawlo/commands/check.py +594 -594
  6. crawlo/commands/genspider.py +152 -126
  7. crawlo/commands/list.py +156 -147
  8. crawlo/commands/run.py +285 -285
  9. crawlo/commands/startproject.py +196 -111
  10. crawlo/commands/stats.py +188 -188
  11. crawlo/commands/utils.py +187 -0
  12. crawlo/config.py +280 -0
  13. crawlo/core/__init__.py +2 -2
  14. crawlo/core/engine.py +171 -158
  15. crawlo/core/enhanced_engine.py +190 -0
  16. crawlo/core/processor.py +40 -40
  17. crawlo/core/scheduler.py +166 -57
  18. crawlo/crawler.py +1028 -495
  19. crawlo/downloader/__init__.py +242 -78
  20. crawlo/downloader/aiohttp_downloader.py +212 -199
  21. crawlo/downloader/cffi_downloader.py +251 -241
  22. crawlo/downloader/httpx_downloader.py +259 -246
  23. crawlo/event.py +11 -11
  24. crawlo/exceptions.py +82 -78
  25. crawlo/extension/__init__.py +31 -31
  26. crawlo/extension/log_interval.py +49 -49
  27. crawlo/extension/log_stats.py +44 -44
  28. crawlo/extension/logging_extension.py +34 -34
  29. crawlo/filters/__init__.py +154 -37
  30. crawlo/filters/aioredis_filter.py +242 -150
  31. crawlo/filters/memory_filter.py +269 -202
  32. crawlo/items/__init__.py +23 -23
  33. crawlo/items/base.py +21 -21
  34. crawlo/items/fields.py +53 -53
  35. crawlo/items/items.py +104 -104
  36. crawlo/middleware/__init__.py +21 -21
  37. crawlo/middleware/default_header.py +32 -32
  38. crawlo/middleware/download_delay.py +28 -28
  39. crawlo/middleware/middleware_manager.py +135 -135
  40. crawlo/middleware/proxy.py +248 -245
  41. crawlo/middleware/request_ignore.py +30 -30
  42. crawlo/middleware/response_code.py +18 -18
  43. crawlo/middleware/response_filter.py +26 -26
  44. crawlo/middleware/retry.py +125 -90
  45. crawlo/mode_manager.py +201 -0
  46. crawlo/network/__init__.py +21 -7
  47. crawlo/network/request.py +311 -203
  48. crawlo/network/response.py +271 -166
  49. crawlo/pipelines/__init__.py +22 -13
  50. crawlo/pipelines/bloom_dedup_pipeline.py +157 -0
  51. crawlo/pipelines/console_pipeline.py +39 -39
  52. crawlo/pipelines/csv_pipeline.py +317 -0
  53. crawlo/pipelines/database_dedup_pipeline.py +225 -0
  54. crawlo/pipelines/json_pipeline.py +219 -0
  55. crawlo/pipelines/memory_dedup_pipeline.py +116 -0
  56. crawlo/pipelines/mongo_pipeline.py +116 -116
  57. crawlo/pipelines/mysql_pipeline.py +195 -195
  58. crawlo/pipelines/pipeline_manager.py +56 -56
  59. crawlo/pipelines/redis_dedup_pipeline.py +163 -0
  60. crawlo/project.py +153 -153
  61. crawlo/queue/__init__.py +0 -0
  62. crawlo/queue/pqueue.py +37 -0
  63. crawlo/queue/queue_manager.py +308 -0
  64. crawlo/queue/redis_priority_queue.py +209 -0
  65. crawlo/settings/__init__.py +7 -7
  66. crawlo/settings/default_settings.py +245 -167
  67. crawlo/settings/setting_manager.py +99 -99
  68. crawlo/spider/__init__.py +639 -129
  69. crawlo/stats_collector.py +59 -59
  70. crawlo/subscriber.py +106 -106
  71. crawlo/task_manager.py +30 -27
  72. crawlo/templates/crawlo.cfg.tmpl +10 -10
  73. crawlo/templates/project/__init__.py.tmpl +3 -3
  74. crawlo/templates/project/items.py.tmpl +17 -17
  75. crawlo/templates/project/middlewares.py.tmpl +87 -76
  76. crawlo/templates/project/pipelines.py.tmpl +342 -64
  77. crawlo/templates/project/run.py.tmpl +252 -0
  78. crawlo/templates/project/settings.py.tmpl +251 -54
  79. crawlo/templates/project/spiders/__init__.py.tmpl +5 -5
  80. crawlo/templates/spider/spider.py.tmpl +178 -32
  81. crawlo/utils/__init__.py +7 -7
  82. crawlo/utils/controlled_spider_mixin.py +440 -0
  83. crawlo/utils/date_tools.py +233 -233
  84. crawlo/utils/db_helper.py +343 -343
  85. crawlo/utils/func_tools.py +82 -82
  86. crawlo/utils/large_scale_config.py +287 -0
  87. crawlo/utils/large_scale_helper.py +344 -0
  88. crawlo/utils/log.py +128 -128
  89. crawlo/utils/queue_helper.py +176 -0
  90. crawlo/utils/request.py +267 -267
  91. crawlo/utils/request_serializer.py +220 -0
  92. crawlo/utils/spider_loader.py +62 -62
  93. crawlo/utils/system.py +11 -11
  94. crawlo/utils/tools.py +4 -4
  95. crawlo/utils/url.py +39 -39
  96. crawlo-1.1.3.dist-info/METADATA +635 -0
  97. crawlo-1.1.3.dist-info/RECORD +113 -0
  98. examples/__init__.py +7 -7
  99. examples/controlled_spider_example.py +205 -0
  100. tests/__init__.py +7 -7
  101. tests/test_final_validation.py +154 -0
  102. tests/test_proxy_health_check.py +32 -32
  103. tests/test_proxy_middleware_integration.py +136 -136
  104. tests/test_proxy_providers.py +56 -56
  105. tests/test_proxy_stats.py +19 -19
  106. tests/test_proxy_strategies.py +59 -59
  107. tests/test_redis_config.py +29 -0
  108. tests/test_redis_queue.py +225 -0
  109. tests/test_request_serialization.py +71 -0
  110. tests/test_scheduler.py +242 -0
  111. crawlo/pipelines/mysql_batch_pipline.py +0 -273
  112. crawlo/utils/pqueue.py +0 -174
  113. crawlo-1.1.1.dist-info/METADATA +0 -220
  114. crawlo-1.1.1.dist-info/RECORD +0 -100
  115. examples/baidu_spider/__init__.py +0 -7
  116. examples/baidu_spider/demo.py +0 -94
  117. examples/baidu_spider/items.py +0 -46
  118. examples/baidu_spider/middleware.py +0 -49
  119. examples/baidu_spider/pipeline.py +0 -55
  120. examples/baidu_spider/run.py +0 -27
  121. examples/baidu_spider/settings.py +0 -121
  122. examples/baidu_spider/spiders/__init__.py +0 -7
  123. examples/baidu_spider/spiders/bai_du.py +0 -61
  124. examples/baidu_spider/spiders/miit.py +0 -159
  125. examples/baidu_spider/spiders/sina.py +0 -79
  126. {crawlo-1.1.1.dist-info → crawlo-1.1.3.dist-info}/WHEEL +0 -0
  127. {crawlo-1.1.1.dist-info → crawlo-1.1.3.dist-info}/entry_points.txt +0 -0
  128. {crawlo-1.1.1.dist-info → crawlo-1.1.3.dist-info}/top_level.txt +0 -0
crawlo/__init__.py CHANGED
@@ -1,34 +1,35 @@
1
- #!/usr/bin/python
2
- # -*- coding: UTF-8 -*-
3
- """
4
- Crawlo - 一个异步爬虫框架
5
- """
6
- from crawlo.spider import Spider
7
- from crawlo.items.items import Item
8
- from crawlo.network.request import Request
9
- from crawlo.network.response import Response
10
- from crawlo.downloader import DownloaderBase
11
- from crawlo.middleware import BaseMiddleware
12
-
13
-
14
- # 版本号:优先从元数据读取
15
- try:
16
- from importlib.metadata import version
17
- __version__ = version("crawlo")
18
- except Exception:
19
- # 开发模式下可能未安装,回退到 __version__.py 或 dev
20
- try:
21
- from crawlo.__version__ import __version__
22
- except ImportError:
23
- __version__ = "dev"
24
-
25
- # 定义对外 API
26
- __all__ = [
27
- 'Spider',
28
- 'Item',
29
- 'Request',
30
- 'Response',
31
- 'DownloaderBase',
32
- 'BaseMiddleware',
33
- '__version__',
1
+ #!/usr/bin/python
2
+ # -*- coding: UTF-8 -*-
3
+ """
4
+ Crawlo - 一个异步爬虫框架
5
+ """
6
+ from crawlo.spider import Spider
7
+ from crawlo.items import Item, Field
8
+ from crawlo.network.request import Request
9
+ from crawlo.network.response import Response
10
+ from crawlo.downloader import DownloaderBase
11
+ from crawlo.middleware import BaseMiddleware
12
+
13
+
14
+ # 版本号:优先从元数据读取
15
+ try:
16
+ from importlib.metadata import version
17
+ __version__ = version("crawlo")
18
+ except Exception:
19
+ # 开发模式下可能未安装,回退到 __version__.py 或 dev
20
+ try:
21
+ from crawlo.__version__ import __version__
22
+ except ImportError:
23
+ __version__ = "dev"
24
+
25
+ # 定义对外 API
26
+ __all__ = [
27
+ 'Spider',
28
+ 'Item',
29
+ 'Field',
30
+ 'Request',
31
+ 'Response',
32
+ 'DownloaderBase',
33
+ 'BaseMiddleware',
34
+ '__version__',
34
35
  ]
crawlo/__version__.py CHANGED
@@ -1 +1 @@
1
- __version__ = "1.1.1"
1
+ __version__ = "1.1.3"
crawlo/cli.py CHANGED
@@ -1,41 +1,41 @@
1
- # crawlo/cli.py
2
- # !/usr/bin/python
3
- # -*- coding: UTF-8 -*-
4
- import sys
5
- import argparse
6
- from crawlo.commands import get_commands
7
-
8
-
9
- def main():
10
- # 获取所有可用命令
11
- commands = get_commands()
12
-
13
- parser = argparse.ArgumentParser(
14
- description="Crawlo: A lightweight web crawler framework.",
15
- usage="crawlo <command> [options]"
16
- )
17
- parser.add_argument('command', help='Available commands: ' + ', '.join(commands.keys()))
18
- # 注意:这里不添加具体参数,由子命令解析
19
-
20
- # 只解析命令
21
- args, unknown = parser.parse_known_args()
22
-
23
- if args.command not in commands:
24
- print(f"Unknown command: {args.command}")
25
- print(f"Available commands: {', '.join(commands.keys())}")
26
- sys.exit(1)
27
-
28
- # 动态导入并执行命令
29
- try:
30
- module = __import__(commands[args.command], fromlist=['main'])
31
- sys.exit(module.main(unknown))
32
- except ImportError as e:
33
- print(f"Failed to load command '{args.command}': {e}")
34
- sys.exit(1)
35
- except Exception as e:
36
- print(f"Command '{args.command}' failed: {e}")
37
- sys.exit(1)
38
-
39
-
40
- if __name__ == '__main__':
1
+ # crawlo/cli.py
2
+ # !/usr/bin/python
3
+ # -*- coding: UTF-8 -*-
4
+ import sys
5
+ import argparse
6
+ from crawlo.commands import get_commands
7
+
8
+
9
+ def main():
10
+ # 获取所有可用命令
11
+ commands = get_commands()
12
+
13
+ parser = argparse.ArgumentParser(
14
+ description="Crawlo: A lightweight web crawler framework.",
15
+ usage="crawlo <command> [options]"
16
+ )
17
+ parser.add_argument('command', help='Available commands: ' + ', '.join(commands.keys()))
18
+ # 注意:这里不添加具体参数,由子命令解析
19
+
20
+ # 只解析命令
21
+ args, unknown = parser.parse_known_args()
22
+
23
+ if args.command not in commands:
24
+ print(f"Unknown command: {args.command}")
25
+ print(f"Available commands: {', '.join(commands.keys())}")
26
+ sys.exit(1)
27
+
28
+ # 动态导入并执行命令
29
+ try:
30
+ module = __import__(commands[args.command], fromlist=['main'])
31
+ sys.exit(module.main(unknown))
32
+ except ImportError as e:
33
+ print(f"Failed to load command '{args.command}': {e}")
34
+ sys.exit(1)
35
+ except Exception as e:
36
+ print(f"Command '{args.command}' failed: {e}")
37
+ sys.exit(1)
38
+
39
+
40
+ if __name__ == '__main__':
41
41
  main()
@@ -1,14 +1,14 @@
1
- #!/usr/bin/python
2
- # -*- coding:UTF-8 -*-
3
-
4
- _commands = {
5
- 'startproject': 'crawlo.commands.startproject',
6
- 'genspider': 'crawlo.commands.genspider',
7
- 'run': 'crawlo.commands.run',
8
- 'check': 'crawlo.commands.check',
9
- 'list': 'crawlo.commands.list',
10
- 'stats': 'crawlo.commands.stats'
11
- }
12
-
13
- def get_commands():
1
+ #!/usr/bin/python
2
+ # -*- coding:UTF-8 -*-
3
+
4
+ _commands = {
5
+ 'startproject': 'crawlo.commands.startproject',
6
+ 'genspider': 'crawlo.commands.genspider',
7
+ 'run': 'crawlo.commands.run',
8
+ 'check': 'crawlo.commands.check',
9
+ 'list': 'crawlo.commands.list',
10
+ 'stats': 'crawlo.commands.stats'
11
+ }
12
+
13
+ def get_commands():
14
14
  return _commands