crawlo 1.4.3.tar.gz → 1.4.4.tar.gz

This diff represents the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registry.

Files changed (341)
  1. {crawlo-1.4.3/crawlo.egg-info → crawlo-1.4.4}/PKG-INFO +1 -1
  2. crawlo-1.4.4/crawlo/__version__.py +1 -0
  3. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/commands/genspider.py +52 -17
  4. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/crawler.py +6 -0
  5. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/queue/pqueue.py +2 -6
  6. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/queue/queue_manager.py +1 -2
  7. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/settings/default_settings.py +11 -30
  8. crawlo-1.4.4/crawlo/templates/project/settings.py.tmpl +157 -0
  9. crawlo-1.4.4/crawlo/templates/project/settings_distributed.py.tmpl +162 -0
  10. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/templates/project/settings_gentle.py.tmpl +45 -40
  11. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/templates/project/settings_high_performance.py.tmpl +45 -40
  12. crawlo-1.4.4/crawlo/templates/project/settings_minimal.py.tmpl +77 -0
  13. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/templates/project/settings_simple.py.tmpl +45 -40
  14. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/templates/run.py.tmpl +3 -7
  15. {crawlo-1.4.3 → crawlo-1.4.4/crawlo.egg-info}/PKG-INFO +1 -1
  16. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo.egg-info/SOURCES.txt +3 -9
  17. crawlo-1.4.4/tests/test_multi_directory.py +68 -0
  18. crawlo-1.4.4/tests/test_multiple_spider_modules.py +81 -0
  19. crawlo-1.4.4/tests/test_spider_modules.py +85 -0
  20. crawlo-1.4.3/crawlo/__version__.py +0 -1
  21. crawlo-1.4.3/crawlo/templates/project/settings.py.tmpl +0 -171
  22. crawlo-1.4.3/crawlo/templates/project/settings_distributed.py.tmpl +0 -170
  23. crawlo-1.4.3/crawlo/templates/project/settings_minimal.py.tmpl +0 -66
  24. crawlo-1.4.3/examples/test_project/__init__.py +0 -7
  25. crawlo-1.4.3/examples/test_project/run.py +0 -35
  26. crawlo-1.4.3/examples/test_project/test_project/__init__.py +0 -4
  27. crawlo-1.4.3/examples/test_project/test_project/items.py +0 -18
  28. crawlo-1.4.3/examples/test_project/test_project/middlewares.py +0 -119
  29. crawlo-1.4.3/examples/test_project/test_project/pipelines.py +0 -97
  30. crawlo-1.4.3/examples/test_project/test_project/settings.py +0 -170
  31. crawlo-1.4.3/examples/test_project/test_project/spiders/__init__.py +0 -10
  32. crawlo-1.4.3/examples/test_project/test_project/spiders/of_week_dis.py +0 -144
  33. {crawlo-1.4.3 → crawlo-1.4.4}/LICENSE +0 -0
  34. {crawlo-1.4.3 → crawlo-1.4.4}/MANIFEST.in +0 -0
  35. {crawlo-1.4.3 → crawlo-1.4.4}/README.md +0 -0
  36. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/__init__.py +0 -0
  37. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/cli.py +0 -0
  38. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/commands/__init__.py +0 -0
  39. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/commands/check.py +0 -0
  40. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/commands/help.py +0 -0
  41. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/commands/list.py +0 -0
  42. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/commands/run.py +0 -0
  43. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/commands/startproject.py +0 -0
  44. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/commands/stats.py +0 -0
  45. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/commands/utils.py +0 -0
  46. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/config.py +0 -0
  47. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/config_validator.py +0 -0
  48. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/core/__init__.py +0 -0
  49. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/core/engine.py +0 -0
  50. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/core/processor.py +0 -0
  51. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/core/scheduler.py +0 -0
  52. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/data/__init__.py +0 -0
  53. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/data/user_agents.py +0 -0
  54. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/downloader/__init__.py +0 -0
  55. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/downloader/aiohttp_downloader.py +0 -0
  56. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/downloader/cffi_downloader.py +0 -0
  57. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/downloader/httpx_downloader.py +0 -0
  58. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/downloader/hybrid_downloader.py +0 -0
  59. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/downloader/playwright_downloader.py +0 -0
  60. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/downloader/selenium_downloader.py +0 -0
  61. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/event.py +0 -0
  62. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/exceptions.py +0 -0
  63. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/extension/__init__.py +0 -0
  64. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/extension/health_check.py +0 -0
  65. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/extension/log_interval.py +0 -0
  66. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/extension/log_stats.py +0 -0
  67. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/extension/logging_extension.py +0 -0
  68. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/extension/memory_monitor.py +0 -0
  69. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/extension/performance_profiler.py +0 -0
  70. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/extension/request_recorder.py +0 -0
  71. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/factories/__init__.py +0 -0
  72. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/factories/base.py +0 -0
  73. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/factories/crawler.py +0 -0
  74. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/factories/registry.py +0 -0
  75. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/filters/__init__.py +0 -0
  76. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/filters/aioredis_filter.py +0 -0
  77. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/filters/memory_filter.py +0 -0
  78. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/framework.py +0 -0
  79. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/initialization/__init__.py +0 -0
  80. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/initialization/built_in.py +0 -0
  81. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/initialization/context.py +0 -0
  82. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/initialization/core.py +0 -0
  83. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/initialization/phases.py +0 -0
  84. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/initialization/registry.py +0 -0
  85. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/items/__init__.py +0 -0
  86. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/items/base.py +0 -0
  87. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/items/fields.py +0 -0
  88. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/items/items.py +0 -0
  89. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/logging/__init__.py +0 -0
  90. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/logging/async_handler.py +0 -0
  91. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/logging/config.py +0 -0
  92. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/logging/factory.py +0 -0
  93. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/logging/manager.py +0 -0
  94. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/logging/monitor.py +0 -0
  95. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/logging/sampler.py +0 -0
  96. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/middleware/__init__.py +0 -0
  97. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/middleware/default_header.py +0 -0
  98. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/middleware/download_delay.py +0 -0
  99. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/middleware/middleware_manager.py +0 -0
  100. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/middleware/offsite.py +0 -0
  101. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/middleware/proxy.py +0 -0
  102. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/middleware/request_ignore.py +0 -0
  103. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/middleware/response_code.py +0 -0
  104. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/middleware/response_filter.py +0 -0
  105. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/middleware/retry.py +0 -0
  106. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/middleware/simple_proxy.py +0 -0
  107. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/mode_manager.py +0 -0
  108. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/network/__init__.py +0 -0
  109. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/network/request.py +0 -0
  110. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/network/response.py +0 -0
  111. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/pipelines/__init__.py +0 -0
  112. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/pipelines/bloom_dedup_pipeline.py +0 -0
  113. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/pipelines/console_pipeline.py +0 -0
  114. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/pipelines/csv_pipeline.py +0 -0
  115. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/pipelines/database_dedup_pipeline.py +0 -0
  116. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/pipelines/json_pipeline.py +0 -0
  117. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/pipelines/memory_dedup_pipeline.py +0 -0
  118. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/pipelines/mongo_pipeline.py +0 -0
  119. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/pipelines/mysql_pipeline.py +0 -0
  120. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/pipelines/pipeline_manager.py +0 -0
  121. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/pipelines/redis_dedup_pipeline.py +0 -0
  122. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/project.py +0 -0
  123. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/queue/__init__.py +0 -0
  124. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/queue/redis_priority_queue.py +0 -0
  125. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/settings/__init__.py +0 -0
  126. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/settings/setting_manager.py +0 -0
  127. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/spider/__init__.py +0 -0
  128. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/stats_collector.py +0 -0
  129. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/subscriber.py +0 -0
  130. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/task_manager.py +0 -0
  131. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/templates/crawlo.cfg.tmpl +0 -0
  132. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/templates/project/__init__.py.tmpl +0 -0
  133. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/templates/project/items.py.tmpl +0 -0
  134. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/templates/project/middlewares.py.tmpl +0 -0
  135. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/templates/project/pipelines.py.tmpl +0 -0
  136. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/templates/project/spiders/__init__.py.tmpl +0 -0
  137. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/templates/spider/spider.py.tmpl +0 -0
  138. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/templates/spiders_init.py.tmpl +0 -0
  139. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/tools/__init__.py +0 -0
  140. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/tools/anti_crawler.py +0 -0
  141. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/tools/authenticated_proxy.py +0 -0
  142. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/tools/data_formatter.py +0 -0
  143. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/tools/data_validator.py +0 -0
  144. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/tools/date_tools.py +0 -0
  145. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/tools/distributed_coordinator.py +0 -0
  146. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/tools/encoding_converter.py +0 -0
  147. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/tools/network_diagnostic.py +0 -0
  148. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/tools/request_tools.py +0 -0
  149. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/tools/retry_mechanism.py +0 -0
  150. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/tools/scenario_adapter.py +0 -0
  151. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/tools/text_cleaner.py +0 -0
  152. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/utils/__init__.py +0 -0
  153. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/utils/batch_processor.py +0 -0
  154. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/utils/class_loader.py +0 -0
  155. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/utils/controlled_spider_mixin.py +0 -0
  156. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/utils/db_helper.py +0 -0
  157. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/utils/enhanced_error_handler.py +0 -0
  158. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/utils/env_config.py +0 -0
  159. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/utils/error_handler.py +0 -0
  160. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/utils/fingerprint.py +0 -0
  161. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/utils/func_tools.py +0 -0
  162. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/utils/large_scale_config.py +0 -0
  163. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/utils/large_scale_helper.py +0 -0
  164. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/utils/log.py +0 -0
  165. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/utils/performance_monitor.py +0 -0
  166. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/utils/queue_helper.py +0 -0
  167. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/utils/redis_connection_pool.py +0 -0
  168. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/utils/redis_key_validator.py +0 -0
  169. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/utils/request.py +0 -0
  170. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/utils/request_serializer.py +0 -0
  171. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/utils/spider_loader.py +0 -0
  172. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/utils/system.py +0 -0
  173. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/utils/tools.py +0 -0
  174. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo/utils/url.py +0 -0
  175. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo.egg-info/dependency_links.txt +0 -0
  176. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo.egg-info/entry_points.txt +0 -0
  177. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo.egg-info/requires.txt +0 -0
  178. {crawlo-1.4.3 → crawlo-1.4.4}/crawlo.egg-info/top_level.txt +0 -0
  179. {crawlo-1.4.3 → crawlo-1.4.4}/examples/__init__.py +0 -0
  180. {crawlo-1.4.3 → crawlo-1.4.4}/pyproject.toml +0 -0
  181. {crawlo-1.4.3 → crawlo-1.4.4}/requirements.txt +0 -0
  182. {crawlo-1.4.3 → crawlo-1.4.4}/setup.cfg +0 -0
  183. {crawlo-1.4.3 → crawlo-1.4.4}/tests/__init__.py +0 -0
  184. {crawlo-1.4.3 → crawlo-1.4.4}/tests/advanced_tools_example.py +0 -0
  185. {crawlo-1.4.3 → crawlo-1.4.4}/tests/authenticated_proxy_example.py +0 -0
  186. {crawlo-1.4.3 → crawlo-1.4.4}/tests/baidu_performance_test.py +0 -0
  187. {crawlo-1.4.3 → crawlo-1.4.4}/tests/baidu_test.py +0 -0
  188. {crawlo-1.4.3 → crawlo-1.4.4}/tests/cleaners_example.py +0 -0
  189. {crawlo-1.4.3 → crawlo-1.4.4}/tests/comprehensive_framework_test.py +0 -0
  190. {crawlo-1.4.3 → crawlo-1.4.4}/tests/comprehensive_test.py +0 -0
  191. {crawlo-1.4.3 → crawlo-1.4.4}/tests/comprehensive_testing_summary.md +0 -0
  192. {crawlo-1.4.3 → crawlo-1.4.4}/tests/config_validation_demo.py +0 -0
  193. {crawlo-1.4.3 → crawlo-1.4.4}/tests/controlled_spider_example.py +0 -0
  194. {crawlo-1.4.3 → crawlo-1.4.4}/tests/date_tools_example.py +0 -0
  195. {crawlo-1.4.3 → crawlo-1.4.4}/tests/debug_configure.py +0 -0
  196. {crawlo-1.4.3 → crawlo-1.4.4}/tests/debug_framework_logger.py +0 -0
  197. {crawlo-1.4.3 → crawlo-1.4.4}/tests/debug_log_config.py +0 -0
  198. {crawlo-1.4.3 → crawlo-1.4.4}/tests/debug_log_levels.py +0 -0
  199. {crawlo-1.4.3 → crawlo-1.4.4}/tests/debug_pipelines.py +0 -0
  200. {crawlo-1.4.3 → crawlo-1.4.4}/tests/detailed_log_test.py +0 -0
  201. {crawlo-1.4.3 → crawlo-1.4.4}/tests/distributed_test.py +0 -0
  202. {crawlo-1.4.3 → crawlo-1.4.4}/tests/distributed_test_debug.py +0 -0
  203. {crawlo-1.4.3 → crawlo-1.4.4}/tests/dynamic_loading_example.py +0 -0
  204. {crawlo-1.4.3 → crawlo-1.4.4}/tests/dynamic_loading_test.py +0 -0
  205. {crawlo-1.4.3 → crawlo-1.4.4}/tests/env_config_example.py +0 -0
  206. {crawlo-1.4.3 → crawlo-1.4.4}/tests/error_handling_example.py +0 -0
  207. {crawlo-1.4.3 → crawlo-1.4.4}/tests/final_command_test_report.md +0 -0
  208. {crawlo-1.4.3 → crawlo-1.4.4}/tests/final_comprehensive_test.py +0 -0
  209. {crawlo-1.4.3 → crawlo-1.4.4}/tests/final_log_test.py +0 -0
  210. {crawlo-1.4.3 → crawlo-1.4.4}/tests/final_validation_test.py +0 -0
  211. {crawlo-1.4.3 → crawlo-1.4.4}/tests/fix_log_test.py +0 -0
  212. {crawlo-1.4.3 → crawlo-1.4.4}/tests/framework_performance_test.py +0 -0
  213. {crawlo-1.4.3 → crawlo-1.4.4}/tests/log_buffering_test.py +0 -0
  214. {crawlo-1.4.3 → crawlo-1.4.4}/tests/log_generation_timing_test.py +0 -0
  215. {crawlo-1.4.3 → crawlo-1.4.4}/tests/optimized_performance_test.py +0 -0
  216. {crawlo-1.4.3 → crawlo-1.4.4}/tests/performance_comparison.py +0 -0
  217. {crawlo-1.4.3 → crawlo-1.4.4}/tests/queue_blocking_test.py +0 -0
  218. {crawlo-1.4.3 → crawlo-1.4.4}/tests/queue_test.py +0 -0
  219. {crawlo-1.4.3 → crawlo-1.4.4}/tests/redis_key_validation_demo.py +0 -0
  220. {crawlo-1.4.3 → crawlo-1.4.4}/tests/request_params_example.py +0 -0
  221. {crawlo-1.4.3 → crawlo-1.4.4}/tests/response_improvements_example.py +0 -0
  222. {crawlo-1.4.3 → crawlo-1.4.4}/tests/scrapy_comparison/ofweek_scrapy.py +0 -0
  223. {crawlo-1.4.3 → crawlo-1.4.4}/tests/scrapy_comparison/scrapy_test.py +0 -0
  224. {crawlo-1.4.3 → crawlo-1.4.4}/tests/simple_command_test.py +0 -0
  225. {crawlo-1.4.3 → crawlo-1.4.4}/tests/simple_crawlo_test.py +0 -0
  226. {crawlo-1.4.3 → crawlo-1.4.4}/tests/simple_log_test.py +0 -0
  227. {crawlo-1.4.3 → crawlo-1.4.4}/tests/simple_log_test2.py +0 -0
  228. {crawlo-1.4.3 → crawlo-1.4.4}/tests/simple_optimization_test.py +0 -0
  229. {crawlo-1.4.3 → crawlo-1.4.4}/tests/simple_queue_type_test.py +0 -0
  230. {crawlo-1.4.3 → crawlo-1.4.4}/tests/simple_spider_test.py +0 -0
  231. {crawlo-1.4.3 → crawlo-1.4.4}/tests/simple_test.py +0 -0
  232. {crawlo-1.4.3 → crawlo-1.4.4}/tests/spider_log_timing_test.py +0 -0
  233. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_advanced_tools.py +0 -0
  234. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_all_commands.py +0 -0
  235. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_all_pipeline_fingerprints.py +0 -0
  236. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_all_redis_key_configs.py +0 -0
  237. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_authenticated_proxy.py +0 -0
  238. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_batch_processor.py +0 -0
  239. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_cleaners.py +0 -0
  240. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_component_factory.py +0 -0
  241. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_comprehensive.py +0 -0
  242. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_config_consistency.py +0 -0
  243. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_config_merge.py +0 -0
  244. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_config_validator.py +0 -0
  245. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_controlled_spider_mixin.py +0 -0
  246. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_crawlo_proxy_integration.py +0 -0
  247. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_date_tools.py +0 -0
  248. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_dedup_fix.py +0 -0
  249. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_dedup_pipeline_consistency.py +0 -0
  250. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_default_header_middleware.py +0 -0
  251. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_distributed.py +0 -0
  252. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_double_crawlo_fix.py +0 -0
  253. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_double_crawlo_fix_simple.py +0 -0
  254. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_download_delay_middleware.py +0 -0
  255. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_downloader_proxy_compatibility.py +0 -0
  256. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_dynamic_downloaders_proxy.py +0 -0
  257. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_dynamic_proxy.py +0 -0
  258. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_dynamic_proxy_config.py +0 -0
  259. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_dynamic_proxy_real.py +0 -0
  260. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_edge_cases.py +0 -0
  261. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_enhanced_error_handler.py +0 -0
  262. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_enhanced_error_handler_comprehensive.py +0 -0
  263. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_env_config.py +0 -0
  264. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_error_handler_compatibility.py +0 -0
  265. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_factories.py +0 -0
  266. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_final_validation.py +0 -0
  267. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_fingerprint_consistency.py +0 -0
  268. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_fingerprint_simple.py +0 -0
  269. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_framework_env_usage.py +0 -0
  270. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_framework_logger.py +0 -0
  271. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_framework_startup.py +0 -0
  272. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_get_component_logger.py +0 -0
  273. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_hash_performance.py +0 -0
  274. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_integration.py +0 -0
  275. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_item_dedup_redis_key.py +0 -0
  276. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_large_scale_config.py +0 -0
  277. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_large_scale_helper.py +0 -0
  278. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_logging_enhancements.py +0 -0
  279. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_logging_final.py +0 -0
  280. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_logging_integration.py +0 -0
  281. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_logging_system.py +0 -0
  282. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_middleware_debug.py +0 -0
  283. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_mode_change.py +0 -0
  284. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_mode_consistency.py +0 -0
  285. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_offsite_middleware.py +0 -0
  286. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_offsite_middleware_simple.py +0 -0
  287. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_parsel.py +0 -0
  288. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_performance.py +0 -0
  289. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_performance_monitor.py +0 -0
  290. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_pipeline_fingerprint_consistency.py +0 -0
  291. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_priority_behavior.py +0 -0
  292. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_priority_consistency.py +0 -0
  293. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_priority_consistency_fixed.py +0 -0
  294. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_proxy_api.py +0 -0
  295. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_proxy_health_check.py +0 -0
  296. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_proxy_middleware.py +0 -0
  297. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_proxy_middleware_enhanced.py +0 -0
  298. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_proxy_middleware_integration.py +0 -0
  299. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_proxy_middleware_refactored.py +0 -0
  300. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_proxy_providers.py +0 -0
  301. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_proxy_stats.py +0 -0
  302. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_proxy_strategies.py +0 -0
  303. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_queue_empty_check.py +0 -0
  304. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_queue_manager_double_crawlo.py +0 -0
  305. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_queue_manager_redis_key.py +0 -0
  306. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_queue_naming.py +0 -0
  307. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_queue_type.py +0 -0
  308. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_queue_type_redis_config_consistency.py +0 -0
  309. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_random_headers_default.py +0 -0
  310. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_random_headers_necessity.py +0 -0
  311. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_random_user_agent.py +0 -0
  312. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_real_scenario_proxy.py +0 -0
  313. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_redis_config.py +0 -0
  314. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_redis_connection_pool.py +0 -0
  315. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_redis_key_naming.py +0 -0
  316. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_redis_key_validator.py +0 -0
  317. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_redis_queue.py +0 -0
  318. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_redis_queue_name_fix.py +0 -0
  319. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_redis_queue_type_fallback.py +0 -0
  320. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_request_ignore_middleware.py +0 -0
  321. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_request_params.py +0 -0
  322. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_request_serialization.py +0 -0
  323. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_response_code_middleware.py +0 -0
  324. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_response_filter_middleware.py +0 -0
  325. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_response_improvements.py +0 -0
  326. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_retry_middleware.py +0 -0
  327. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_retry_middleware_realistic.py +0 -0
  328. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_scheduler.py +0 -0
  329. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_scheduler_config_update.py +0 -0
  330. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_simple_response.py +0 -0
  331. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_telecom_spider_redis_key.py +0 -0
  332. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_template_content.py +0 -0
  333. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_template_redis_key.py +0 -0
  334. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_tools.py +0 -0
  335. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_user_agent_randomness.py +0 -0
  336. {crawlo-1.4.3 → crawlo-1.4.4}/tests/test_user_agents.py +0 -0
  337. {crawlo-1.4.3 → crawlo-1.4.4}/tests/tools_example.py +0 -0
  338. {crawlo-1.4.3 → crawlo-1.4.4}/tests/untested_features_report.md +0 -0
  339. {crawlo-1.4.3 → crawlo-1.4.4}/tests/verify_debug.py +0 -0
  340. {crawlo-1.4.3 → crawlo-1.4.4}/tests/verify_distributed.py +0 -0
  341. {crawlo-1.4.3 → crawlo-1.4.4}/tests/verify_log_fix.py +0 -0

{crawlo-1.4.3/crawlo.egg-info → crawlo-1.4.4}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: crawlo
-Version: 1.4.3
+Version: 1.4.4
 Summary: Crawlo is a high-performance Python crawler framework built on asynchronous IO, with support for distributed crawling.
 Home-page: https://github.com/crawl-coder/Crawlo.git
 Author: crawl-coder

crawlo-1.4.4/crawlo/__version__.py
@@ -0,0 +1 @@
+__version__ = '1.4.4'

{crawlo-1.4.3 → crawlo-1.4.4}/crawlo/commands/genspider.py
@@ -6,15 +6,16 @@
 # @Desc : Command-line entry point: crawlo genspider baidu, creates a spider.
 """
 import sys
+import re
 from pathlib import Path
 import configparser
 import importlib
 from rich.console import Console

 from .utils import (
-    get_project_root,
-    validate_project_environment,
-    show_error_panel,
+    get_project_root,
+    validate_project_environment,
+    show_error_panel,
     show_success_panel,
     validate_spider_name,
     is_valid_domain
@@ -35,6 +36,39 @@ def _render_template(tmpl_path, context):
     return content


+def generate_class_name(spider_name):
+    """
+    Generate a class name from a spider name.
+    Rule: snake_case -> PascalCase + 'Spider'
+    Examples:
+        'news_spider' -> 'NewsSpider'
+        'ofweek_standalone' -> 'OfweekStandaloneSpider'
+        'baidu' -> 'BaiduSpider'
+    """
+    # If the name already carries a 'spider' suffix, strip it first
+    name_clean = spider_name
+
+    # Suffixes to strip
+    spider_suffixes = ['_spider', 'spider']
+
+    # Check for and remove a matching suffix
+    for suffix in spider_suffixes:
+        if spider_name.endswith(suffix):
+            name_clean = spider_name[:-len(suffix)]
+            break
+
+    # Split into words on separators
+    words = re.split(r'[_-]', name_clean)
+
+    # Capitalize the first letter of each word
+    capitalized_words = [word.capitalize() for word in words if word]
+
+    # Assemble the class name
+    class_name = ''.join(capitalized_words) + 'Spider'
+
+    return class_name
+
+
 def main(args):
     if len(args) < 2:
         console.print("[bold red]Error:[/bold red] Usage: [blue]crawlo genspider[/blue] <spider_name> <domain>")
@@ -45,11 +79,11 @@ def main(args):

     spider_name = args[0]
     domain = args[1]
-
+
     # Validate the spider name
     if not validate_spider_name(spider_name):
         show_error_panel(
-            "Invalid spider name",
+            "Invalid spider name",
             f"Spider name '[cyan]{spider_name}[/cyan]' is invalid.\n"
             "A spider name should:\n"
             " • start with a lowercase letter\n"
@@ -57,11 +91,11 @@
             " • be a valid Python identifier"
         )
         return 1
-
+
     # Validate the domain format
     if not is_valid_domain(domain):
         show_error_panel(
-            "Invalid domain",
+            "Invalid domain",
             f"Domain '[cyan]{domain}[/cyan]' has an invalid format.\n"
             "Please provide a valid domain such as 'example.com'"
         )
@@ -72,7 +106,7 @@
     if not is_valid:
         show_error_panel("Not a Crawlo project", error_msg)
         return 1
-
+
     project_root = get_project_root()

     # Determine the path of the items module
@@ -91,7 +125,8 @@
         if item_classes:
             default_item_class = item_classes[0].__name__
         else:
-            console.print("[yellow]Warning:[/yellow] No item classes were found in [cyan]items.py[/cyan]; using [green]ExampleItem[/green].")
+            console.print(
+                "[yellow]Warning:[/yellow] No item classes were found in [cyan]items.py[/cyan]; using [green]ExampleItem[/green].")

     except ImportError as e:
         console.print(f"[yellow]Warning:[/yellow] Failed to import [cyan]{items_module_path}[/cyan]: {e}")
@@ -104,7 +139,7 @@
     spider_file = spiders_dir / f'{spider_name}.py'
     if spider_file.exists():
         show_error_panel(
-            "Spider already exists",
+            "Spider already exists",
             f"Spider '[cyan]{spider_name}[/cyan]' already exists at\n[green]{spider_file}[/green]"
         )
         return 1
@@ -113,13 +148,13 @@
     tmpl_path = TEMPLATES_DIR / 'spider' / 'spider.py.tmpl'
     if not tmpl_path.exists():
         show_error_panel(
-            "Template not found",
+            "Template not found",
             f"Template file not found at [cyan]{tmpl_path}[/cyan]"
         )
         return 1

-    # Generate the class name
-    class_name = f"{spider_name.replace('_', '').capitalize()}Spider"
+    # Generate the class name (via the new conversion helper)
+    class_name = generate_class_name(spider_name)

     context = {
         'spider_name': spider_name,
@@ -133,7 +168,7 @@
     content = _render_template(tmpl_path, context)
     with open(spider_file, 'w', encoding='utf-8') as f:
         f.write(content)
-
+
     console.print(f"[green]Spider '[bold]{spider_name}[/bold]' was created successfully![/green]")
     console.print(f" → Location: [cyan]{spider_file}[/cyan]")
     console.print(f" → Class name: [yellow]{class_name}[/yellow]")
@@ -141,12 +176,12 @@
     console.print("\n[bold]Next steps:[/bold]")
     console.print(f" [blue]crawlo run[/blue] {spider_name}")
     console.print(f" [blue]crawlo check[/blue] {spider_name}")
-
+
     return 0
-
+
 except Exception as e:
     show_error_panel(
-        "Creation failed",
+        "Creation failed",
         f"Failed to create spider: {e}"
     )
     return 1
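
The net effect of the genspider change: the old one-liner collapsed underscores before capitalizing, so 'news_spider' yielded 'NewsspiderSpider'. A quick sketch of the new helper's behavior (the 'my-feed' input is a hypothetical example; the import path follows the file location above):

    from crawlo.commands.genspider import generate_class_name

    assert generate_class_name('news_spider') == 'NewsSpider'              # '_spider' suffix stripped first
    assert generate_class_name('ofweek_standalone') == 'OfweekStandaloneSpider'
    assert generate_class_name('baidu') == 'BaiduSpider'
    assert generate_class_name('my-feed') == 'MyFeedSpider'                # hyphens also act as word separators

    # The 1.4.3 rule, for comparison:
    # f"{'news_spider'.replace('_', '').capitalize()}Spider" == 'NewsspiderSpider'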

{crawlo-1.4.3 → crawlo-1.4.4}/crawlo/crawler.py
@@ -350,6 +350,12 @@ class CrawlerProcess:
         self._crawlers: List[ModernCrawler] = []
         self._semaphore = asyncio.Semaphore(max_concurrency)
         self._logger = get_logger('crawler.process')
+
+        # If spider_modules was not provided explicitly, read it from settings
+        if spider_modules is None and self._settings:
+            spider_modules = self._settings.get('SPIDER_MODULES')
+            self._logger.debug(f"Read SPIDER_MODULES from settings: {spider_modules}")
+
         self._spider_modules = spider_modules  # keep a reference to spider_modules

         # If spider_modules was provided, auto-register the spiders from those modules
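
In practice this means SPIDER_MODULES from the project settings now drives spider auto-registration even when the constructor argument is omitted. A minimal sketch, assuming the rest of the CrawlerProcess signature is unchanged from 1.4.3 ('myproject.spiders' is an illustrative module path):

    from crawlo.crawler import CrawlerProcess

    # 1.4.3 and earlier: spiders were auto-registered only with an explicit argument
    process = CrawlerProcess(spider_modules=['myproject.spiders'])

    # 1.4.4: with SPIDER_MODULES = ['myproject.spiders'] in the project settings,
    # the argument can be omitted and the same registration happens via the fallback
    process = CrawlerProcess()

The new tests/test_spider_modules.py, tests/test_multiple_spider_modules.py, and tests/test_multi_directory.py in the file list above exercise this path.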

{crawlo-1.4.3 → crawlo-1.4.4}/crawlo/queue/pqueue.py
@@ -1,12 +1,8 @@
 # -*- coding:UTF-8 -*-
-import json
-import sys
 import asyncio
+import sys
 from asyncio import PriorityQueue
-from typing import Optional, Tuple, Any
-
-
-from crawlo import Request
+from typing import Optional, Any


 class SpiderPriorityQueue(PriorityQueue):

{crawlo-1.4.3 → crawlo-1.4.4}/crawlo/queue/queue_manager.py
@@ -5,11 +5,10 @@
 Provides a clean, consistent queue interface and transparently handles the differences between queue types
 """
 import asyncio
+import time
 import traceback
 from enum import Enum
 from typing import Optional, Dict, Any, Union, TYPE_CHECKING
-import time
-import random

 if TYPE_CHECKING:
     from crawlo import Request

{crawlo-1.4.3 → crawlo-1.4.4}/crawlo/settings/default_settings.py
@@ -6,9 +6,7 @@
 # Import the environment-variable configuration helpers
 from crawlo.utils.env_config import get_redis_config, get_runtime_config, get_version

-# ===========================================================================
-# 1. Framework basics
-# ===========================================================================
+# --------------------------------- 1. Framework basics ------------------------------------

 # Framework initialization control
 FRAMEWORK_INIT_ORDER = [
@@ -27,9 +25,7 @@ VERSION = get_version()  # Project version - read from the framework's __version__.py
 RUN_MODE = runtime_config['CRAWLO_MODE']  # Run mode: standalone/distributed/auto
 CONCURRENCY = runtime_config['CONCURRENCY']  # Concurrency setting

-# ===========================================================================
-# 2. Crawler core
-# ===========================================================================
+# --------------------------------- 2. Crawler core ------------------------------------

 # Downloader configuration
 DOWNLOADER = "crawlo.downloader.httpx_downloader.HttpXDownloader"  # Default downloader
@@ -53,9 +49,7 @@ QUEUE_TYPE = 'auto'  # Queue type: memory/redis/auto
 QUEUE_MAX_RETRIES = 3  # Maximum retries for queue operations
 QUEUE_TIMEOUT = 300  # Queue operation timeout (seconds)

-# ===========================================================================
-# 3. Database and filters
-# ===========================================================================
+# --------------------------------- 3. Database and filters ------------------------------------

 # MySQL configuration
 MYSQL_HOST = '127.0.0.1'
@@ -100,9 +94,7 @@ FILTER_CLASS = 'crawlo.filters.memory_filter.MemoryFilter'
 BLOOM_FILTER_CAPACITY = 1000000  # Bloom filter capacity
 BLOOM_FILTER_ERROR_RATE = 0.001  # Bloom filter error rate

-# ===========================================================================
-# 4. Middleware
-# ===========================================================================
+# --------------------------------- 4. Middleware ------------------------------------

 # Framework middleware list (framework defaults + user-defined middleware)
 MIDDLEWARES = [
@@ -118,18 +110,14 @@ MIDDLEWARES = [
     'crawlo.middleware.response_filter.ResponseFilterMiddleware',  # 8. Response content filtering
 ]

-# ===========================================================================
-# 5. Pipelines
-# ===========================================================================
+# --------------------------------- 5. Pipelines ------------------------------------

 # Framework data pipeline list (framework defaults + user-defined pipelines)
 PIPELINES = [
     'crawlo.pipelines.console_pipeline.ConsolePipeline',
 ]

-# ===========================================================================
-# 6. Extensions
-# ===========================================================================
+# --------------------------------- 6. Extensions ------------------------------------

 # Framework extension list (framework defaults + user-defined extensions)
 EXTENSIONS = [
@@ -138,9 +126,7 @@ EXTENSIONS = [
     'crawlo.extension.logging_extension.CustomLoggerExtension',  # Custom logging
 ]

-# ===========================================================================
-# 7. Logging and monitoring
-# ===========================================================================
+# --------------------------------- 7. Logging and monitoring ------------------------------------

 # Logging configuration
 LOG_LEVEL = None  # Log level: DEBUG/INFO/WARNING/ERROR; defaults to None and is set by the user in the project settings
@@ -148,13 +134,12 @@ STATS_DUMP = True  # Whether to periodically dump statistics
 LOG_FILE = None  # Log file path, set in the project configuration
 LOG_FORMAT = '%(asctime)s - [%(name)s] - %(levelname)s: %(message)s'
 LOG_ENCODING = 'utf-8'
+LOG_MAX_BYTES = 10 * 1024 * 1024  # Log rotation size (bytes)
+LOG_BACKUP_COUNT = 5  # Number of rotated backups to keep

 # Log interval configuration
 INTERVAL = 60  # Log output interval (seconds)

-# Custom logging configuration
-LOG_ENABLE_CUSTOM = False  # Whether to enable custom logging
-
 # Memory monitoring configuration
 MEMORY_MONITOR_ENABLED = False  # Whether to enable memory monitoring
 MEMORY_MONITOR_INTERVAL = 60  # Memory monitor check interval (seconds)
@@ -169,9 +154,7 @@ PERFORMANCE_PROFILER_INTERVAL = 300  # Profiling interval (seconds)
 # Health check configuration
 HEALTH_CHECK_ENABLED = True  # Whether to enable health checks

-# ===========================================================================
-# 8. Network requests
-# ===========================================================================
+# --------------------------------- 8. Network requests ------------------------------------

 # Default request headers
 DEFAULT_REQUEST_HEADERS = {
@@ -262,9 +245,7 @@ PLAYWRIGHT_MAX_PAGES_PER_BROWSER = 10  # Maximum pages per browser instance
 CONNECTION_TTL_DNS_CACHE = 300  # DNS cache TTL (seconds)
 CONNECTION_KEEPALIVE_TIMEOUT = 15  # Keep-Alive timeout (seconds)

-# ===========================================================================
-# 9. Data storage
-# ===========================================================================
+# --------------------------------- 9. Data storage ------------------------------------

 # CSV pipeline configuration
 CSV_DELIMITER = ','  # CSV delimiter
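
The new LOG_MAX_BYTES and LOG_BACKUP_COUNT defaults introduce size-based log rotation. A minimal sketch of what such settings conventionally map onto, using the standard library's RotatingFileHandler; crawlo's actual handler wiring lives under crawlo/logging/ and is not shown in this diff:

    import logging
    from logging.handlers import RotatingFileHandler

    # Hypothetical wiring: rotate at LOG_MAX_BYTES, keep LOG_BACKUP_COUNT backups
    handler = RotatingFileHandler(
        'logs/crawlo.log',
        maxBytes=10 * 1024 * 1024,  # LOG_MAX_BYTES
        backupCount=5,              # LOG_BACKUP_COUNT
        encoding='utf-8',           # LOG_ENCODING
    )
    handler.setFormatter(logging.Formatter(
        '%(asctime)s - [%(name)s] - %(levelname)s: %(message)s'  # LOG_FORMAT
    ))
    logging.getLogger('crawlo').addHandler(handler)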

crawlo-1.4.4/crawlo/templates/project/settings.py.tmpl
@@ -0,0 +1,157 @@
+# -*- coding: UTF-8 -*-
+"""
+{{project_name}} project configuration
+=============================
+Spider project configuration based on the Crawlo framework.
+
+This configuration uses the CrawloConfig.standalone() factory method to create a standalone-mode configuration,
+suitable for development, testing, and small-to-medium data collection tasks.
+"""
+
+from crawlo.config import CrawloConfig
+
+# Create the configuration with the standalone-mode factory
+config = CrawloConfig.auto(
+    project_name='{{project_name}}',
+    concurrency=8,
+    download_delay=1.0
+)
+
+# Promote the configuration to module-level globals
+locals().update(config.to_dict())
+
+# =================================== Spider configuration ===================================
+
+# Spider module configuration
+SPIDER_MODULES = ['{{project_name}}.spiders']
+
+# Default request headers
+# Configure default request headers for DefaultHeaderMiddleware
+# DEFAULT_REQUEST_HEADERS = {}
+
+# Allowed domains
+# Configure allowed domains for OffsiteMiddleware
+# ALLOWED_DOMAINS = []
+
+# Data pipelines
+# To add custom pipelines, uncomment and extend
+# PIPELINES = [
+#     'crawlo.pipelines.mysql_pipeline.AsyncmyMySQLPipeline',  # MySQL storage (uses the asyncmy async driver)
+#     # '{{project_name}}.pipelines.CustomPipeline',  # example user-defined pipeline
+# ]
+
+# =================================== System configuration ===================================
+
+# Extensions
+# To add custom extensions, uncomment and extend
+# EXTENSIONS = [
+#     # '{{project_name}}.extensions.CustomExtension',  # example user-defined extension
+# ]
+
+# Middleware
+# To add custom middleware, uncomment and extend
+# MIDDLEWARES = [
+#     # '{{project_name}}.middlewares.CustomMiddleware',  # example user-defined middleware
+# ]
+
+# Logging configuration
+LOG_LEVEL = 'INFO'
+LOG_FILE = 'logs/{{project_name}}.log'
+LOG_ENCODING = 'utf-8'  # Explicitly set the log file encoding
+STATS_DUMP = True
+
+# Output configuration
+OUTPUT_DIR = 'output'
+
+# =================================== Database configuration ===================================
+
+# Redis configuration
+REDIS_HOST = '127.0.0.1'
+REDIS_PORT = 6379
+REDIS_PASSWORD = ''
+REDIS_DB = 0
+
+# Build the URL depending on whether a password is set
+if REDIS_PASSWORD:
+    REDIS_URL = f'redis://:{REDIS_PASSWORD}@{REDIS_HOST}:{REDIS_PORT}/{REDIS_DB}'
+else:
+    REDIS_URL = f'redis://{REDIS_HOST}:{REDIS_PORT}/{REDIS_DB}'
+
+# MySQL configuration
+MYSQL_HOST = '127.0.0.1'
+MYSQL_PORT = 3306
+MYSQL_USER = 'root'
+MYSQL_PASSWORD = '123456'
+MYSQL_DB = '{{project_name}}'
+MYSQL_TABLE = '{{project_name}}_data'
+MYSQL_BATCH_SIZE = 100
+MYSQL_USE_BATCH = False  # Whether to enable batch inserts
+
+# MongoDB configuration
+MONGO_URI = 'mongodb://localhost:27017'
+MONGO_DATABASE = '{{project_name}}_db'
+MONGO_COLLECTION = '{{project_name}}_items'
+MONGO_MAX_POOL_SIZE = 200
+MONGO_MIN_POOL_SIZE = 20
+MONGO_BATCH_SIZE = 100  # Batch insert size
+MONGO_USE_BATCH = False  # Whether to enable batch inserts
+
+# =================================== Network configuration ===================================
+
+# Proxy configuration
+# Proxy support is disabled by default; enable it and set the related options in the project settings if needed
+PROXY_ENABLED = False  # Whether to enable proxies
+
+# Simple proxy configuration (for SimpleProxyMiddleware)
+PROXY_LIST = []  # Proxy list, e.g.: ["http://proxy1:8080", "http://proxy2:8080"]
+
+# Advanced proxy configuration (for ProxyMiddleware)
+PROXY_API_URL = ""  # Proxy-fetching API (replace with a real endpoint)
+
+# Proxy extraction (a field path or a function)
+# Example: "proxy" matches {"proxy": "http://1.1.1.1:8080"}
+# Example: "data.proxy" matches {"data": {"proxy": "http://1.1.1.1:8080"}}
+PROXY_EXTRACTOR = "proxy"
+
+# Proxy refresh control
+PROXY_REFRESH_INTERVAL = 60  # Proxy refresh interval (seconds)
+PROXY_API_TIMEOUT = 10  # Timeout for proxy API requests
+
+# Browser fingerprint emulation (CurlCffi downloader only)
+CURL_BROWSER_TYPE = "chrome"  # Options: chrome, edge, safari, firefox, or a pinned version such as chrome136
+
+# Custom browser version map (overrides the default behavior)
+CURL_BROWSER_VERSION_MAP = {
+    "chrome": "chrome136",
+    "edge": "edge101",
+    "safari": "safari184",
+    "firefox": "firefox135",
+}
+
+# Downloader tuning
+# Downloader health checks
+DOWNLOADER_HEALTH_CHECK = True  # Whether to enable downloader health checks
+HEALTH_CHECK_INTERVAL = 60  # Health check interval (seconds)
+
+# Request statistics
+REQUEST_STATS_ENABLED = True  # Whether to enable request statistics
+STATS_RESET_ON_START = False  # Whether to reset statistics on startup
+
+# HttpX downloader specific settings
+HTTPX_HTTP2 = True  # Whether to enable HTTP/2 support
+HTTPX_FOLLOW_REDIRECTS = True  # Whether to follow redirects automatically
+
+# AioHttp downloader specific settings
+AIOHTTP_AUTO_DECOMPRESS = True  # Whether to auto-decompress responses
+AIOHTTP_FORCE_CLOSE = False  # Whether to force-close connections
+
+# General tuning
+CONNECTION_TTL_DNS_CACHE = 300  # DNS cache TTL (seconds)
+CONNECTION_KEEPALIVE_TIMEOUT = 15  # Keep-Alive timeout (seconds)
+
+# Memory monitoring configuration
+# The memory monitor extension is disabled by default; enable it in the project settings if needed
+MEMORY_MONITOR_ENABLED = False  # Whether to enable memory monitoring
+MEMORY_MONITOR_INTERVAL = 60  # Memory monitor check interval (seconds)
+MEMORY_WARNING_THRESHOLD = 80.0  # Memory usage warning threshold (percent)
+MEMORY_CRITICAL_THRESHOLD = 90.0  # Memory usage critical threshold (percent)
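
A note on the template's factory pattern: at module top level, locals() is the module's globals() dict, so locals().update(config.to_dict()) injects every factory-generated setting as a module-level name, and any assignment later in the file (LOG_LEVEL, SPIDER_MODULES, ...) overrides the factory value. Reduced to its essence (the exact keys returned by to_dict() are not shown in this diff):

    # inside a settings module
    from crawlo.config import CrawloConfig

    config = CrawloConfig.auto(project_name='demo', concurrency=8, download_delay=1.0)
    locals().update(config.to_dict())   # factory settings become module globals

    LOG_LEVEL = 'INFO'                  # later assignments shadow the factory values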

crawlo-1.4.4/crawlo/templates/project/settings_distributed.py.tmpl
@@ -0,0 +1,162 @@
+# -*- coding: UTF-8 -*-
+"""
+{{project_name}} project configuration (distributed)
+=============================
+Distributed spider project configuration based on the Crawlo framework,
+suited to large-scale data collection and multi-node deployment.
+
+This configuration uses the CrawloConfig.distributed() factory method to create a distributed-mode configuration
+that lets multiple nodes cooperate, for large-scale data collection tasks.
+"""
+
+from crawlo.config import CrawloConfig
+
+# Create the configuration with the distributed-mode factory
+config = CrawloConfig.distributed(
+    project_name='{{project_name}}',
+    redis_host='127.0.0.1',
+    redis_port=6379,
+    redis_password='',
+    redis_db=0,
+    concurrency=16,
+    download_delay=1.0
+)
+
+# Promote the configuration to module-level globals
+locals().update(config.to_dict())
+
+# =================================== Spider configuration ===================================
+
+# Spider module configuration
+SPIDER_MODULES = ['{{project_name}}.spiders']
+
+# Default request headers
+# Configure default request headers for DefaultHeaderMiddleware
+# DEFAULT_REQUEST_HEADERS = {}
+
+# Allowed domains
+# Configure allowed domains for OffsiteMiddleware
+# ALLOWED_DOMAINS = []
+
+# Data pipelines
+# To add custom pipelines, uncomment and extend
+# PIPELINES = [
+#     'crawlo.pipelines.mysql_pipeline.AsyncmyMySQLPipeline',  # MySQL storage (uses the asyncmy async driver)
+#     # '{{project_name}}.pipelines.CustomPipeline',  # example user-defined pipeline
+# ]
+
+# =================================== System configuration ===================================
+
+# Extensions
+# To add custom extensions, uncomment and extend
+# EXTENSIONS = [
+#     # '{{project_name}}.extensions.CustomExtension',  # example user-defined extension
+# ]
+
+# Middleware
+# To add custom middleware, uncomment and extend
+# MIDDLEWARES = [
+#     # '{{project_name}}.middlewares.CustomMiddleware',  # example user-defined middleware
+# ]
+
+# Logging configuration
+LOG_LEVEL = 'INFO'
+LOG_FILE = 'logs/{{project_name}}.log'
+LOG_ENCODING = 'utf-8'  # Explicitly set the log file encoding
+STATS_DUMP = True
+
+# Output configuration
+OUTPUT_DIR = 'output'
+
+# =================================== Database configuration ===================================
+
+# Redis configuration
+REDIS_HOST = '127.0.0.1'
+REDIS_PORT = 6379
+REDIS_PASSWORD = ''
+REDIS_DB = 0
+
+# Build the URL depending on whether a password is set
+if REDIS_PASSWORD:
+    REDIS_URL = f'redis://:{REDIS_PASSWORD}@{REDIS_HOST}:{REDIS_PORT}/{REDIS_DB}'
+else:
+    REDIS_URL = f'redis://{REDIS_HOST}:{REDIS_PORT}/{REDIS_DB}'
+
+# MySQL configuration
+MYSQL_HOST = '127.0.0.1'
+MYSQL_PORT = 3306
+MYSQL_USER = 'root'
+MYSQL_PASSWORD = '123456'
+MYSQL_DB = '{{project_name}}'
+MYSQL_TABLE = '{{project_name}}_data'
+MYSQL_BATCH_SIZE = 100
+MYSQL_USE_BATCH = True  # Whether to enable batch inserts
+
+# MongoDB configuration
+MONGO_URI = 'mongodb://localhost:27017'
+MONGO_DATABASE = '{{project_name}}_db'
+MONGO_COLLECTION = '{{project_name}}_items'
+MONGO_MAX_POOL_SIZE = 200
+MONGO_MIN_POOL_SIZE = 20
+MONGO_BATCH_SIZE = 100  # Batch insert size
+MONGO_USE_BATCH = True  # Whether to enable batch inserts
+
+# =================================== Network configuration ===================================
+
+# Proxy configuration
+# Proxy support is disabled by default; enable it and set the related options in the project settings if needed
+PROXY_ENABLED = False  # Whether to enable proxies
+
+# Simple proxy configuration (for SimpleProxyMiddleware)
+PROXY_LIST = []  # Proxy list, e.g.: ["http://proxy1:8080", "http://proxy2:8080"]
+
+# Advanced proxy configuration (for ProxyMiddleware)
+PROXY_API_URL = ""  # Proxy-fetching API (replace with a real endpoint)
+
+# Proxy extraction (a field path or a function)
+# Example: "proxy" matches {"proxy": "http://1.1.1.1:8080"}
+# Example: "data.proxy" matches {"data": {"proxy": "http://1.1.1.1:8080"}}
+PROXY_EXTRACTOR = "proxy"
+
+# Proxy refresh control
+PROXY_REFRESH_INTERVAL = 60  # Proxy refresh interval (seconds)
+PROXY_API_TIMEOUT = 10  # Timeout for proxy API requests
+
+# Browser fingerprint emulation (CurlCffi downloader only)
+CURL_BROWSER_TYPE = "chrome"  # Options: chrome, edge, safari, firefox, or a pinned version such as chrome136
+
+# Custom browser version map (overrides the default behavior)
+CURL_BROWSER_VERSION_MAP = {
+    "chrome": "chrome136",
+    "edge": "edge101",
+    "safari": "safari184",
+    "firefox": "firefox135",
+}
+
+# Downloader tuning
+# Downloader health checks
+DOWNLOADER_HEALTH_CHECK = True  # Whether to enable downloader health checks
+HEALTH_CHECK_INTERVAL = 60  # Health check interval (seconds)
+
+# Request statistics
+REQUEST_STATS_ENABLED = True  # Whether to enable request statistics
+STATS_RESET_ON_START = False  # Whether to reset statistics on startup
+
+# HttpX downloader specific settings
+HTTPX_HTTP2 = True  # Whether to enable HTTP/2 support
+HTTPX_FOLLOW_REDIRECTS = True  # Whether to follow redirects automatically
+
+# AioHttp downloader specific settings
+AIOHTTP_AUTO_DECOMPRESS = True  # Whether to auto-decompress responses
+AIOHTTP_FORCE_CLOSE = False  # Whether to force-close connections
+
+# General tuning
+CONNECTION_TTL_DNS_CACHE = 300  # DNS cache TTL (seconds)
+CONNECTION_KEEPALIVE_TIMEOUT = 15  # Keep-Alive timeout (seconds)
+
+# Memory monitoring configuration
+# The memory monitor extension is disabled by default; enable it in the project settings if needed
+MEMORY_MONITOR_ENABLED = False  # Whether to enable memory monitoring
+MEMORY_MONITOR_INTERVAL = 60  # Memory monitor check interval (seconds)
+MEMORY_WARNING_THRESHOLD = 80.0  # Memory usage warning threshold (percent)
+MEMORY_CRITICAL_THRESHOLD = 90.0  # Memory usage critical threshold (percent)
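
Both new templates derive REDIS_URL from the four REDIS_* settings. A quick check of the two branches (the password value is illustrative):

    REDIS_HOST, REDIS_PORT, REDIS_DB = '127.0.0.1', 6379, 0

    REDIS_PASSWORD = ''
    # no password -> 'redis://127.0.0.1:6379/0'

    REDIS_PASSWORD = 's3cret'
    # password set -> 'redis://:s3cret@127.0.0.1:6379/0'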