crawlo-1.1.4-py3-none-any.whl → crawlo-1.1.6-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of crawlo might be problematic.

Files changed (190)
  1. crawlo/__init__.py +61 -34
  2. crawlo/__version__.py +1 -1
  3. crawlo/cleaners/__init__.py +61 -0
  4. crawlo/cleaners/data_formatter.py +226 -0
  5. crawlo/cleaners/encoding_converter.py +126 -0
  6. crawlo/cleaners/text_cleaner.py +233 -0
  7. crawlo/cli.py +40 -40
  8. crawlo/commands/__init__.py +13 -13
  9. crawlo/commands/check.py +594 -594
  10. crawlo/commands/genspider.py +151 -151
  11. crawlo/commands/list.py +155 -155
  12. crawlo/commands/run.py +292 -285
  13. crawlo/commands/startproject.py +419 -196
  14. crawlo/commands/stats.py +188 -188
  15. crawlo/commands/utils.py +186 -186
  16. crawlo/config.py +312 -279
  17. crawlo/config_validator.py +253 -0
  18. crawlo/core/__init__.py +2 -2
  19. crawlo/core/engine.py +346 -172
  20. crawlo/core/processor.py +40 -40
  21. crawlo/core/scheduler.py +137 -166
  22. crawlo/crawler.py +1027 -1027
  23. crawlo/downloader/__init__.py +266 -242
  24. crawlo/downloader/aiohttp_downloader.py +220 -212
  25. crawlo/downloader/cffi_downloader.py +256 -251
  26. crawlo/downloader/httpx_downloader.py +259 -259
  27. crawlo/downloader/hybrid_downloader.py +214 -0
  28. crawlo/downloader/playwright_downloader.py +403 -0
  29. crawlo/downloader/selenium_downloader.py +473 -0
  30. crawlo/event.py +11 -11
  31. crawlo/exceptions.py +81 -81
  32. crawlo/extension/__init__.py +37 -37
  33. crawlo/extension/health_check.py +141 -141
  34. crawlo/extension/log_interval.py +57 -57
  35. crawlo/extension/log_stats.py +81 -81
  36. crawlo/extension/logging_extension.py +43 -43
  37. crawlo/extension/memory_monitor.py +104 -88
  38. crawlo/extension/performance_profiler.py +133 -117
  39. crawlo/extension/request_recorder.py +107 -107
  40. crawlo/filters/__init__.py +154 -154
  41. crawlo/filters/aioredis_filter.py +281 -242
  42. crawlo/filters/memory_filter.py +269 -269
  43. crawlo/items/__init__.py +23 -23
  44. crawlo/items/base.py +21 -21
  45. crawlo/items/fields.py +53 -53
  46. crawlo/items/items.py +104 -104
  47. crawlo/middleware/__init__.py +21 -21
  48. crawlo/middleware/default_header.py +32 -32
  49. crawlo/middleware/download_delay.py +28 -28
  50. crawlo/middleware/middleware_manager.py +135 -135
  51. crawlo/middleware/proxy.py +272 -248
  52. crawlo/middleware/request_ignore.py +30 -30
  53. crawlo/middleware/response_code.py +18 -18
  54. crawlo/middleware/response_filter.py +26 -26
  55. crawlo/middleware/retry.py +124 -124
  56. crawlo/mode_manager.py +212 -201
  57. crawlo/network/__init__.py +21 -21
  58. crawlo/network/request.py +338 -311
  59. crawlo/network/response.py +360 -271
  60. crawlo/pipelines/__init__.py +21 -21
  61. crawlo/pipelines/bloom_dedup_pipeline.py +156 -156
  62. crawlo/pipelines/console_pipeline.py +39 -39
  63. crawlo/pipelines/csv_pipeline.py +316 -316
  64. crawlo/pipelines/database_dedup_pipeline.py +224 -224
  65. crawlo/pipelines/json_pipeline.py +218 -218
  66. crawlo/pipelines/memory_dedup_pipeline.py +115 -115
  67. crawlo/pipelines/mongo_pipeline.py +131 -131
  68. crawlo/pipelines/mysql_pipeline.py +316 -316
  69. crawlo/pipelines/pipeline_manager.py +61 -56
  70. crawlo/pipelines/redis_dedup_pipeline.py +167 -162
  71. crawlo/project.py +188 -153
  72. crawlo/queue/pqueue.py +37 -37
  73. crawlo/queue/queue_manager.py +334 -307
  74. crawlo/queue/redis_priority_queue.py +299 -209
  75. crawlo/settings/__init__.py +7 -7
  76. crawlo/settings/default_settings.py +219 -278
  77. crawlo/settings/setting_manager.py +123 -100
  78. crawlo/spider/__init__.py +639 -639
  79. crawlo/stats_collector.py +59 -59
  80. crawlo/subscriber.py +130 -130
  81. crawlo/task_manager.py +30 -30
  82. crawlo/templates/crawlo.cfg.tmpl +10 -10
  83. crawlo/templates/project/__init__.py.tmpl +3 -3
  84. crawlo/templates/project/items.py.tmpl +17 -17
  85. crawlo/templates/project/middlewares.py.tmpl +110 -110
  86. crawlo/templates/project/pipelines.py.tmpl +97 -97
  87. crawlo/templates/project/run.py.tmpl +251 -251
  88. crawlo/templates/project/settings.py.tmpl +326 -279
  89. crawlo/templates/project/settings_distributed.py.tmpl +120 -0
  90. crawlo/templates/project/settings_gentle.py.tmpl +95 -0
  91. crawlo/templates/project/settings_high_performance.py.tmpl +152 -0
  92. crawlo/templates/project/settings_simple.py.tmpl +69 -0
  93. crawlo/templates/project/spiders/__init__.py.tmpl +5 -5
  94. crawlo/templates/spider/spider.py.tmpl +141 -141
  95. crawlo/tools/__init__.py +183 -0
  96. crawlo/tools/anti_crawler.py +269 -0
  97. crawlo/tools/authenticated_proxy.py +241 -0
  98. crawlo/tools/data_validator.py +181 -0
  99. crawlo/tools/date_tools.py +36 -0
  100. crawlo/tools/distributed_coordinator.py +387 -0
  101. crawlo/tools/retry_mechanism.py +221 -0
  102. crawlo/tools/scenario_adapter.py +263 -0
  103. crawlo/utils/__init__.py +35 -7
  104. crawlo/utils/batch_processor.py +261 -0
  105. crawlo/utils/controlled_spider_mixin.py +439 -439
  106. crawlo/utils/date_tools.py +290 -233
  107. crawlo/utils/db_helper.py +343 -343
  108. crawlo/utils/enhanced_error_handler.py +360 -0
  109. crawlo/utils/env_config.py +106 -0
  110. crawlo/utils/error_handler.py +126 -0
  111. crawlo/utils/func_tools.py +82 -82
  112. crawlo/utils/large_scale_config.py +286 -286
  113. crawlo/utils/large_scale_helper.py +343 -343
  114. crawlo/utils/log.py +128 -128
  115. crawlo/utils/performance_monitor.py +285 -0
  116. crawlo/utils/queue_helper.py +175 -175
  117. crawlo/utils/redis_connection_pool.py +335 -0
  118. crawlo/utils/redis_key_validator.py +200 -0
  119. crawlo/utils/request.py +267 -267
  120. crawlo/utils/request_serializer.py +219 -219
  121. crawlo/utils/spider_loader.py +62 -62
  122. crawlo/utils/system.py +11 -11
  123. crawlo/utils/tools.py +4 -4
  124. crawlo/utils/url.py +39 -39
  125. {crawlo-1.1.4.dist-info → crawlo-1.1.6.dist-info}/METADATA +401 -403
  126. crawlo-1.1.6.dist-info/RECORD +189 -0
  127. examples/__init__.py +7 -7
  128. tests/DOUBLE_CRAWLO_PREFIX_FIX_REPORT.md +82 -0
  129. tests/__init__.py +7 -7
  130. tests/advanced_tools_example.py +276 -0
  131. tests/authenticated_proxy_example.py +237 -0
  132. tests/cleaners_example.py +161 -0
  133. tests/config_validation_demo.py +103 -0
  134. {examples → tests}/controlled_spider_example.py +205 -205
  135. tests/date_tools_example.py +181 -0
  136. tests/dynamic_loading_example.py +524 -0
  137. tests/dynamic_loading_test.py +105 -0
  138. tests/env_config_example.py +134 -0
  139. tests/error_handling_example.py +172 -0
  140. tests/redis_key_validation_demo.py +131 -0
  141. tests/response_improvements_example.py +145 -0
  142. tests/test_advanced_tools.py +149 -0
  143. tests/test_all_redis_key_configs.py +146 -0
  144. tests/test_authenticated_proxy.py +142 -0
  145. tests/test_cleaners.py +55 -0
  146. tests/test_comprehensive.py +147 -0
  147. tests/test_config_validator.py +194 -0
  148. tests/test_date_tools.py +124 -0
  149. tests/test_double_crawlo_fix.py +208 -0
  150. tests/test_double_crawlo_fix_simple.py +125 -0
  151. tests/test_dynamic_downloaders_proxy.py +125 -0
  152. tests/test_dynamic_proxy.py +93 -0
  153. tests/test_dynamic_proxy_config.py +147 -0
  154. tests/test_dynamic_proxy_real.py +110 -0
  155. tests/test_edge_cases.py +304 -0
  156. tests/test_enhanced_error_handler.py +271 -0
  157. tests/test_env_config.py +122 -0
  158. tests/test_error_handler_compatibility.py +113 -0
  159. tests/test_final_validation.py +153 -153
  160. tests/test_framework_env_usage.py +104 -0
  161. tests/test_integration.py +357 -0
  162. tests/test_item_dedup_redis_key.py +123 -0
  163. tests/test_parsel.py +30 -0
  164. tests/test_performance.py +328 -0
  165. tests/test_proxy_health_check.py +32 -32
  166. tests/test_proxy_middleware_integration.py +136 -136
  167. tests/test_proxy_providers.py +56 -56
  168. tests/test_proxy_stats.py +19 -19
  169. tests/test_proxy_strategies.py +59 -59
  170. tests/test_queue_manager_double_crawlo.py +231 -0
  171. tests/test_queue_manager_redis_key.py +177 -0
  172. tests/test_redis_config.py +28 -28
  173. tests/test_redis_connection_pool.py +295 -0
  174. tests/test_redis_key_naming.py +182 -0
  175. tests/test_redis_key_validator.py +124 -0
  176. tests/test_redis_queue.py +224 -224
  177. tests/test_request_serialization.py +70 -70
  178. tests/test_response_improvements.py +153 -0
  179. tests/test_scheduler.py +241 -241
  180. tests/test_simple_response.py +62 -0
  181. tests/test_telecom_spider_redis_key.py +206 -0
  182. tests/test_template_content.py +88 -0
  183. tests/test_template_redis_key.py +135 -0
  184. tests/test_tools.py +154 -0
  185. tests/tools_example.py +258 -0
  186. crawlo/core/enhanced_engine.py +0 -190
  187. crawlo-1.1.4.dist-info/RECORD +0 -117
  188. {crawlo-1.1.4.dist-info → crawlo-1.1.6.dist-info}/WHEEL +0 -0
  189. {crawlo-1.1.4.dist-info → crawlo-1.1.6.dist-info}/entry_points.txt +0 -0
  190. {crawlo-1.1.4.dist-info → crawlo-1.1.6.dist-info}/top_level.txt +0 -0
crawlo/middleware/response_filter.py
@@ -1,26 +1,26 @@

Every line in this hunk was removed and re-added with identical text (presumably a whitespace or line-ending change), so the file content appears once:

#!/usr/bin/python
# -*- coding:UTF-8 -*-
from crawlo.utils.log import get_logger
from crawlo.exceptions import IgnoreRequestError


class ResponseFilterMiddleware:

    def __init__(self, allowed_codes, log_level):
        self.allowed_codes = allowed_codes
        self.logger = get_logger(self.__class__.__name__, log_level)

    @classmethod
    def create_instance(cls, crawler):
        o = cls(
            allowed_codes=crawler.settings.get_list('ALLOWED_CODES'),
            log_level=crawler.settings.get('LOG_LEVEL')
        )
        return o

    def process_response(self, request, response, spider):
        if 200 <= response.status_code < 300:
            return response
        if response.status_code in self.allowed_codes:
            return response
        raise IgnoreRequestError(f"response status_code {response.status_code} not allowed")
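For orientation, a minimal sketch of how this middleware might be configured in a project. The ALLOWED_CODES and LOG_LEVEL key names are taken from create_instance() above; the MIDDLEWARES registration list is an assumption about crawlo's settings conventions, not something this diff confirms.

# Hypothetical settings.py sketch — key names from create_instance(), values illustrative.
ALLOWED_CODES = [301, 302, 404]  # non-2xx statuses to let through instead of dropping
LOG_LEVEL = 'INFO'

MIDDLEWARES = [
    'crawlo.middleware.response_filter.ResponseFilterMiddleware',  # assumed registration style
]

With this configuration, any response outside 2xx whose status is not in ALLOWED_CODES raises IgnoreRequestError and is discarded.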
crawlo/middleware/retry.py
@@ -1,125 +1,125 @@

As with the previous hunk, all 124 changed lines are removed and re-added with identical text; only the final return None is unchanged context. The file content appears once:

#!/usr/bin/python
# -*- coding:UTF-8 -*-
from typing import List
import asyncio

try:
    from anyio import EndOfStream
except ImportError:
    # If anyio is unavailable or EndOfStream doesn't exist, create a placeholder
    class EndOfStream(Exception):
        pass

try:
    from httpcore import ReadError
except ImportError:
    class ReadError(Exception):
        pass

try:
    from httpx import RemoteProtocolError, ConnectError, ReadTimeout
except ImportError:
    class RemoteProtocolError(Exception):
        pass

    class ConnectError(Exception):
        pass

    class ReadTimeout(Exception):
        pass

try:
    from aiohttp.client_exceptions import ClientConnectionError, ClientPayloadError
    from aiohttp import ClientConnectorError, ClientTimeout, ClientConnectorSSLError, ClientResponseError
except ImportError:
    class ClientConnectionError(Exception):
        pass

    class ClientPayloadError(Exception):
        pass

    class ClientConnectorError(Exception):
        pass

    class ClientTimeout(Exception):
        pass

    class ClientConnectorSSLError(Exception):
        pass

    class ClientResponseError(Exception):
        pass

from crawlo.utils.log import get_logger
from crawlo.stats_collector import StatsCollector

_retry_exceptions = [
    EndOfStream,
    ReadError,
    asyncio.TimeoutError,
    ConnectError,
    ReadTimeout,
    ClientConnectorError,
    ClientResponseError,
    RemoteProtocolError,
    ClientTimeout,
    ClientConnectorSSLError,
    ClientPayloadError,
    ClientConnectionError
]


class RetryMiddleware(object):

    def __init__(
        self,
        *,
        retry_http_codes: List,
        ignore_http_codes: List,
        max_retry_times: int,
        retry_exceptions: List,
        stats: StatsCollector,
        retry_priority: int
    ):
        self.retry_http_codes = retry_http_codes
        self.ignore_http_codes = ignore_http_codes
        self.max_retry_times = max_retry_times
        self.retry_exceptions = tuple(retry_exceptions + _retry_exceptions)
        self.retry_priority = retry_priority
        self.stats = stats
        self.logger = get_logger(self.__class__.__name__)

    @classmethod
    def create_instance(cls, crawler):
        o = cls(
            retry_http_codes=crawler.settings.get_list('RETRY_HTTP_CODES'),
            ignore_http_codes=crawler.settings.get_list('IGNORE_HTTP_CODES'),
            max_retry_times=crawler.settings.get_int('MAX_RETRY_TIMES'),
            retry_exceptions=crawler.settings.get_list('RETRY_EXCEPTIONS'),
            stats=crawler.stats,
            retry_priority=crawler.settings.get_int('RETRY_PRIORITY')
        )
        return o

    def process_response(self, request, response, spider):
        if request.meta.get('dont_retry', False):
            return response
        if response.status_code in self.ignore_http_codes:
            return response
        if response.status_code in self.retry_http_codes:
            # Retry logic
            reason = f"response code {response.status_code}"
            return self._retry(request, reason, spider) or response
        return response

    def process_exception(self, request, exc, spider):
        if isinstance(exc, self.retry_exceptions) and not request.meta.get('dont_retry', False):
            return self._retry(request=request, reason=type(exc).__name__, spider=spider)

    def _retry(self, request, reason, spider):
        retry_times = request.meta.get('retry_times', 0)
        if retry_times < self.max_retry_times:
            retry_times += 1
            self.logger.info(f"{spider} {request} {reason} retrying {retry_times} time...")
            request.meta['retry_times'] = retry_times
            # request.dont_retry = True
            # Note: setting dont_retry here means both hooks skip the rescheduled
            # request, so it is retried at most once regardless of max_retry_times.
            request.meta['dont_retry'] = True
            request.priority = request.priority + self.retry_priority
            self.stats.inc_value("retry_count")
            return request
        else:
            self.logger.warning(f"{spider} {request} {reason} retry max {self.max_retry_times} times, give up.")
            return None
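Likewise, a hedged sketch of the settings RetryMiddleware reads. The key names come straight from create_instance() above, and the dont_retry flag from the meta checks in process_response()/process_exception(); the values are illustrative only.

# Hypothetical settings sketch — key names from create_instance(), values illustrative.
RETRY_HTTP_CODES = [429, 500, 502, 503, 504]
IGNORE_HTTP_CODES = [404]  # returned as-is, never retried
MAX_RETRY_TIMES = 3
RETRY_EXCEPTIONS = []      # extra exception classes, merged with the built-in _retry_exceptions
RETRY_PRIORITY = -1        # added to request.priority on each retry

# Inside a spider or middleware, a single request can opt out of retries
# (this is the flag both hooks check before calling _retry):
#     request.meta['dont_retry'] = True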