crawlo 1.1.4__py3-none-any.whl → 1.1.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of crawlo might be problematic; consult the registry's advisory page for details.

Files changed (186)
  1. crawlo/__init__.py +61 -34
  2. crawlo/__version__.py +1 -1
  3. crawlo/cleaners/__init__.py +61 -0
  4. crawlo/cleaners/data_formatter.py +226 -0
  5. crawlo/cleaners/encoding_converter.py +126 -0
  6. crawlo/cleaners/text_cleaner.py +233 -0
  7. crawlo/cli.py +40 -40
  8. crawlo/commands/__init__.py +13 -13
  9. crawlo/commands/check.py +594 -594
  10. crawlo/commands/genspider.py +151 -151
  11. crawlo/commands/list.py +155 -155
  12. crawlo/commands/run.py +285 -285
  13. crawlo/commands/startproject.py +300 -196
  14. crawlo/commands/stats.py +188 -188
  15. crawlo/commands/utils.py +186 -186
  16. crawlo/config.py +309 -279
  17. crawlo/config_validator.py +253 -0
  18. crawlo/core/__init__.py +2 -2
  19. crawlo/core/engine.py +346 -172
  20. crawlo/core/processor.py +40 -40
  21. crawlo/core/scheduler.py +137 -166
  22. crawlo/crawler.py +1027 -1027
  23. crawlo/downloader/__init__.py +266 -242
  24. crawlo/downloader/aiohttp_downloader.py +220 -212
  25. crawlo/downloader/cffi_downloader.py +256 -251
  26. crawlo/downloader/httpx_downloader.py +259 -259
  27. crawlo/downloader/hybrid_downloader.py +214 -0
  28. crawlo/downloader/playwright_downloader.py +403 -0
  29. crawlo/downloader/selenium_downloader.py +473 -0
  30. crawlo/event.py +11 -11
  31. crawlo/exceptions.py +81 -81
  32. crawlo/extension/__init__.py +37 -37
  33. crawlo/extension/health_check.py +141 -141
  34. crawlo/extension/log_interval.py +57 -57
  35. crawlo/extension/log_stats.py +81 -81
  36. crawlo/extension/logging_extension.py +43 -43
  37. crawlo/extension/memory_monitor.py +104 -88
  38. crawlo/extension/performance_profiler.py +133 -117
  39. crawlo/extension/request_recorder.py +107 -107
  40. crawlo/filters/__init__.py +154 -154
  41. crawlo/filters/aioredis_filter.py +280 -242
  42. crawlo/filters/memory_filter.py +269 -269
  43. crawlo/items/__init__.py +23 -23
  44. crawlo/items/base.py +21 -21
  45. crawlo/items/fields.py +53 -53
  46. crawlo/items/items.py +104 -104
  47. crawlo/middleware/__init__.py +21 -21
  48. crawlo/middleware/default_header.py +32 -32
  49. crawlo/middleware/download_delay.py +28 -28
  50. crawlo/middleware/middleware_manager.py +135 -135
  51. crawlo/middleware/proxy.py +272 -248
  52. crawlo/middleware/request_ignore.py +30 -30
  53. crawlo/middleware/response_code.py +18 -18
  54. crawlo/middleware/response_filter.py +26 -26
  55. crawlo/middleware/retry.py +124 -124
  56. crawlo/mode_manager.py +206 -201
  57. crawlo/network/__init__.py +21 -21
  58. crawlo/network/request.py +338 -311
  59. crawlo/network/response.py +360 -271
  60. crawlo/pipelines/__init__.py +21 -21
  61. crawlo/pipelines/bloom_dedup_pipeline.py +156 -156
  62. crawlo/pipelines/console_pipeline.py +39 -39
  63. crawlo/pipelines/csv_pipeline.py +316 -316
  64. crawlo/pipelines/database_dedup_pipeline.py +224 -224
  65. crawlo/pipelines/json_pipeline.py +218 -218
  66. crawlo/pipelines/memory_dedup_pipeline.py +115 -115
  67. crawlo/pipelines/mongo_pipeline.py +131 -131
  68. crawlo/pipelines/mysql_pipeline.py +316 -316
  69. crawlo/pipelines/pipeline_manager.py +56 -56
  70. crawlo/pipelines/redis_dedup_pipeline.py +166 -162
  71. crawlo/project.py +153 -153
  72. crawlo/queue/pqueue.py +37 -37
  73. crawlo/queue/queue_manager.py +320 -307
  74. crawlo/queue/redis_priority_queue.py +277 -209
  75. crawlo/settings/__init__.py +7 -7
  76. crawlo/settings/default_settings.py +216 -278
  77. crawlo/settings/setting_manager.py +99 -99
  78. crawlo/spider/__init__.py +639 -639
  79. crawlo/stats_collector.py +59 -59
  80. crawlo/subscriber.py +130 -130
  81. crawlo/task_manager.py +30 -30
  82. crawlo/templates/crawlo.cfg.tmpl +10 -10
  83. crawlo/templates/project/__init__.py.tmpl +3 -3
  84. crawlo/templates/project/items.py.tmpl +17 -17
  85. crawlo/templates/project/middlewares.py.tmpl +110 -110
  86. crawlo/templates/project/pipelines.py.tmpl +97 -97
  87. crawlo/templates/project/run.py.tmpl +251 -251
  88. crawlo/templates/project/settings.py.tmpl +326 -279
  89. crawlo/templates/project/settings_distributed.py.tmpl +120 -0
  90. crawlo/templates/project/settings_gentle.py.tmpl +95 -0
  91. crawlo/templates/project/settings_high_performance.py.tmpl +152 -0
  92. crawlo/templates/project/settings_simple.py.tmpl +69 -0
  93. crawlo/templates/project/spiders/__init__.py.tmpl +5 -5
  94. crawlo/templates/spider/spider.py.tmpl +141 -141
  95. crawlo/tools/__init__.py +183 -0
  96. crawlo/tools/anti_crawler.py +269 -0
  97. crawlo/tools/authenticated_proxy.py +241 -0
  98. crawlo/tools/data_validator.py +181 -0
  99. crawlo/tools/date_tools.py +36 -0
  100. crawlo/tools/distributed_coordinator.py +387 -0
  101. crawlo/tools/retry_mechanism.py +221 -0
  102. crawlo/tools/scenario_adapter.py +263 -0
  103. crawlo/utils/__init__.py +35 -7
  104. crawlo/utils/batch_processor.py +261 -0
  105. crawlo/utils/controlled_spider_mixin.py +439 -439
  106. crawlo/utils/date_tools.py +290 -233
  107. crawlo/utils/db_helper.py +343 -343
  108. crawlo/utils/enhanced_error_handler.py +360 -0
  109. crawlo/utils/env_config.py +106 -0
  110. crawlo/utils/error_handler.py +126 -0
  111. crawlo/utils/func_tools.py +82 -82
  112. crawlo/utils/large_scale_config.py +286 -286
  113. crawlo/utils/large_scale_helper.py +343 -343
  114. crawlo/utils/log.py +128 -128
  115. crawlo/utils/performance_monitor.py +285 -0
  116. crawlo/utils/queue_helper.py +175 -175
  117. crawlo/utils/redis_connection_pool.py +335 -0
  118. crawlo/utils/redis_key_validator.py +200 -0
  119. crawlo/utils/request.py +267 -267
  120. crawlo/utils/request_serializer.py +219 -219
  121. crawlo/utils/spider_loader.py +62 -62
  122. crawlo/utils/system.py +11 -11
  123. crawlo/utils/tools.py +4 -4
  124. crawlo/utils/url.py +39 -39
  125. {crawlo-1.1.4.dist-info → crawlo-1.1.5.dist-info}/METADATA +401 -403
  126. crawlo-1.1.5.dist-info/RECORD +185 -0
  127. examples/__init__.py +7 -7
  128. tests/__init__.py +7 -7
  129. tests/advanced_tools_example.py +276 -0
  130. tests/authenticated_proxy_example.py +237 -0
  131. tests/cleaners_example.py +161 -0
  132. tests/config_validation_demo.py +103 -0
  133. {examples → tests}/controlled_spider_example.py +205 -205
  134. tests/date_tools_example.py +181 -0
  135. tests/dynamic_loading_example.py +524 -0
  136. tests/dynamic_loading_test.py +105 -0
  137. tests/env_config_example.py +134 -0
  138. tests/error_handling_example.py +172 -0
  139. tests/redis_key_validation_demo.py +131 -0
  140. tests/response_improvements_example.py +145 -0
  141. tests/test_advanced_tools.py +149 -0
  142. tests/test_all_redis_key_configs.py +146 -0
  143. tests/test_authenticated_proxy.py +142 -0
  144. tests/test_cleaners.py +55 -0
  145. tests/test_comprehensive.py +147 -0
  146. tests/test_config_validator.py +194 -0
  147. tests/test_date_tools.py +124 -0
  148. tests/test_dynamic_downloaders_proxy.py +125 -0
  149. tests/test_dynamic_proxy.py +93 -0
  150. tests/test_dynamic_proxy_config.py +147 -0
  151. tests/test_dynamic_proxy_real.py +110 -0
  152. tests/test_edge_cases.py +304 -0
  153. tests/test_enhanced_error_handler.py +271 -0
  154. tests/test_env_config.py +122 -0
  155. tests/test_error_handler_compatibility.py +113 -0
  156. tests/test_final_validation.py +153 -153
  157. tests/test_framework_env_usage.py +104 -0
  158. tests/test_integration.py +357 -0
  159. tests/test_item_dedup_redis_key.py +123 -0
  160. tests/test_parsel.py +30 -0
  161. tests/test_performance.py +328 -0
  162. tests/test_proxy_health_check.py +32 -32
  163. tests/test_proxy_middleware_integration.py +136 -136
  164. tests/test_proxy_providers.py +56 -56
  165. tests/test_proxy_stats.py +19 -19
  166. tests/test_proxy_strategies.py +59 -59
  167. tests/test_queue_manager_redis_key.py +177 -0
  168. tests/test_redis_config.py +28 -28
  169. tests/test_redis_connection_pool.py +295 -0
  170. tests/test_redis_key_naming.py +182 -0
  171. tests/test_redis_key_validator.py +124 -0
  172. tests/test_redis_queue.py +224 -224
  173. tests/test_request_serialization.py +70 -70
  174. tests/test_response_improvements.py +153 -0
  175. tests/test_scheduler.py +241 -241
  176. tests/test_simple_response.py +62 -0
  177. tests/test_telecom_spider_redis_key.py +206 -0
  178. tests/test_template_content.py +88 -0
  179. tests/test_template_redis_key.py +135 -0
  180. tests/test_tools.py +154 -0
  181. tests/tools_example.py +258 -0
  182. crawlo/core/enhanced_engine.py +0 -190
  183. crawlo-1.1.4.dist-info/RECORD +0 -117
  184. {crawlo-1.1.4.dist-info → crawlo-1.1.5.dist-info}/WHEEL +0 -0
  185. {crawlo-1.1.4.dist-info → crawlo-1.1.5.dist-info}/entry_points.txt +0 -0
  186. {crawlo-1.1.4.dist-info → crawlo-1.1.5.dist-info}/top_level.txt +0 -0
@@ -1,135 +1,135 @@
1
- #!/usr/bin/python
2
- # -*- coding:UTF-8 -*-
3
- from pprint import pformat
4
- from types import MethodType
5
- from asyncio import create_task
6
- from collections import defaultdict
7
- from typing import List, Dict, Callable, Optional
8
-
9
- from crawlo import Request, Response
10
- from crawlo.utils.log import get_logger
11
- from crawlo.project import load_class
12
- from crawlo.middleware import BaseMiddleware
13
- from crawlo.project import common_call
14
- from crawlo.event import ignore_request, response_received
15
- from crawlo.exceptions import MiddlewareInitError, InvalidOutputError, RequestMethodError, IgnoreRequestError, \
16
- NotConfiguredError
17
-
18
-
19
class MiddlewareManager:
    """Builds the configured middleware chain and routes requests, responses
    and download exceptions through the hooks each middleware overrides."""

    def __init__(self, crawler):
        self.crawler = crawler
        self.logger = get_logger(self.__class__.__name__, crawler.settings.get('LOG_LEVEL'))
        # Instantiated middleware objects, in MIDDLEWARES declaration order.
        self.middlewares: List = []
        # Hook name -> bound methods that actually override BaseMiddleware.
        self.methods: Dict[str, List[MethodType]] = defaultdict(list)
        configured = self.crawler.settings.get_list('MIDDLEWARES')
        self._add_middleware(configured)
        self._add_method()

        self.download_method: Callable = crawler.engine.downloader.download
        self._stats = crawler.stats

    async def _process_request(self, request: Request):
        """Let each process_request hook intercept; the first non-None result
        short-circuits the actual download."""
        for hook in self.methods['process_request']:
            outcome = await common_call(hook, request, self.crawler.spider)
            if outcome is None:
                continue
            if not isinstance(outcome, (Request, Response)):
                raise InvalidOutputError(
                    f"{hook.__self__.__class__.__name__}. must return None or Request or Response, got {type(outcome).__name__}"
                )
            return outcome
        return await self.download_method(request)

    async def _process_response(self, request: Request, response: Response):
        """Run process_response hooks in reverse declaration order, letting
        each transform the response or swap it for a new Request."""
        for hook in reversed(self.methods['process_response']):
            try:
                response = await common_call(hook, request, response, self.crawler.spider)
            except IgnoreRequestError as exc:
                create_task(self.crawler.subscriber.notify(ignore_request, exc, request, self.crawler.spider))
            if isinstance(response, Request):
                return response
            if not isinstance(response, Response):
                raise InvalidOutputError(
                    f"{hook.__self__.__class__.__name__}. must return Request or Response, got {type(response).__name__}"
                )
        return response

    async def _process_exception(self, request: Request, exp: Exception):
        """Offer the exception to each process_exception hook; re-raise it
        when no hook produces a result."""
        for hook in self.methods['process_exception']:
            outcome = await common_call(hook, request, exp, self.crawler.spider)
            if outcome is None:
                continue
            if isinstance(outcome, (Request, Response)):
                return outcome
            if outcome:
                # Truthy non-Request/Response result suppresses the exception.
                break
            raise InvalidOutputError(
                f"{hook.__self__.__class__.__name__}. must return None or Request or Response, got {type(outcome).__name__}"
            )
        else:
            raise exp

    async def download(self, request) -> Optional[Response]:
        """ called in the download method. """
        try:
            response = await self._process_request(request)
        except KeyError:
            raise RequestMethodError(f"{request.method.lower()} is not supported")
        except IgnoreRequestError as exp:
            create_task(self.crawler.subscriber.notify(ignore_request, exp, request, self.crawler.spider))
            response = await self._process_exception(request, exp)
        except Exception as exp:
            self._stats.inc_value(f'download_error/{exp.__class__.__name__}')
            response = await self._process_exception(request, exp)
        else:
            create_task(self.crawler.subscriber.notify(response_received, response, self.crawler.spider))
        if isinstance(response, Response):
            response = await self._process_response(request, response)
        if isinstance(response, Request):
            await self.crawler.engine.enqueue_request(request)
            return None
        return response

    @classmethod
    def create_instance(cls, *args, **kwargs):
        return cls(*args, **kwargs)

    def _add_middleware(self, middlewares):
        """Instantiate every configured middleware and log the enabled ones."""
        active = [mw for mw in middlewares if self._validate_middleware(mw)]
        if active:
            self.logger.info(f'enabled middleware:\n {pformat(active)}')

    def _validate_middleware(self, middleware):
        """Instantiate one middleware; False when it declines via NotConfiguredError."""
        mw_cls = load_class(middleware)
        if not hasattr(mw_cls, 'create_instance'):
            raise MiddlewareInitError(
                f"Middleware init failed, must inherit from `BaseMiddleware` or have a `create_instance` method"
            )
        try:
            self.middlewares.append(mw_cls.create_instance(self.crawler))
        except NotConfiguredError:
            return False
        return True

    def _add_method(self):
        """Register each hook a middleware genuinely overrides."""
        for mw in self.middlewares:
            for hook_name in ('process_request', 'process_response', 'process_exception'):
                if not hasattr(mw, hook_name):
                    continue
                if self._validate_middleware_method(method_name=hook_name, middleware=mw):
                    self.methods[hook_name].append(getattr(mw, hook_name))

    @staticmethod
    def _validate_middleware_method(method_name, middleware) -> bool:
        """True when the middleware's hook differs from the BaseMiddleware default."""
        overridden = getattr(type(middleware), method_name)
        default = getattr(BaseMiddleware, method_name)
        return overridden != default
1
+ #!/usr/bin/python
2
+ # -*- coding:UTF-8 -*-
3
+ from pprint import pformat
4
+ from types import MethodType
5
+ from asyncio import create_task
6
+ from collections import defaultdict
7
+ from typing import List, Dict, Callable, Optional
8
+
9
+ from crawlo import Request, Response
10
+ from crawlo.utils.log import get_logger
11
+ from crawlo.project import load_class
12
+ from crawlo.middleware import BaseMiddleware
13
+ from crawlo.project import common_call
14
+ from crawlo.event import ignore_request, response_received
15
+ from crawlo.exceptions import MiddlewareInitError, InvalidOutputError, RequestMethodError, IgnoreRequestError, \
16
+ NotConfiguredError
17
+
18
+
19
class MiddlewareManager:
    """Load the middleware chain from settings and route every request,
    response and download exception through the registered hooks.

    Only methods that actually override the `BaseMiddleware` defaults are
    registered, so the hot path never calls no-op implementations.
    """

    def __init__(self, crawler):
        self.crawler = crawler
        self.logger = get_logger(self.__class__.__name__, crawler.settings.get('LOG_LEVEL'))
        # Instantiated middleware objects, in MIDDLEWARES declaration order.
        self.middlewares: List = []
        # Hook name -> bound methods overriding the BaseMiddleware default.
        self.methods: Dict[str, List[MethodType]] = defaultdict(list)
        middlewares = self.crawler.settings.get_list('MIDDLEWARES')
        self._add_middleware(middlewares)
        self._add_method()

        self.download_method: Callable = crawler.engine.downloader.download
        self._stats = crawler.stats

    async def _process_request(self, request: Request):
        """Run process_request hooks in declaration order; the first non-None
        result (Request or Response) short-circuits the actual download."""
        for method in self.methods['process_request']:
            result = await common_call(method, request, self.crawler.spider)
            if result is None:
                continue
            if isinstance(result, (Request, Response)):
                return result
            raise InvalidOutputError(
                f"{method.__self__.__class__.__name__}. must return None or Request or Response, got {type(result).__name__}"
            )
        return await self.download_method(request)

    async def _process_response(self, request: Request, response: Response):
        """Run process_response hooks in reverse declaration order, letting
        each transform the response or replace it with a new Request."""
        for method in reversed(self.methods['process_response']):
            try:
                response = await common_call(method, request, response, self.crawler.spider)
            except IgnoreRequestError as exp:
                # NOTE(review): the ignore event is published, but the loop then
                # re-inspects the previous `response` binding — confirm this
                # fall-through (rather than returning/raising) is intentional.
                create_task(self.crawler.subscriber.notify(ignore_request, exp, request, self.crawler.spider))
            if isinstance(response, Request):
                # A middleware replaced the response with a new request: stop here.
                return response
            if isinstance(response, Response):
                continue
            raise InvalidOutputError(
                f"{method.__self__.__class__.__name__}. must return Request or Response, got {type(response).__name__}"
            )
        return response

    async def _process_exception(self, request: Request, exp: Exception):
        """Offer the exception to each process_exception hook; re-raise it
        when no hook handles it."""
        for method in self.methods['process_exception']:
            response = await common_call(method, request, exp, self.crawler.spider)
            if response is None:
                continue
            if isinstance(response, (Request, Response)):
                return response
            if response:
                # NOTE(review): any truthy non-Request/Response value silently
                # suppresses the exception and yields an implicit None return —
                # confirm this escape hatch is intentional.
                break
            raise InvalidOutputError(
                f"{method.__self__.__class__.__name__}. must return None or Request or Response, got {type(response).__name__}"
            )
        else:
            raise exp

    async def download(self, request) -> Optional[Response]:
        """Download `request` through the middleware chain.

        Returns the final Response, or None when the chain produced a
        replacement Request (which is scheduled back onto the engine).
        """
        try:
            response = await self._process_request(request)
        except KeyError:
            raise RequestMethodError(f"{request.method.lower()} is not supported")
        except IgnoreRequestError as exp:
            create_task(self.crawler.subscriber.notify(ignore_request, exp, request, self.crawler.spider))
            response = await self._process_exception(request, exp)
        except Exception as exp:
            self._stats.inc_value(f'download_error/{exp.__class__.__name__}')
            response = await self._process_exception(request, exp)
        else:
            # NOTE(review): create_task results are not retained anywhere; an
            # unreferenced task can be garbage-collected before it runs.
            create_task(self.crawler.subscriber.notify(response_received, response, self.crawler.spider))
        if isinstance(response, Response):
            response = await self._process_response(request, response)
        if isinstance(response, Request):
            # Fix: schedule the *replacement* request produced by the middleware
            # chain. The previous code re-enqueued the original `request`,
            # discarding the middleware's substitute and risking an
            # identical-request retry loop.
            await self.crawler.engine.enqueue_request(response)
            return None
        return response

    @classmethod
    def create_instance(cls, *args, **kwargs):
        """Factory hook used by the framework to build the manager."""
        return cls(*args, **kwargs)

    def _add_middleware(self, middlewares):
        """Instantiate every configured middleware and log the enabled ones."""
        enabled_middlewares = [m for m in middlewares if self._validate_middleware(m)]
        if enabled_middlewares:
            self.logger.info(f'enabled middleware:\n {pformat(enabled_middlewares)}')

    def _validate_middleware(self, middleware):
        """Instantiate one middleware by dotted path.

        Returns True on success; False when the middleware opts out via
        NotConfiguredError. Raises MiddlewareInitError when the class lacks a
        `create_instance` factory.
        """
        middleware_cls = load_class(middleware)
        if not hasattr(middleware_cls, 'create_instance'):
            raise MiddlewareInitError(
                f"Middleware init failed, must inherit from `BaseMiddleware` or have a `create_instance` method"
            )
        try:
            instance = middleware_cls.create_instance(self.crawler)
            self.middlewares.append(instance)
            return True
        except NotConfiguredError:
            return False

    def _add_method(self):
        """Register each hook a middleware genuinely overrides."""
        for middleware in self.middlewares:
            for hook_name in ('process_request', 'process_response', 'process_exception'):
                if not hasattr(middleware, hook_name):
                    continue
                if self._validate_middleware_method(method_name=hook_name, middleware=middleware):
                    self.methods[hook_name].append(getattr(middleware, hook_name))

    @staticmethod
    def _validate_middleware_method(method_name, middleware) -> bool:
        """True when the middleware's hook differs from the BaseMiddleware default."""
        method = getattr(type(middleware), method_name)
        base_method = getattr(BaseMiddleware, method_name)
        return method != base_method