crawlo-1.1.1-py3-none-any.whl → crawlo-1.1.3-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of crawlo has been flagged as potentially problematic.

Files changed (128)
  1. crawlo/__init__.py +34 -33
  2. crawlo/__version__.py +1 -1
  3. crawlo/cli.py +40 -40
  4. crawlo/commands/__init__.py +13 -13
  5. crawlo/commands/check.py +594 -594
  6. crawlo/commands/genspider.py +152 -126
  7. crawlo/commands/list.py +156 -147
  8. crawlo/commands/run.py +285 -285
  9. crawlo/commands/startproject.py +196 -111
  10. crawlo/commands/stats.py +188 -188
  11. crawlo/commands/utils.py +187 -0
  12. crawlo/config.py +280 -0
  13. crawlo/core/__init__.py +2 -2
  14. crawlo/core/engine.py +171 -158
  15. crawlo/core/enhanced_engine.py +190 -0
  16. crawlo/core/processor.py +40 -40
  17. crawlo/core/scheduler.py +166 -57
  18. crawlo/crawler.py +1028 -495
  19. crawlo/downloader/__init__.py +242 -78
  20. crawlo/downloader/aiohttp_downloader.py +212 -199
  21. crawlo/downloader/cffi_downloader.py +251 -241
  22. crawlo/downloader/httpx_downloader.py +259 -246
  23. crawlo/event.py +11 -11
  24. crawlo/exceptions.py +82 -78
  25. crawlo/extension/__init__.py +31 -31
  26. crawlo/extension/log_interval.py +49 -49
  27. crawlo/extension/log_stats.py +44 -44
  28. crawlo/extension/logging_extension.py +34 -34
  29. crawlo/filters/__init__.py +154 -37
  30. crawlo/filters/aioredis_filter.py +242 -150
  31. crawlo/filters/memory_filter.py +269 -202
  32. crawlo/items/__init__.py +23 -23
  33. crawlo/items/base.py +21 -21
  34. crawlo/items/fields.py +53 -53
  35. crawlo/items/items.py +104 -104
  36. crawlo/middleware/__init__.py +21 -21
  37. crawlo/middleware/default_header.py +32 -32
  38. crawlo/middleware/download_delay.py +28 -28
  39. crawlo/middleware/middleware_manager.py +135 -135
  40. crawlo/middleware/proxy.py +248 -245
  41. crawlo/middleware/request_ignore.py +30 -30
  42. crawlo/middleware/response_code.py +18 -18
  43. crawlo/middleware/response_filter.py +26 -26
  44. crawlo/middleware/retry.py +125 -90
  45. crawlo/mode_manager.py +201 -0
  46. crawlo/network/__init__.py +21 -7
  47. crawlo/network/request.py +311 -203
  48. crawlo/network/response.py +271 -166
  49. crawlo/pipelines/__init__.py +22 -13
  50. crawlo/pipelines/bloom_dedup_pipeline.py +157 -0
  51. crawlo/pipelines/console_pipeline.py +39 -39
  52. crawlo/pipelines/csv_pipeline.py +317 -0
  53. crawlo/pipelines/database_dedup_pipeline.py +225 -0
  54. crawlo/pipelines/json_pipeline.py +219 -0
  55. crawlo/pipelines/memory_dedup_pipeline.py +116 -0
  56. crawlo/pipelines/mongo_pipeline.py +116 -116
  57. crawlo/pipelines/mysql_pipeline.py +195 -195
  58. crawlo/pipelines/pipeline_manager.py +56 -56
  59. crawlo/pipelines/redis_dedup_pipeline.py +163 -0
  60. crawlo/project.py +153 -153
  61. crawlo/queue/__init__.py +0 -0
  62. crawlo/queue/pqueue.py +37 -0
  63. crawlo/queue/queue_manager.py +308 -0
  64. crawlo/queue/redis_priority_queue.py +209 -0
  65. crawlo/settings/__init__.py +7 -7
  66. crawlo/settings/default_settings.py +245 -167
  67. crawlo/settings/setting_manager.py +99 -99
  68. crawlo/spider/__init__.py +639 -129
  69. crawlo/stats_collector.py +59 -59
  70. crawlo/subscriber.py +106 -106
  71. crawlo/task_manager.py +30 -27
  72. crawlo/templates/crawlo.cfg.tmpl +10 -10
  73. crawlo/templates/project/__init__.py.tmpl +3 -3
  74. crawlo/templates/project/items.py.tmpl +17 -17
  75. crawlo/templates/project/middlewares.py.tmpl +87 -76
  76. crawlo/templates/project/pipelines.py.tmpl +342 -64
  77. crawlo/templates/project/run.py.tmpl +252 -0
  78. crawlo/templates/project/settings.py.tmpl +251 -54
  79. crawlo/templates/project/spiders/__init__.py.tmpl +5 -5
  80. crawlo/templates/spider/spider.py.tmpl +178 -32
  81. crawlo/utils/__init__.py +7 -7
  82. crawlo/utils/controlled_spider_mixin.py +440 -0
  83. crawlo/utils/date_tools.py +233 -233
  84. crawlo/utils/db_helper.py +343 -343
  85. crawlo/utils/func_tools.py +82 -82
  86. crawlo/utils/large_scale_config.py +287 -0
  87. crawlo/utils/large_scale_helper.py +344 -0
  88. crawlo/utils/log.py +128 -128
  89. crawlo/utils/queue_helper.py +176 -0
  90. crawlo/utils/request.py +267 -267
  91. crawlo/utils/request_serializer.py +220 -0
  92. crawlo/utils/spider_loader.py +62 -62
  93. crawlo/utils/system.py +11 -11
  94. crawlo/utils/tools.py +4 -4
  95. crawlo/utils/url.py +39 -39
  96. crawlo-1.1.3.dist-info/METADATA +635 -0
  97. crawlo-1.1.3.dist-info/RECORD +113 -0
  98. examples/__init__.py +7 -7
  99. examples/controlled_spider_example.py +205 -0
  100. tests/__init__.py +7 -7
  101. tests/test_final_validation.py +154 -0
  102. tests/test_proxy_health_check.py +32 -32
  103. tests/test_proxy_middleware_integration.py +136 -136
  104. tests/test_proxy_providers.py +56 -56
  105. tests/test_proxy_stats.py +19 -19
  106. tests/test_proxy_strategies.py +59 -59
  107. tests/test_redis_config.py +29 -0
  108. tests/test_redis_queue.py +225 -0
  109. tests/test_request_serialization.py +71 -0
  110. tests/test_scheduler.py +242 -0
  111. crawlo/pipelines/mysql_batch_pipline.py +0 -273
  112. crawlo/utils/pqueue.py +0 -174
  113. crawlo-1.1.1.dist-info/METADATA +0 -220
  114. crawlo-1.1.1.dist-info/RECORD +0 -100
  115. examples/baidu_spider/__init__.py +0 -7
  116. examples/baidu_spider/demo.py +0 -94
  117. examples/baidu_spider/items.py +0 -46
  118. examples/baidu_spider/middleware.py +0 -49
  119. examples/baidu_spider/pipeline.py +0 -55
  120. examples/baidu_spider/run.py +0 -27
  121. examples/baidu_spider/settings.py +0 -121
  122. examples/baidu_spider/spiders/__init__.py +0 -7
  123. examples/baidu_spider/spiders/bai_du.py +0 -61
  124. examples/baidu_spider/spiders/miit.py +0 -159
  125. examples/baidu_spider/spiders/sina.py +0 -79
  126. {crawlo-1.1.1.dist-info → crawlo-1.1.3.dist-info}/WHEEL +0 -0
  127. {crawlo-1.1.1.dist-info → crawlo-1.1.3.dist-info}/entry_points.txt +0 -0
  128. {crawlo-1.1.1.dist-info → crawlo-1.1.3.dist-info}/top_level.txt +0 -0
crawlo/items/fields.py CHANGED
@@ -1,54 +1,54 @@
(All 53 lines were removed and re-added with identical content, an apparent whitespace- or line-ending-only change; the file is shown once below.)

#!/usr/bin/python
# -*- coding: UTF-8 -*-
"""
Field class definition
"""

from typing import Any, Optional, Type


class Field:
    """
    Field definition class, used to declare an Item field's attributes and validation rules
    """
    def __init__(
        self,
        nullable: bool = True,
        *,
        default: Any = None,
        field_type: Optional[Type] = None,
        max_length: Optional[int] = None,
        description: str = ""
    ):
        self.nullable = nullable
        self.default = default
        self.field_type = field_type
        self.max_length = max_length
        self.description = description

    def validate(self, value: Any, field_name: str = "") -> Any:
        """
        Validate that a value satisfies the field's rules
        """
        if value is None or (isinstance(value, str) and value.strip() == ""):
            if self.default is not None:
                return self.default
            elif not self.nullable:
                raise ValueError(
                    f"Field '{field_name}' must not be empty."
                )

        if value is not None and not (isinstance(value, str) and value.strip() == ""):
            if self.field_type and not isinstance(value, self.field_type):
                raise TypeError(
                    f"Field '{field_name}' has the wrong type: expected {self.field_type}, got {type(value)}, value: {value!r}"
                )
            if self.max_length and len(str(value)) > self.max_length:
                raise ValueError(
                    f"Field '{field_name}' exceeds the maximum length: max {self.max_length}, actual length {len(str(value))}, value: {value!r}"
                )

        return value

    def __repr__(self):
        return f"<Field nullable={self.nullable} type={self.field_type} default={self.default}>"
crawlo/items/items.py CHANGED
@@ -1,105 +1,105 @@
(Likewise removed and re-added with identical content; shown once below.)

#!/usr/bin/python
# -*- coding: UTF-8 -*-
"""
Item class definition
"""
from copy import deepcopy
from pprint import pformat
from typing import Any, Iterator, Dict
from collections.abc import MutableMapping

from .base import ItemMeta
from crawlo.exceptions import ItemInitError, ItemAttributeError


class Item(MutableMapping, metaclass=ItemMeta):
    """
    Base class for data items, used to define structured data
    """
    FIELDS: Dict[str, Any] = {}

    def __init__(self, *args, **kwargs):
        if args:
            raise ItemInitError(f"{self.__class__.__name__} does not accept positional arguments: {args}; use keyword arguments instead.")

        self._values: Dict[str, Any] = {}

        # Initialize fields, filling in default values
        for field_name, field_obj in self.FIELDS.items():
            if field_obj.default is not None:
                self._values[field_name] = field_obj.default

        # Override defaults or set new values
        for key, value in kwargs.items():
            self[key] = value

    def __getitem__(self, item: str) -> Any:
        return self._values[item]

    def __setitem__(self, key: str, value: Any) -> None:
        if key not in self.FIELDS:
            raise KeyError(f"{self.__class__.__name__} has no field: {key}")

        field = self.FIELDS[key]
        try:
            validated_value = field.validate(value, field_name=key)
            self._values[key] = validated_value
        except Exception as e:
            error_lines = [
                "",
                "[Field validation failed]",
                f"Field name: {key}",
                f"Value type: {type(value)}",
                f"Raw value: {repr(value)}",
                f"Nullable: {field.nullable}",
                f"Reason: {str(e)}",
                ""
            ]
            detailed_error = "\n".join(error_lines)
            raise type(e)(detailed_error) from e

    def __delitem__(self, key: str) -> None:
        del self._values[key]

    def __setattr__(self, key: str, value: Any) -> None:
        if not key.startswith("_"):
            raise AttributeError(
                f"To set a field value, use item[{key!r}] = {value!r}"
            )
        super().__setattr__(key, value)

    def __getattr__(self, item: str) -> Any:
        raise AttributeError(
            f"{self.__class__.__name__} has no field: {item}. "
            f"Declare the field on `{self.__class__.__name__}` first, then access it via item[{item!r}]."
        )

    def __getattribute__(self, item: str) -> Any:
        try:
            field = super().__getattribute__("FIELDS")
            if isinstance(field, dict) and item in field:
                raise ItemAttributeError(
                    f"To read a field value, use item[{item!r}]"
                )
        except AttributeError:
            pass  # FIELDS not defined yet; fall through
        return super().__getattribute__(item)

    def __repr__(self) -> str:
        return pformat(dict(self))

    __str__ = __repr__

    def __iter__(self) -> Iterator[str]:
        return iter(self._values)

    def __len__(self) -> int:
        return len(self._values)

    def to_dict(self) -> Dict[str, Any]:
        """Convert to a dict"""
        return dict(self)

    def copy(self) -> "Item":
        """Return a deep copy of this Item"""
        return deepcopy(self)
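A minimal usage sketch of the Item API above. It assumes that ItemMeta (defined in crawlo/items/base.py, not shown in this diff) collects Field class attributes into FIELDS; the item class and values are illustrative:

    from crawlo.items.items import Item
    from crawlo.items.fields import Field

    class ArticleItem(Item):
        title = Field(nullable=False, field_type=str, max_length=200)
        views = Field(default=0, field_type=int)

    item = ArticleItem(title="Hello")  # defaults fill first, then kwargs
    item["views"] = 42                 # routed through Field.validate
    print(item.to_dict())              # {'views': 42, 'title': 'Hello'}
    # item["missing"] = 1              # KeyError: no such field
    # item.title                       # ItemAttributeError: use item['title'] instead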
crawlo/middleware/__init__.py CHANGED (path inferred from the +21 -21 entry in the file list above)
@@ -1,21 +1,21 @@
(Removed and re-added with identical content; shown once below.)

#!/usr/bin/python
# -*- coding:UTF-8 -*-
from crawlo import Request, Response


class BaseMiddleware(object):
    def process_request(self, request, spider) -> None | Request | Response:
        # Pre-process the request
        pass

    def process_response(self, request, response, spider) -> Request | Response:
        # Pre-process the response
        pass

    def process_exception(self, request, exp, spider) -> None | Request | Response:
        # Pre-process the exception
        pass

    @classmethod
    def create_instance(cls, crawler):
        return cls()
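A hypothetical subclass sketch showing how the BaseMiddleware contract is meant to be used; the class, the AUTH_TOKEN settings key, and the header logic are illustrative, not part of the package. Judging by the type hints above, returning None lets a request continue through the chain, while returning a Request or Response short-circuits it:

    class TokenHeaderMiddleware(BaseMiddleware):
        def __init__(self, token):
            self.token = token

        @classmethod
        def create_instance(cls, crawler):
            # AUTH_TOKEN is a made-up settings key for this sketch.
            return cls(token=crawler.settings.get('AUTH_TOKEN'))

        def process_request(self, request, spider):
            # Add an auth header unless the request already carries one,
            # then return None (implicitly) so processing continues.
            request.headers.setdefault('Authorization', f'Bearer {self.token}')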
crawlo/middleware/default_header.py CHANGED (path inferred from the +32 -32 entry in the file list above)
@@ -1,32 +1,32 @@
(Removed and re-added with identical content; shown once below.)

#!/usr/bin/python
# -*- coding:UTF-8 -*-
from crawlo.event import spider_opened


class DefaultHeaderMiddleware(object):

    def __init__(self, user_agent, headers, spider):
        self.user_agent = user_agent
        self.headers = headers
        self.spider = spider

    @classmethod
    def create_instance(cls, crawler):
        o = cls(
            user_agent=crawler.settings.get('USER_AGENT'),
            headers=crawler.settings.get_dict('DEFAULT_HEADERS'),
            spider=crawler.spider
        )
        crawler.subscriber.subscribe(o.spider_opened, event=spider_opened)
        return o

    async def spider_opened(self):
        self.user_agent = getattr(self.spider, 'user_agent', self.user_agent)
        self.headers = getattr(self.spider, 'headers', self.headers)
        if self.user_agent:
            self.headers.setdefault('User-Agent', self.user_agent)

    def process_request(self, request, _spider):
        if self.headers:
            for key, value in self.headers.items():
                request.headers.setdefault(key, value)
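The middleware pulls its defaults from two settings and lets a spider override them via `user_agent` / `headers` attributes when spider_opened fires. A sketch of the corresponding settings (keys taken from the code above; values illustrative):

    USER_AGENT = 'Mozilla/5.0 (compatible; MyCrawler/1.0)'
    DEFAULT_HEADERS = {
        'Accept': 'text/html,application/xhtml+xml',
        'Accept-Language': 'en-US,en;q=0.9',
    }

Because process_request uses setdefault, headers set explicitly on a Request always take precedence over these defaults.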
crawlo/middleware/download_delay.py CHANGED (path inferred from the +28 -28 entry in the file list above)
@@ -1,28 +1,28 @@
(Removed and re-added with identical content; shown once below.)

#!/usr/bin/python
# -*- coding:UTF-8 -*-
from asyncio import sleep
from random import uniform
from crawlo.utils.log import get_logger
from crawlo.exceptions import NotConfiguredError


class DownloadDelayMiddleware(object):

    def __init__(self, settings, log_level):
        self.delay = settings.get_float("DOWNLOAD_DELAY")
        if not self.delay:
            raise NotConfiguredError
        self.randomness = settings.get_bool("RANDOMNESS")
        self.floor, self.upper = settings.get_list("RANDOM_RANGE")
        self.logger = get_logger(self.__class__.__name__, log_level)

    @classmethod
    def create_instance(cls, crawler):
        o = cls(settings=crawler.settings, log_level=crawler.settings.get('LOG_LEVEL'))
        return o

    async def process_request(self, _request, _spider):
        if self.randomness:
            await sleep(uniform(self.delay * self.floor, self.delay * self.upper))
        else:
            await sleep(self.delay)
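Worked numbers for the delay logic above (setting keys taken from the code; values illustrative): with DOWNLOAD_DELAY = 2.0 and RANDOM_RANGE = [0.5, 1.5], each request sleeps uniform(2.0 * 0.5, 2.0 * 1.5) = uniform(1.0, 3.0) seconds; with RANDOMNESS = False it sleeps exactly 2.0 seconds. A falsy or missing DOWNLOAD_DELAY raises NotConfiguredError, which presumably tells the framework to skip the middleware:

    DOWNLOAD_DELAY = 2.0
    RANDOMNESS = True
    RANDOM_RANGE = [0.5, 1.5]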