crawlo 1.0.2__py3-none-any.whl → 1.0.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of crawlo might be problematic. Click here for more details.

Files changed (79):
  1. crawlo/__init__.py +9 -6
  2. crawlo/__version__.py +1 -2
  3. crawlo/core/__init__.py +2 -2
  4. crawlo/core/engine.py +158 -158
  5. crawlo/core/processor.py +40 -40
  6. crawlo/core/scheduler.py +57 -59
  7. crawlo/crawler.py +242 -222
  8. crawlo/downloader/__init__.py +78 -78
  9. crawlo/downloader/aiohttp_downloader.py +259 -96
  10. crawlo/downloader/httpx_downloader.py +187 -48
  11. crawlo/downloader/playwright_downloader.py +160 -160
  12. crawlo/event.py +11 -11
  13. crawlo/exceptions.py +64 -64
  14. crawlo/extension/__init__.py +31 -31
  15. crawlo/extension/log_interval.py +49 -49
  16. crawlo/extension/log_stats.py +44 -44
  17. crawlo/filters/__init__.py +37 -37
  18. crawlo/filters/aioredis_filter.py +157 -129
  19. crawlo/filters/memory_filter.py +202 -203
  20. crawlo/filters/redis_filter.py +119 -119
  21. crawlo/items/__init__.py +62 -62
  22. crawlo/items/items.py +118 -118
  23. crawlo/middleware/__init__.py +21 -21
  24. crawlo/middleware/default_header.py +32 -32
  25. crawlo/middleware/download_delay.py +28 -28
  26. crawlo/middleware/middleware_manager.py +140 -140
  27. crawlo/middleware/request_ignore.py +30 -30
  28. crawlo/middleware/response_code.py +18 -18
  29. crawlo/middleware/response_filter.py +26 -26
  30. crawlo/middleware/retry.py +90 -90
  31. crawlo/network/__init__.py +7 -7
  32. crawlo/network/request.py +204 -233
  33. crawlo/network/response.py +166 -162
  34. crawlo/pipelines/__init__.py +13 -13
  35. crawlo/pipelines/console_pipeline.py +39 -39
  36. crawlo/pipelines/mongo_pipeline.py +116 -116
  37. crawlo/pipelines/mysql_batch_pipline.py +133 -133
  38. crawlo/pipelines/mysql_pipeline.py +195 -195
  39. crawlo/pipelines/pipeline_manager.py +56 -56
  40. crawlo/settings/__init__.py +7 -7
  41. crawlo/settings/default_settings.py +93 -89
  42. crawlo/settings/setting_manager.py +99 -99
  43. crawlo/spider/__init__.py +36 -36
  44. crawlo/stats_collector.py +59 -47
  45. crawlo/subscriber.py +106 -106
  46. crawlo/task_manager.py +27 -27
  47. crawlo/templates/item_template.tmpl +21 -21
  48. crawlo/templates/project_template/main.py +32 -32
  49. crawlo/templates/project_template/setting.py +189 -189
  50. crawlo/templates/spider_template.tmpl +30 -30
  51. crawlo/utils/__init__.py +7 -7
  52. crawlo/utils/concurrency_manager.py +124 -124
  53. crawlo/utils/date_tools.py +177 -177
  54. crawlo/utils/func_tools.py +82 -82
  55. crawlo/utils/log.py +39 -39
  56. crawlo/utils/pqueue.py +173 -173
  57. crawlo/utils/project.py +59 -59
  58. crawlo/utils/request.py +122 -85
  59. crawlo/utils/system.py +11 -11
  60. crawlo/utils/tools.py +302 -302
  61. crawlo/utils/url.py +39 -39
  62. {crawlo-1.0.2.dist-info → crawlo-1.0.3.dist-info}/METADATA +48 -48
  63. crawlo-1.0.3.dist-info/RECORD +80 -0
  64. {crawlo-1.0.2.dist-info → crawlo-1.0.3.dist-info}/top_level.txt +1 -0
  65. tests/__init__.py +7 -0
  66. tests/baidu_spider/__init__.py +7 -0
  67. tests/baidu_spider/demo.py +94 -0
  68. tests/baidu_spider/items.py +25 -0
  69. tests/baidu_spider/middleware.py +49 -0
  70. tests/baidu_spider/pipeline.py +55 -0
  71. tests/baidu_spider/request_fingerprints.txt +9 -0
  72. tests/baidu_spider/run.py +27 -0
  73. tests/baidu_spider/settings.py +78 -0
  74. tests/baidu_spider/spiders/__init__.py +7 -0
  75. tests/baidu_spider/spiders/bai_du.py +61 -0
  76. tests/baidu_spider/spiders/sina.py +79 -0
  77. crawlo-1.0.2.dist-info/RECORD +0 -68
  78. {crawlo-1.0.2.dist-info → crawlo-1.0.3.dist-info}/WHEEL +0 -0
  79. {crawlo-1.0.2.dist-info → crawlo-1.0.3.dist-info}/entry_points.txt +0 -0
crawlo/network/request.py CHANGED
@@ -1,234 +1,205 @@
1
- #!/usr/bin/python
2
- # -*- coding: UTF-8 -*-
3
- import json
4
- from copy import deepcopy
5
- from urllib.parse import urlencode
6
- from w3lib.url import safe_url_string
7
- from typing import Dict, Optional, Callable, Union, Any, TypeVar, List
8
-
9
- from crawlo.utils.url import escape_ajax
10
-
11
- _Request = TypeVar("_Request", bound="Request")
12
-
13
-
14
- class RequestPriority:
15
- HIGH = -100
16
- NORMAL = 0
17
- LOW = 100
18
-
19
-
20
- class Request:
21
- """
22
- 封装一个 HTTP 请求对象,用于爬虫框架中表示一个待抓取的请求任务。
23
- 支持设置回调函数、请求头、请求体、优先级、元数据等。
24
- """
25
-
26
- __slots__ = (
27
- '_url',
28
- '_meta',
29
- 'callback',
30
- 'cb_kwargs',
31
- 'err_back',
32
- 'headers',
33
- 'body',
34
- 'method',
35
- 'cookies',
36
- 'priority',
37
- 'encoding',
38
- 'dont_filter',
39
- 'timeout',
40
- 'proxy',
41
- 'allow_redirects',
42
- 'auth',
43
- 'verify',
44
- 'flags'
45
- )
46
-
47
- def __init__(
48
- self,
49
- url: str,
50
- callback: Optional[Callable] = None,
51
- method: Optional[str] = 'GET',
52
- headers: Optional[Dict[str, str]] = None,
53
- body: Optional[Union[Dict, bytes, str]] = None,
54
- form_data: Optional[Dict] = None,
55
- json_body: Optional[Dict] = None, # ✅ 参数名从 json 改为 json_body
56
- cb_kwargs: Optional[Dict[str, Any]] = None,
57
- err_back: Optional[Callable] = None,
58
- cookies: Optional[Dict[str, str]] = None,
59
- meta: Optional[Dict[str, Any]] = None,
60
- priority: int = RequestPriority.NORMAL,
61
- dont_filter: bool = False,
62
- timeout: Optional[float] = None,
63
- proxy: Optional[str] = None,
64
- allow_redirects: bool = True,
65
- auth: Optional[tuple] = None,
66
- verify: bool = True,
67
- flags: Optional[List[str]] = None,
68
- encoding: str = 'utf-8'
69
- ):
70
- """
71
- 初始化请求对象。
72
-
73
- 参数说明:
74
- :param url: 请求的 URL 地址(必须)
75
- :param callback: 响应处理回调函数(可选)
76
- :param method: HTTP 请求方法,默认为 GET
77
- :param headers: 请求头(可选)
78
- :param body: 请求体(可为 dict、bytes 或 str)
79
- :param form_data 表单数据,自动设置为 POST 并构造 x-www-form-urlencoded 请求体
80
- :param json_body: 用于构造 JSON 请求体,自动设置 Content-Type 为 application/json
81
- :param cb_kwargs: 传递给回调函数的额外参数(可选)
82
- :param err_back: 请求失败时的错误回调函数(可选)
83
- :param cookies: 请求 cookies(可选)
84
- :param meta: 元数据字典,用于在请求间传递数据
85
- :param priority: 请求优先级,数值越小优先级越高(默认为 0)
86
- :param dont_filter: 是否跳过去重过滤(默认为 False)
87
- :param timeout: 请求超时时间(秒)
88
- :param proxy: 代理地址(如:http://127.0.0.1:8080)
89
- :param allow_redirects: 是否允许重定向(默认为 True)
90
- :param auth: 认证信息,格式为 (username, password)
91
- :param verify: 是否验证 SSL 证书(默认为 True)
92
- :param flags: 请求标记(调试、重试等用途)
93
- """
94
- self.callback = callback
95
- self.method = str(method).upper()
96
- self.headers = headers or {}
97
- self.body = body
98
- self.cb_kwargs = cb_kwargs or {}
99
- self.err_back = err_back
100
- self.cookies = cookies or {}
101
- self.priority = -priority # 高优先级值更小,便于排序
102
- self._meta = deepcopy(meta) if meta is not None else {}
103
- self.timeout = self._meta.get('download_timeout', timeout)
104
- self.proxy = proxy
105
- self.allow_redirects = allow_redirects
106
- self.auth = auth
107
- self.verify = verify
108
- self.flags = flags or []
109
-
110
- # 默认编码
111
- self.encoding = encoding
112
-
113
- # 优先使用 json_body 参数
114
- if json_body is not None:
115
- if 'Content-Type' not in self.headers:
116
- self.headers['Content-Type'] = 'application/json'
117
- self.body = json.dumps(json_body, ensure_ascii=False).encode(self.encoding)
118
- if self.method == 'GET':
119
- self.method = 'POST'
120
-
121
- # 其次使用 form_data
122
- elif form_data is not None:
123
- if self.method == 'GET':
124
- self.method = 'POST'
125
- if 'Content-Type' not in self.headers:
126
- self.headers['Content-Type'] = 'application/x-www-form-urlencoded'
127
- self.body = urlencode(form_data)
128
-
129
- # 最后处理 body 为 dict 的情况
130
- elif isinstance(self.body, dict):
131
- if 'Content-Type' not in self.headers:
132
- self.headers['Content-Type'] = 'application/json'
133
- self.body = json.dumps(self.body, ensure_ascii=False).encode(self.encoding)
134
-
135
- self.dont_filter = dont_filter
136
- self._set_url(url)
137
-
138
- def copy(self: _Request) -> _Request:
139
- """
140
- 创建当前 Request 的副本,用于避免引用共享数据。
141
-
142
- :return: 一个新的 Request 实例
143
- """
144
- return type(self)(
145
- url=self.url,
146
- callback=self.callback,
147
- method=self.method,
148
- headers=self.headers.copy(),
149
- body=self.body,
150
- form_data=None, # form_data 不参与复制
151
- json_body=None, # json_body 参数也不参与复制
152
- cb_kwargs=deepcopy(self.cb_kwargs),
153
- err_back=self.err_back,
154
- cookies=self.cookies.copy(),
155
- meta=deepcopy(self._meta),
156
- priority=-self.priority,
157
- dont_filter=self.dont_filter,
158
- timeout=self.timeout,
159
- proxy=self.proxy,
160
- allow_redirects=self.allow_redirects,
161
- auth=self.auth,
162
- verify=self.verify,
163
- flags=self.flags.copy(),
164
- )
165
-
166
- def set_meta(self, key: str, value: Any) -> None:
167
- """
168
- 设置 meta 中的某个键值对。
169
-
170
- :param key: 要设置的键
171
- :param value: 对应的值
172
- """
173
- self._meta[key] = value
174
-
175
- def _set_url(self, url: str) -> None:
176
- """
177
- 设置并验证 URL,确保其格式正确且包含 scheme。
178
-
179
- :param url: 原始 URL 字符串
180
- :raises TypeError: 如果传入的不是字符串
181
- :raises ValueError: 如果 URL 没有 scheme
182
- """
183
- if not isinstance(url, str):
184
- raise TypeError(f"Request url 必须为字符串类型,当前类型为 {type(url).__name__}")
185
-
186
- s = safe_url_string(url, self.encoding)
187
- escaped_url = escape_ajax(s)
188
- self._url = escaped_url
189
-
190
- if not self._url.startswith(('http://', 'https://', 'about:', '')):
191
- raise ValueError(f"请求 URL 缺少 scheme(如 http://): {self._url}")
192
-
193
- @property
194
- def url(self) -> str:
195
- """
196
- 获取请求的 URL。
197
-
198
- :return: 当前请求的 URL 字符串
199
- """
200
- return self._url
201
-
202
- @property
203
- def meta(self) -> Dict[str, Any]:
204
- """
205
- 获取请求的元数据。
206
-
207
- :return: 元数据字典
208
- """
209
- return self._meta
210
-
211
- def __str__(self) -> str:
212
- """
213
- 返回对象的字符串表示,用于调试和日志输出。
214
-
215
- :return: 字符串 <Request url=... method=...>
216
- """
217
- return f'<Request url={self.url} method={self.method}>'
218
-
219
- def __repr__(self) -> str:
220
- """
221
- 返回对象的官方字符串表示。
222
-
223
- :return: 字符串,与 __str__ 相同
224
- """
225
- return str(self)
226
-
227
- def __lt__(self, other: _Request) -> bool:
228
- """
229
- 比较两个请求的优先级,用于排序。
230
-
231
- :param other: 另一个 Request 对象
232
- :return: 如果当前请求优先级更高(数值更小)返回 True
233
- """
1
+ #!/usr/bin/python
2
+ # -*- coding: UTF-8 -*-
3
+ import json
4
+ from copy import deepcopy
5
+ from urllib.parse import urlencode
6
+ from w3lib.url import safe_url_string
7
+ from typing import Dict, Optional, Callable, Union, Any, TypeVar, List
8
+
9
+ from crawlo.utils.url import escape_ajax
10
+
11
+
12
+ _Request = TypeVar("_Request", bound="Request")
13
+
14
+
15
+ class RequestPriority:
16
+ """请求优先级常量"""
17
+ HIGH = -100
18
+ NORMAL = 0
19
+ LOW = 100
20
+
21
+
22
+ class Request:
23
+ """
24
+ 封装一个 HTTP 请求对象,用于爬虫框架中表示一个待抓取的请求任务。
25
+ 支持 JSON、表单、原始 body 提交,自动处理 Content-Type 与编码。
26
+ 不支持文件上传(multipart/form-data),保持轻量。
27
+ """
28
+
29
+ __slots__ = (
30
+ '_url',
31
+ '_meta',
32
+ 'callback',
33
+ 'cb_kwargs',
34
+ 'err_back',
35
+ 'headers',
36
+ 'body',
37
+ 'method',
38
+ 'cookies',
39
+ 'priority',
40
+ 'encoding',
41
+ 'dont_filter',
42
+ 'timeout',
43
+ 'proxy',
44
+ 'allow_redirects',
45
+ 'auth',
46
+ 'verify',
47
+ 'flags',
48
+ # 保留高层参数用于 copy()
49
+ '_json_body',
50
+ '_form_data'
51
+ )
52
+
53
+ def __init__(
54
+ self,
55
+ url: str,
56
+ callback: Optional[Callable] = None,
57
+ method: Optional[str] = 'GET',
58
+ headers: Optional[Dict[str, str]] = None,
59
+ body: Optional[Union[bytes, str, Dict[Any, Any]]] = None,
60
+ form_data: Optional[Dict[Any, Any]] = None,
61
+ json_body: Optional[Dict[Any, Any]] = None,
62
+ cb_kwargs: Optional[Dict[str, Any]] = None,
63
+ cookies: Optional[Dict[str, str]] = None,
64
+ meta: Optional[Dict[str, Any]] = None,
65
+ priority: int = RequestPriority.NORMAL,
66
+ dont_filter: bool = False,
67
+ timeout: Optional[float] = None,
68
+ proxy: Optional[str] = None,
69
+ allow_redirects: bool = True,
70
+ auth: Optional[tuple] = None,
71
+ verify: bool = True,
72
+ flags: Optional[List[str]] = None,
73
+ encoding: str = 'utf-8'
74
+ ):
75
+ """
76
+ 初始化请求对象。
77
+
78
+ :param url: 请求 URL(必须)
79
+ :param callback: 成功回调函数
80
+ :param method: HTTP 方法,默认 GET
81
+ :param headers: 请求头
82
+ :param body: 原始请求体(bytes/str),若为 dict 且未使用 json_body/form_data,则自动转为 JSON
83
+ :param form_data: 表单数据,自动转为 application/x-www-form-urlencoded
84
+ :param json_body: JSON 数据,自动序列化并设置 Content-Type
85
+ :param cb_kwargs: 传递给 callback 的额外参数
86
+ :param cookies: Cookies 字典
87
+ :param meta: 元数据(跨中间件传递数据)
88
+ :param priority: 优先级(数值越小越优先)
89
+ :param dont_filter: 是否跳过去重
90
+ :param timeout: 超时时间(秒)
91
+ :param proxy: 代理地址,如 http://127.0.0.1:8080
92
+ :param allow_redirects: 是否允许重定向
93
+ :param auth: 认证元组 (username, password)
94
+ :param verify: 是否验证 SSL 证书
95
+ :param flags: 标记(用于调试或分类)
96
+ :param encoding: 字符编码,默认 utf-8
97
+ """
98
+ self.callback = callback
99
+ self.method = str(method).upper()
100
+ self.headers = headers or {}
101
+ self.cookies = cookies or {}
102
+ self.priority = -priority # 用于排序:值越小优先级越高
103
+ self._meta = deepcopy(meta) if meta is not None else {}
104
+ self.timeout = self._meta.get('download_timeout', timeout)
105
+ self.proxy = proxy
106
+ self.allow_redirects = allow_redirects
107
+ self.auth = auth
108
+ self.verify = verify
109
+ self.flags = flags or []
110
+ self.encoding = encoding
111
+ self.cb_kwargs = cb_kwargs or {}
112
+ self.body = body
113
+ # 保存高层语义参数(用于 copy)
114
+ self._json_body = json_body
115
+ self._form_data = form_data
116
+
117
+ # 构建 body
118
+ if json_body is not None:
119
+ if 'Content-Type' not in self.headers:
120
+ self.headers['Content-Type'] = 'application/json'
121
+ self.body = json.dumps(json_body, ensure_ascii=False).encode(encoding)
122
+ if self.method == 'GET':
123
+ self.method = 'POST'
124
+
125
+ elif form_data is not None:
126
+ if self.method == 'GET':
127
+ self.method = 'POST'
128
+ if 'Content-Type' not in self.headers:
129
+ self.headers['Content-Type'] = 'application/x-www-form-urlencoded'
130
+ query_str = urlencode(form_data)
131
+ self.body = query_str.encode(encoding) # 显式编码为 bytes
132
+
133
+
134
+ else:
135
+ # 处理原始 body
136
+ if isinstance(self.body, dict):
137
+ if 'Content-Type' not in self.headers:
138
+ self.headers['Content-Type'] = 'application/json'
139
+ self.body = json.dumps(self.body, ensure_ascii=False).encode(encoding)
140
+ elif isinstance(self.body, str):
141
+ self.body = self.body.encode(encoding)
142
+
143
+ self.dont_filter = dont_filter
144
+ self._set_url(url)
145
+
146
+ def copy(self: _Request) -> _Request:
147
+ """
148
+ 创建当前请求的副本,保留所有高层语义(json_body/form_data)。
149
+ """
150
+ return type(self)(
151
+ url=self.url,
152
+ callback=self.callback,
153
+ method=self.method,
154
+ headers=self.headers.copy(),
155
+ body=None, # 由 form_data/json_body 重新生成
156
+ form_data=self._form_data,
157
+ json_body=self._json_body,
158
+ cb_kwargs=deepcopy(self.cb_kwargs),
159
+ err_back=self.err_back,
160
+ cookies=self.cookies.copy(),
161
+ meta=deepcopy(self._meta),
162
+ priority=-self.priority,
163
+ dont_filter=self.dont_filter,
164
+ timeout=self.timeout,
165
+ proxy=self.proxy,
166
+ allow_redirects=self.allow_redirects,
167
+ auth=self.auth,
168
+ verify=self.verify,
169
+ flags=self.flags.copy(),
170
+ encoding=self.encoding
171
+ )
172
+
173
+ def set_meta(self, key: str, value: Any) -> None:
174
+ """设置 meta 中的某个键值。"""
175
+ self._meta[key] = value
176
+
177
+ def _set_url(self, url: str) -> None:
178
+ """安全设置 URL,确保格式正确。"""
179
+ if not isinstance(url, str):
180
+ raise TypeError(f"Request url 必须为字符串,当前类型: {type(url).__name__}")
181
+
182
+ s = safe_url_string(url, self.encoding)
183
+ escaped_url = escape_ajax(s)
184
+ self._url = escaped_url
185
+
186
+ if not self._url.startswith(('http://', 'https://')):
187
+ raise ValueError(f"URL 缺少 scheme: {self._url}")
188
+
189
+ @property
190
+ def url(self) -> str:
191
+ return self._url
192
+
193
+ @property
194
+ def meta(self) -> Dict[str, Any]:
195
+ return self._meta
196
+
197
+ def __str__(self) -> str:
198
+ return f'<Request url={self.url} method={self.method}>'
199
+
200
+ def __repr__(self) -> str:
201
+ return str(self)
202
+
203
+ def __lt__(self, other: _Request) -> bool:
204
+ """用于按优先级排序"""
234
205
  return self.priority < other.priority