crawlo-1.1.0-py3-none-any.whl → crawlo-1.1.1-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

Files changed (111)
  1. crawlo/__init__.py +33 -24
  2. crawlo/__version__.py +1 -1
  3. crawlo/cli.py +40 -40
  4. crawlo/commands/__init__.py +13 -13
  5. crawlo/commands/check.py +594 -155
  6. crawlo/commands/genspider.py +125 -110
  7. crawlo/commands/list.py +147 -119
  8. crawlo/commands/run.py +285 -170
  9. crawlo/commands/startproject.py +111 -101
  10. crawlo/commands/stats.py +188 -167
  11. crawlo/core/__init__.py +2 -2
  12. crawlo/core/engine.py +158 -158
  13. crawlo/core/processor.py +40 -40
  14. crawlo/core/scheduler.py +57 -57
  15. crawlo/crawler.py +494 -492
  16. crawlo/downloader/__init__.py +78 -78
  17. crawlo/downloader/aiohttp_downloader.py +199 -199
  18. crawlo/downloader/cffi_downloader.py +242 -277
  19. crawlo/downloader/httpx_downloader.py +246 -246
  20. crawlo/event.py +11 -11
  21. crawlo/exceptions.py +78 -78
  22. crawlo/extension/__init__.py +31 -31
  23. crawlo/extension/log_interval.py +49 -49
  24. crawlo/extension/log_stats.py +44 -44
  25. crawlo/extension/logging_extension.py +34 -34
  26. crawlo/filters/__init__.py +37 -37
  27. crawlo/filters/aioredis_filter.py +150 -150
  28. crawlo/filters/memory_filter.py +202 -202
  29. crawlo/items/__init__.py +23 -23
  30. crawlo/items/base.py +21 -21
  31. crawlo/items/fields.py +53 -53
  32. crawlo/items/items.py +104 -104
  33. crawlo/middleware/__init__.py +21 -21
  34. crawlo/middleware/default_header.py +32 -32
  35. crawlo/middleware/download_delay.py +28 -28
  36. crawlo/middleware/middleware_manager.py +135 -135
  37. crawlo/middleware/proxy.py +245 -245
  38. crawlo/middleware/request_ignore.py +30 -30
  39. crawlo/middleware/response_code.py +18 -18
  40. crawlo/middleware/response_filter.py +26 -26
  41. crawlo/middleware/retry.py +90 -90
  42. crawlo/network/__init__.py +7 -7
  43. crawlo/network/request.py +203 -203
  44. crawlo/network/response.py +166 -166
  45. crawlo/pipelines/__init__.py +13 -13
  46. crawlo/pipelines/console_pipeline.py +39 -39
  47. crawlo/pipelines/mongo_pipeline.py +116 -116
  48. crawlo/pipelines/mysql_batch_pipline.py +272 -272
  49. crawlo/pipelines/mysql_pipeline.py +195 -195
  50. crawlo/pipelines/pipeline_manager.py +56 -56
  51. crawlo/project.py +153 -0
  52. crawlo/settings/__init__.py +7 -7
  53. crawlo/settings/default_settings.py +166 -168
  54. crawlo/settings/setting_manager.py +99 -99
  55. crawlo/spider/__init__.py +129 -129
  56. crawlo/stats_collector.py +59 -59
  57. crawlo/subscriber.py +106 -106
  58. crawlo/task_manager.py +27 -27
  59. crawlo/templates/crawlo.cfg.tmpl +10 -10
  60. crawlo/templates/project/__init__.py.tmpl +3 -3
  61. crawlo/templates/project/items.py.tmpl +17 -17
  62. crawlo/templates/project/middlewares.py.tmpl +75 -75
  63. crawlo/templates/project/pipelines.py.tmpl +63 -63
  64. crawlo/templates/project/settings.py.tmpl +54 -54
  65. crawlo/templates/project/spiders/__init__.py.tmpl +5 -5
  66. crawlo/templates/spider/spider.py.tmpl +31 -31
  67. crawlo/utils/__init__.py +7 -7
  68. crawlo/utils/date_tools.py +233 -233
  69. crawlo/utils/db_helper.py +343 -343
  70. crawlo/utils/func_tools.py +82 -82
  71. crawlo/utils/log.py +128 -128
  72. crawlo/utils/pqueue.py +173 -173
  73. crawlo/utils/request.py +267 -267
  74. crawlo/utils/spider_loader.py +62 -62
  75. crawlo/utils/system.py +11 -11
  76. crawlo/utils/tools.py +4 -4
  77. crawlo/utils/url.py +39 -39
  78. crawlo-1.1.1.dist-info/METADATA +220 -0
  79. crawlo-1.1.1.dist-info/RECORD +100 -0
  80. examples/__init__.py +7 -0
  81. examples/baidu_spider/__init__.py +7 -0
  82. examples/baidu_spider/demo.py +94 -0
  83. examples/baidu_spider/items.py +46 -0
  84. examples/baidu_spider/middleware.py +49 -0
  85. examples/baidu_spider/pipeline.py +55 -0
  86. examples/baidu_spider/run.py +27 -0
  87. examples/baidu_spider/settings.py +121 -0
  88. examples/baidu_spider/spiders/__init__.py +7 -0
  89. examples/baidu_spider/spiders/bai_du.py +61 -0
  90. examples/baidu_spider/spiders/miit.py +159 -0
  91. examples/baidu_spider/spiders/sina.py +79 -0
  92. tests/__init__.py +7 -7
  93. tests/test_proxy_health_check.py +32 -32
  94. tests/test_proxy_middleware_integration.py +136 -136
  95. tests/test_proxy_providers.py +56 -56
  96. tests/test_proxy_stats.py +19 -19
  97. tests/test_proxy_strategies.py +59 -59
  98. crawlo/utils/concurrency_manager.py +0 -125
  99. crawlo/utils/project.py +0 -197
  100. crawlo-1.1.0.dist-info/METADATA +0 -49
  101. crawlo-1.1.0.dist-info/RECORD +0 -97
  102. examples/gxb/__init__.py +0 -0
  103. examples/gxb/items.py +0 -36
  104. examples/gxb/run.py +0 -16
  105. examples/gxb/settings.py +0 -72
  106. examples/gxb/spider/__init__.py +0 -2
  107. examples/gxb/spider/miit_spider.py +0 -180
  108. examples/gxb/spider/telecom_device.py +0 -129
  109. {crawlo-1.1.0.dist-info → crawlo-1.1.1.dist-info}/WHEEL +0 -0
  110. {crawlo-1.1.0.dist-info → crawlo-1.1.1.dist-info}/entry_points.txt +0 -0
  111. {crawlo-1.1.0.dist-info → crawlo-1.1.1.dist-info}/top_level.txt +0 -0
crawlo/utils/func_tools.py CHANGED
@@ -1,82 +1,82 @@
- # -*- coding: UTF-8 -*-
- from typing import Union, AsyncGenerator, Generator
- from inspect import isgenerator, isasyncgen
- from crawlo import Response, Request, Item
- from crawlo.exceptions import TransformTypeError
-
- T = Union[Request, Item]
-
-
- async def transform(
-         func: Union[Generator[T, None, None], AsyncGenerator[T, None]],
-         response: Response
- ) -> AsyncGenerator[Union[T, Exception], None]:
-     """
-     Normalize a callback's output into a single async generator.
-
-     Args:
-         func: a sync or async generator returned by the callback
-         response: the current response object
-
-     Yields:
-         Union[T, Exception]: requests/items, or the exception raised by the callback
-
-     Raises:
-         TransformTypeError: if the input is not a sync or async generator
-     """
-
-     def _set_meta(obj: T) -> T:
-         """Propagate the response's depth metadata onto outgoing requests"""
-         if isinstance(obj, Request):
-             obj.meta.setdefault('depth', response.meta.get('depth', 0))
-         return obj
-
-     # Up-front type check
-     if not (isgenerator(func) or isasyncgen(func)):
-         raise TransformTypeError(
-             f'Callback must return generator or async generator, got {type(func).__name__}'
-         )
-
-     try:
-         if isgenerator(func):
-             # Sync generator path
-             for item in func:
-                 yield _set_meta(item)
-         else:
-             # Async generator path
-             async for item in func:
-                 yield _set_meta(item)
-
-     except Exception as e:
-         yield e
-
- # #!/usr/bin/python
- # # -*- coding:UTF-8 -*-
- # from typing import Callable, Union
- # from inspect import isgenerator, isasyncgen
- # from crawlo import Response, Request, Item
- # from crawlo.exceptions import TransformTypeError
- #
- #
- # T = Union[Request, Item]
- #
- #
- # async def transform(func: Callable, response: Response):
- #     def set_request(t: T) -> T:
- #         if isinstance(t, Request):
- #             t.meta['depth'] = response.meta['depth']
- #         return t
- #     try:
- #         if isgenerator(func):
- #             for f in func:
- #                 yield set_request(f)
- #         elif isasyncgen(func):
- #             async for f in func:
- #                 yield set_request(f)
- #         else:
- #             raise TransformTypeError(
- #                 f'callback return type error: {type(func)} must be `generator` or `async generator`'
- #             )
- #     except Exception as exp:
- #         yield exp
-
+ # -*- coding: UTF-8 -*-
+ from typing import Union, AsyncGenerator, Generator
+ from inspect import isgenerator, isasyncgen
+ from crawlo import Response, Request, Item
+ from crawlo.exceptions import TransformTypeError
+
+ T = Union[Request, Item]
+
+
+ async def transform(
+         func: Union[Generator[T, None, None], AsyncGenerator[T, None]],
+         response: Response
+ ) -> AsyncGenerator[Union[T, Exception], None]:
+     """
+     Normalize a callback's output into a single async generator.
+
+     Args:
+         func: a sync or async generator returned by the callback
+         response: the current response object
+
+     Yields:
+         Union[T, Exception]: requests/items, or the exception raised by the callback
+
+     Raises:
+         TransformTypeError: if the input is not a sync or async generator
+     """
+
+     def _set_meta(obj: T) -> T:
+         """Propagate the response's depth metadata onto outgoing requests"""
+         if isinstance(obj, Request):
+             obj.meta.setdefault('depth', response.meta.get('depth', 0))
+         return obj
+
+     # Up-front type check
+     if not (isgenerator(func) or isasyncgen(func)):
+         raise TransformTypeError(
+             f'Callback must return generator or async generator, got {type(func).__name__}'
+         )
+
+     try:
+         if isgenerator(func):
+             # Sync generator path
+             for item in func:
+                 yield _set_meta(item)
+         else:
+             # Async generator path
+             async for item in func:
+                 yield _set_meta(item)
+
+     except Exception as e:
+         yield e
+
+ # #!/usr/bin/python
+ # # -*- coding:UTF-8 -*-
+ # from typing import Callable, Union
+ # from inspect import isgenerator, isasyncgen
+ # from crawlo import Response, Request, Item
+ # from crawlo.exceptions import TransformTypeError
+ #
+ #
+ # T = Union[Request, Item]
+ #
+ #
+ # async def transform(func: Callable, response: Response):
+ #     def set_request(t: T) -> T:
+ #         if isinstance(t, Request):
+ #             t.meta['depth'] = response.meta['depth']
+ #         return t
+ #     try:
+ #         if isgenerator(func):
+ #             for f in func:
+ #                 yield set_request(f)
+ #         elif isasyncgen(func):
+ #             async for f in func:
+ #                 yield set_request(f)
+ #         else:
+ #             raise TransformTypeError(
+ #                 f'callback return type error: {type(func)} must be `generator` or `async generator`'
+ #             )
+ #     except Exception as exp:
+ #         yield exp
+
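
For orientation, here is a minimal, hypothetical sketch of how transform is consumed. It is not taken from the package: the Request(url=...) constructor signature and the SimpleNamespace stand-in for a real crawlo Response are assumptions, viable only because transform itself touches nothing but response.meta.

    import asyncio
    from types import SimpleNamespace

    from crawlo import Request
    from crawlo.utils.func_tools import transform

    # Hypothetical stand-in for a crawlo Response: transform only reads `.meta`.
    fake_response = SimpleNamespace(meta={'depth': 1})

    async def async_callback(response):
        # An async-generator callback; a plain generator is handled identically.
        yield Request(url='https://example.com/next')  # `url` kwarg is an assumption

    async def main():
        async for output in transform(async_callback(fake_response), fake_response):
            if isinstance(output, Exception):
                print(f'callback raised: {output!r}')     # errors are yielded, not raised
            else:
                print(output.meta.get('depth'), output)   # depth propagated via setdefault

    asyncio.run(main())
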
crawlo/utils/log.py CHANGED
@@ -1,129 +1,129 @@
- # -*- coding: UTF-8 -*-
- """
- Logger manager: hardened version using stringified cache keys to avoid unhashable-key problems
- """
- import os
- from logging import (
-     Formatter,
-     StreamHandler,
-     FileHandler,
-     Logger,
-     DEBUG,
-     INFO,
-     WARNING,
-     ERROR,
-     CRITICAL,
- )
-
- LOG_FORMAT = '%(asctime)s - [%(name)s] - %(levelname)s: %(message)s'
-
-
- class LoggerManager:
-     logger_cache = {}
-     _default_filename = None
-     _default_level = INFO
-     _default_file_level = INFO
-     _default_console_level = INFO
-     _default_log_format = LOG_FORMAT
-     _default_encoding = 'utf-8'
-
-     _level_map = {
-         'DEBUG': DEBUG,
-         'INFO': INFO,
-         'WARNING': WARNING,
-         'ERROR': ERROR,
-         'CRITICAL': CRITICAL,
-     }
-
-     @classmethod
-     def _to_level(cls, level):
-         """Safely convert a value to an int log level"""
-         if level is None:
-             return INFO
-         if isinstance(level, int):
-             return level
-         if isinstance(level, str):
-             return cls._level_map.get(level.upper(), INFO)
-         if hasattr(level, 'get'):  # e.g. a SettingManager or a dict
-             lv = level.get('LOG_LEVEL')
-             if isinstance(lv, int):
-                 return lv
-             if isinstance(lv, str):
-                 return cls._level_map.get(lv.upper(), INFO)
-         return INFO
-
-     @classmethod
-     def configure(cls, settings=None, **kwargs):
-         """
-         Configure logging from a settings object or keyword arguments
-         """
-         # Prefer the settings object; fall back to kwargs
-         get_val = settings.get if hasattr(settings, 'get') else (lambda k, d=None: kwargs.get(k, d))
-
-         filename = get_val('LOG_FILE')
-         level = get_val('LOG_LEVEL', 'INFO')
-         file_level = get_val('LOG_FILE_LEVEL', level)
-         console_level = get_val('LOG_CONSOLE_LEVEL', level)
-         log_format = get_val('LOG_FORMAT', LOG_FORMAT)
-         encoding = get_val('LOG_ENCODING', 'utf-8')
-
-         cls._default_filename = filename
-         cls._default_level = cls._to_level(level)
-         cls._default_file_level = cls._to_level(file_level)
-         cls._default_console_level = cls._to_level(console_level)
-         cls._default_log_format = log_format
-         cls._default_encoding = encoding
-
-     @classmethod
-     def get_logger(cls, name='default', level=None, filename=None):
-         """
-         Simplified interface exposing only the essential parameters
-         """
-         # Resolve the final parameters
-         final_level = cls._to_level(level) if level is not None else cls._default_level
-         final_filename = filename if filename is not None else cls._default_filename
-
-         # Safe stringified cache key, avoiding any unhashable types
-         key_parts = [
-             name,
-             str(final_level),
-             final_filename or 'no_file',
-         ]
-         key = '|'.join(key_parts)  # e.g. "my_spider|20|logs/app.log"
-
-         if key in cls.logger_cache:
-             return cls.logger_cache[key]
-
-         # Create the logger
-         _logger = Logger(name=name)
-         _logger.setLevel(final_level)
-
-         formatter = Formatter(cls._default_log_format)
-
-         # Console handler
-         if cls._default_console_level is not False:
-             ch = StreamHandler()
-             ch.setFormatter(formatter)
-             ch.setLevel(cls._default_console_level)
-             _logger.addHandler(ch)
-
-         # File handler
-         if final_filename:
-             try:
-                 log_dir = os.path.dirname(final_filename)
-                 if log_dir and not os.path.exists(log_dir):
-                     os.makedirs(log_dir, exist_ok=True)
-
-                 fh = FileHandler(final_filename, encoding=cls._default_encoding)
-                 fh.setFormatter(formatter)
-                 fh.setLevel(cls._default_file_level)
-                 _logger.addHandler(fh)
-             except Exception as e:
-                 print(f"[Logger] failed to create log file {final_filename}: {e}")
-
-         cls.logger_cache[key] = _logger
-         return _logger
-
-
- # Global convenience function
+ # -*- coding: UTF-8 -*-
+ """
+ Logger manager: hardened version using stringified cache keys to avoid unhashable-key problems
+ """
+ import os
+ from logging import (
+     Formatter,
+     StreamHandler,
+     FileHandler,
+     Logger,
+     DEBUG,
+     INFO,
+     WARNING,
+     ERROR,
+     CRITICAL,
+ )
+
+ LOG_FORMAT = '%(asctime)s - [%(name)s] - %(levelname)s: %(message)s'
+
+
+ class LoggerManager:
+     logger_cache = {}
+     _default_filename = None
+     _default_level = INFO
+     _default_file_level = INFO
+     _default_console_level = INFO
+     _default_log_format = LOG_FORMAT
+     _default_encoding = 'utf-8'
+
+     _level_map = {
+         'DEBUG': DEBUG,
+         'INFO': INFO,
+         'WARNING': WARNING,
+         'ERROR': ERROR,
+         'CRITICAL': CRITICAL,
+     }
+
+     @classmethod
+     def _to_level(cls, level):
+         """Safely convert a value to an int log level"""
+         if level is None:
+             return INFO
+         if isinstance(level, int):
+             return level
+         if isinstance(level, str):
+             return cls._level_map.get(level.upper(), INFO)
+         if hasattr(level, 'get'):  # e.g. a SettingManager or a dict
+             lv = level.get('LOG_LEVEL')
+             if isinstance(lv, int):
+                 return lv
+             if isinstance(lv, str):
+                 return cls._level_map.get(lv.upper(), INFO)
+         return INFO
+
+     @classmethod
+     def configure(cls, settings=None, **kwargs):
+         """
+         Configure logging from a settings object or keyword arguments
+         """
+         # Prefer the settings object; fall back to kwargs
+         get_val = settings.get if hasattr(settings, 'get') else (lambda k, d=None: kwargs.get(k, d))
+
+         filename = get_val('LOG_FILE')
+         level = get_val('LOG_LEVEL', 'INFO')
+         file_level = get_val('LOG_FILE_LEVEL', level)
+         console_level = get_val('LOG_CONSOLE_LEVEL', level)
+         log_format = get_val('LOG_FORMAT', LOG_FORMAT)
+         encoding = get_val('LOG_ENCODING', 'utf-8')
+
+         cls._default_filename = filename
+         cls._default_level = cls._to_level(level)
+         cls._default_file_level = cls._to_level(file_level)
+         cls._default_console_level = cls._to_level(console_level)
+         cls._default_log_format = log_format
+         cls._default_encoding = encoding
+
+     @classmethod
+     def get_logger(cls, name='default', level=None, filename=None):
+         """
+         Simplified interface exposing only the essential parameters
+         """
+         # Resolve the final parameters
+         final_level = cls._to_level(level) if level is not None else cls._default_level
+         final_filename = filename if filename is not None else cls._default_filename
+
+         # Safe stringified cache key, avoiding any unhashable types
+         key_parts = [
+             name,
+             str(final_level),
+             final_filename or 'no_file',
+         ]
+         key = '|'.join(key_parts)  # e.g. "my_spider|20|logs/app.log"
+
+         if key in cls.logger_cache:
+             return cls.logger_cache[key]
+
+         # Create the logger
+         _logger = Logger(name=name)
+         _logger.setLevel(final_level)
+
+         formatter = Formatter(cls._default_log_format)
+
+         # Console handler
+         if cls._default_console_level is not False:
+             ch = StreamHandler()
+             ch.setFormatter(formatter)
+             ch.setLevel(cls._default_console_level)
+             _logger.addHandler(ch)
+
+         # File handler
+         if final_filename:
+             try:
+                 log_dir = os.path.dirname(final_filename)
+                 if log_dir and not os.path.exists(log_dir):
+                     os.makedirs(log_dir, exist_ok=True)
+
+                 fh = FileHandler(final_filename, encoding=cls._default_encoding)
+                 fh.setFormatter(formatter)
+                 fh.setLevel(cls._default_file_level)
+                 _logger.addHandler(fh)
+             except Exception as e:
+                 print(f"[Logger] failed to create log file {final_filename}: {e}")
+
+         cls.logger_cache[key] = _logger
+         return _logger
+
+
+ # Global convenience function
  get_logger = LoggerManager.get_logger
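
A minimal usage sketch of the configure/get_logger flow above. The plain dict is a stand-in for crawlo's SettingManager, which works because configure() only duck-types on a .get() method; the setting names are the ones the code reads, while the file path is illustrative.

    from crawlo.utils.log import LoggerManager, get_logger

    # Any object with a .get() method is accepted, so a dict works here.
    LoggerManager.configure({
        'LOG_LEVEL': 'DEBUG',          # default for the logger and the file handler
        'LOG_CONSOLE_LEVEL': 'INFO',   # console handler can be stricter than the file
        'LOG_FILE': 'logs/crawl.log',  # parent directory is created on demand
    })

    logger = get_logger('my_spider')
    logger.debug('written to logs/crawl.log only (console is at INFO)')
    logger.info('written to both the console and the file')

    # The cache key is name|level|filename, so identical calls reuse one logger.
    assert get_logger('my_spider') is logger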