crawlo 1.0.4__py3-none-any.whl → 1.0.6__py3-none-any.whl

This diff shows the content of publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.

Potentially problematic release: this version of crawlo might be problematic.

Files changed (112)
  1. crawlo/__init__.py +25 -9
  2. crawlo/__version__.py +1 -1
  3. crawlo/cli.py +41 -0
  4. crawlo/commands/__init__.py +10 -0
  5. crawlo/commands/genspider.py +111 -0
  6. crawlo/commands/run.py +149 -0
  7. crawlo/commands/startproject.py +101 -0
  8. crawlo/core/__init__.py +2 -2
  9. crawlo/core/engine.py +158 -158
  10. crawlo/core/processor.py +40 -40
  11. crawlo/core/scheduler.py +57 -57
  12. crawlo/crawler.py +219 -242
  13. crawlo/downloader/__init__.py +78 -78
  14. crawlo/downloader/aiohttp_downloader.py +200 -259
  15. crawlo/downloader/cffi_downloader.py +277 -0
  16. crawlo/downloader/httpx_downloader.py +246 -187
  17. crawlo/event.py +11 -11
  18. crawlo/exceptions.py +78 -64
  19. crawlo/extension/__init__.py +31 -31
  20. crawlo/extension/log_interval.py +49 -49
  21. crawlo/extension/log_stats.py +44 -44
  22. crawlo/extension/logging_extension.py +35 -0
  23. crawlo/filters/__init__.py +37 -37
  24. crawlo/filters/aioredis_filter.py +150 -150
  25. crawlo/filters/memory_filter.py +202 -202
  26. crawlo/items/__init__.py +22 -62
  27. crawlo/items/base.py +31 -0
  28. crawlo/items/fields.py +54 -0
  29. crawlo/items/items.py +105 -119
  30. crawlo/middleware/__init__.py +21 -21
  31. crawlo/middleware/default_header.py +32 -32
  32. crawlo/middleware/download_delay.py +28 -28
  33. crawlo/middleware/middleware_manager.py +135 -140
  34. crawlo/middleware/proxy.py +246 -0
  35. crawlo/middleware/request_ignore.py +30 -30
  36. crawlo/middleware/response_code.py +18 -18
  37. crawlo/middleware/response_filter.py +26 -26
  38. crawlo/middleware/retry.py +90 -90
  39. crawlo/network/__init__.py +7 -7
  40. crawlo/network/request.py +203 -204
  41. crawlo/network/response.py +166 -166
  42. crawlo/pipelines/__init__.py +13 -13
  43. crawlo/pipelines/console_pipeline.py +39 -39
  44. crawlo/pipelines/mongo_pipeline.py +116 -116
  45. crawlo/pipelines/mysql_batch_pipline.py +273 -134
  46. crawlo/pipelines/mysql_pipeline.py +195 -195
  47. crawlo/pipelines/pipeline_manager.py +56 -56
  48. crawlo/settings/__init__.py +7 -7
  49. crawlo/settings/default_settings.py +169 -94
  50. crawlo/settings/setting_manager.py +99 -99
  51. crawlo/spider/__init__.py +41 -36
  52. crawlo/stats_collector.py +59 -59
  53. crawlo/subscriber.py +106 -106
  54. crawlo/task_manager.py +27 -27
  55. crawlo/templates/crawlo.cfg.tmpl +11 -0
  56. crawlo/templates/project/__init__.py.tmpl +4 -0
  57. crawlo/templates/project/items.py.tmpl +18 -0
  58. crawlo/templates/project/middlewares.py.tmpl +76 -0
  59. crawlo/templates/project/pipelines.py.tmpl +64 -0
  60. crawlo/templates/project/settings.py.tmpl +54 -0
  61. crawlo/templates/project/spiders/__init__.py.tmpl +6 -0
  62. crawlo/templates/spider/spider.py.tmpl +32 -0
  63. crawlo/utils/__init__.py +7 -7
  64. crawlo/utils/concurrency_manager.py +124 -124
  65. crawlo/utils/date_tools.py +233 -177
  66. crawlo/utils/db_helper.py +344 -0
  67. crawlo/utils/func_tools.py +82 -82
  68. crawlo/utils/log.py +129 -39
  69. crawlo/utils/pqueue.py +173 -173
  70. crawlo/utils/project.py +199 -59
  71. crawlo/utils/request.py +267 -122
  72. crawlo/utils/spider_loader.py +63 -0
  73. crawlo/utils/system.py +11 -11
  74. crawlo/utils/tools.py +5 -303
  75. crawlo/utils/url.py +39 -39
  76. {crawlo-1.0.4.dist-info → crawlo-1.0.6.dist-info}/METADATA +49 -48
  77. crawlo-1.0.6.dist-info/RECORD +94 -0
  78. crawlo-1.0.6.dist-info/entry_points.txt +2 -0
  79. {crawlo-1.0.4.dist-info → crawlo-1.0.6.dist-info}/top_level.txt +1 -0
  80. examples/gxb/items.py +36 -0
  81. examples/gxb/run.py +16 -0
  82. examples/gxb/settings.py +72 -0
  83. examples/gxb/spider/__init__.py +0 -0
  84. examples/gxb/spider/miit_spider.py +180 -0
  85. examples/gxb/spider/telecom_device.py +129 -0
  86. tests/__init__.py +7 -7
  87. tests/test_proxy_health_check.py +33 -0
  88. tests/test_proxy_middleware_integration.py +137 -0
  89. tests/test_proxy_providers.py +57 -0
  90. tests/test_proxy_stats.py +20 -0
  91. tests/test_proxy_strategies.py +60 -0
  92. crawlo/downloader/playwright_downloader.py +0 -161
  93. crawlo/templates/item_template.tmpl +0 -22
  94. crawlo/templates/project_template/main.py +0 -33
  95. crawlo/templates/project_template/setting.py +0 -190
  96. crawlo/templates/spider_template.tmpl +0 -31
  97. crawlo-1.0.4.dist-info/RECORD +0 -79
  98. crawlo-1.0.4.dist-info/entry_points.txt +0 -2
  99. tests/baidu_spider/__init__.py +0 -7
  100. tests/baidu_spider/demo.py +0 -94
  101. tests/baidu_spider/items.py +0 -25
  102. tests/baidu_spider/middleware.py +0 -49
  103. tests/baidu_spider/pipeline.py +0 -55
  104. tests/baidu_spider/request_fingerprints.txt +0 -9
  105. tests/baidu_spider/run.py +0 -27
  106. tests/baidu_spider/settings.py +0 -80
  107. tests/baidu_spider/spiders/__init__.py +0 -7
  108. tests/baidu_spider/spiders/bai_du.py +0 -61
  109. tests/baidu_spider/spiders/sina.py +0 -79
  110. {crawlo-1.0.4.dist-info → crawlo-1.0.6.dist-info}/WHEEL +0 -0
  111. {crawlo/templates/project_template/items → examples}/__init__.py +0 -0
  112. {crawlo/templates/project_template/spiders → examples/gxb}/__init__.py +0 -0
crawlo/filters/memory_filter.py CHANGED
@@ -1,202 +1,202 @@
- #!/usr/bin/python
- # -*- coding:UTF-8 -*-
- import os
- import threading
- from weakref import WeakSet
- from typing import Set, TextIO, Optional
-
- from crawlo import Request
- from crawlo.filters import BaseFilter
- from crawlo.utils.log import get_logger
- from crawlo.utils.request import request_fingerprint
-
-
- class MemoryFilter(BaseFilter):
-     """Efficient in-memory request deduplication filter for single-node crawlers."""
-
-     def __init__(self, crawler):
-         """
-         Initialize the in-memory filter.
-
-         :param crawler: crawler instance, used to read settings
-         """
-         self.fingerprints: Set[str] = set()  # primary fingerprint store
-         self._temp_weak_refs = WeakSet()  # temporary weak-reference store (optional)
-
-         debug = crawler.settings.get_bool('FILTER_DEBUG', False)
-         logger = get_logger(
-             self.__class__.__name__,  # use the class name instead of a literal string
-             crawler.settings.get('LOG_LEVEL', 'INFO')
-         )
-         super().__init__(logger, crawler.stats, debug)
-
-         # performance counters
-         self._dupe_count = 0
-         self._unique_count = 0
-
-     def add_fingerprint(self, fp: str) -> None:
-         """
-         Add a request fingerprint.
-
-         :param fp: fingerprint string
-         :raises TypeError: if the fingerprint is not a string
-         """
-         if not isinstance(fp, str):
-             raise TypeError(f"Fingerprint must be a string, got {type(fp)}")
-
-         self.fingerprints.add(fp)
-         self._unique_count += 1
-         # self.logger.debug(f"Added fingerprint: {fp[:10]}...")  # truncated to keep logs short
-
-     def requested(self, request: Request) -> bool:
-         """
-         Check whether a request is a duplicate (primary interface).
-
-         :param request: the request object
-         :return: whether the request is a duplicate
-         """
-         fp = request_fingerprint(request)
-         if fp in self:
-             self._dupe_count += 1
-             # self.logger.debug(f"Duplicate request found: {fp[:10]}...")
-             return True
-
-         self.add_fingerprint(fp)
-         return False
-
-     def __contains__(self, item: str) -> bool:
-         """
-         Support membership tests via the `in` operator.
-
-         :param item: fingerprint to check
-         :return: whether it already exists
-         """
-         return item in self.fingerprints
-
-     @property
-     def stats_summary(self) -> dict:
-         """Return filter statistics."""
-         return {
-             'capacity': len(self.fingerprints),
-             'duplicates': self._dupe_count,
-             'uniques': self._unique_count,
-             'memory_usage': self._estimate_memory()
-         }
-
-     def _estimate_memory(self) -> str:
-         """Estimate memory usage (approximate)."""
-         avg_item_size = sum(len(x) for x in self.fingerprints) / max(1, len(self.fingerprints))
-         total = len(self.fingerprints) * (avg_item_size + 50)  # 50 bytes of per-item overhead
-         return f"{total / (1024 * 1024):.2f} MB"
-
-     def clear(self) -> None:
-         """Clear all fingerprint data."""
-         self.fingerprints.clear()
-         self._dupe_count = 0
-         self._unique_count = 0
-
-     def close(self) -> None:
-         """Close the filter (release resources)."""
-         self.clear()
-
-     # backwards compatibility with the legacy async interface
-     async def closed(self):
-         """Async-compatible close hook."""
-         self.close()
-
-
- class MemoryFileFilter(BaseFilter):
-     """In-memory request fingerprint filter with atomic file persistence."""
-
-     def __init__(self, crawler):
-         """
-         Initialize the filter.
-         :param crawler: Scrapy-style Crawler object, used to read settings
-         """
-         self.fingerprints: Set[str] = set()  # primary storage set
-         self._lock = threading.RLock()  # thread-safety lock
-         self._file: Optional[TextIO] = None  # file handle
-
-         debug = crawler.settings.get_bool("FILTER_DEBUG", False)
-         logger = get_logger(
-             self.__class__.__name__,  # use the class name as the log identifier
-             crawler.settings.get("LOG_LEVEL", "INFO")
-         )
-         super().__init__(logger, crawler.stats, debug)
-
-         # initialize file-backed storage
-         request_dir = crawler.settings.get("REQUEST_DIR")
-         if request_dir:
-             self._init_file_store(request_dir)
-
-     def _init_file_store(self, request_dir: str) -> None:
-         """Atomically initialize file-backed storage."""
-         with self._lock:
-             try:
-                 os.makedirs(request_dir, exist_ok=True)
-                 file_path = os.path.join(request_dir, 'request_fingerprints.txt')
-
-                 # atomic step: load any existing fingerprints
-                 if os.path.exists(file_path):
-                     with open(file_path, 'r', encoding='utf-8') as f:
-                         self.fingerprints.update(
-                             line.strip() for line in f
-                             if line.strip()
-                         )
-
-                 # open the file in append mode
-                 self._file = open(file_path, 'a+', encoding='utf-8')
-                 self.logger.info(f"Initialized fingerprint file: {file_path}")
-
-             except Exception as e:
-                 self.logger.error(f"Failed to init file store: {str(e)}")
-                 raise
-
-     def add_fingerprint(self, fp: str) -> None:
-         """
-         Thread-safe fingerprint insertion.
-         :param fp: fingerprint string
-         """
-         with self._lock:
-             if fp not in self.fingerprints:
-                 self.fingerprints.add(fp)
-                 self._persist_fp(fp)
-
-     def _persist_fp(self, fp: str) -> None:
-         """Persist a fingerprint to the file (must be called while holding the lock)."""
-         if self._file:
-             try:
-                 self._file.write(f"{fp}\n")
-                 self._file.flush()
-                 os.fsync(self._file.fileno())  # make sure the write reaches disk
-             except IOError as e:
-                 self.logger.error(f"Failed to persist fingerprint: {str(e)}")
-
-     def __contains__(self, item: str) -> bool:
-         """
-         Thread-safe fingerprint lookup.
-         :param item: fingerprint to check
-         :return: whether it already exists
-         """
-         with self._lock:
-             return item in self.fingerprints
-
-     def close(self) -> None:
-         """Safely release resources (synchronous)."""
-         with self._lock:
-             if self._file and not self._file.closed:
-                 try:
-                     self._file.flush()
-                     os.fsync(self._file.fileno())
-                 finally:
-                     self._file.close()
-                     self.logger.info(f"Closed fingerprint file: {self._file.name}")
-
-     def __del__(self):
-         """Destructor as a last-resort safety net."""
-         self.close()
-
-     # async-compatible interface
-     async def closed(self):
-         """Standard close entry point."""
-         self.close()
+ #!/usr/bin/python
+ # -*- coding:UTF-8 -*-
+ import os
+ import threading
+ from weakref import WeakSet
+ from typing import Set, TextIO, Optional
+
+ from crawlo import Request
+ from crawlo.filters import BaseFilter
+ from crawlo.utils.log import get_logger
+ from crawlo.utils.request import request_fingerprint
+
+
+ class MemoryFilter(BaseFilter):
+     """Efficient in-memory request deduplication filter for single-node crawlers."""
+
+     def __init__(self, crawler):
+         """
+         Initialize the in-memory filter.
+
+         :param crawler: crawler instance, used to read settings
+         """
+         self.fingerprints: Set[str] = set()  # primary fingerprint store
+         self._temp_weak_refs = WeakSet()  # temporary weak-reference store (optional)
+
+         debug = crawler.settings.get_bool('FILTER_DEBUG', False)
+         logger = get_logger(
+             self.__class__.__name__,  # use the class name instead of a literal string
+             crawler.settings.get('LOG_LEVEL', 'INFO')
+         )
+         super().__init__(logger, crawler.stats, debug)
+
+         # performance counters
+         self._dupe_count = 0
+         self._unique_count = 0
+
+     def add_fingerprint(self, fp: str) -> None:
+         """
+         Add a request fingerprint.
+
+         :param fp: fingerprint string
+         :raises TypeError: if the fingerprint is not a string
+         """
+         if not isinstance(fp, str):
+             raise TypeError(f"Fingerprint must be a string, got {type(fp)}")
+
+         self.fingerprints.add(fp)
+         self._unique_count += 1
+         # self.logger.debug(f"Added fingerprint: {fp[:10]}...")  # truncated to keep logs short
+
+     def requested(self, request: Request) -> bool:
+         """
+         Check whether a request is a duplicate (primary interface).
+
+         :param request: the request object
+         :return: whether the request is a duplicate
+         """
+         fp = request_fingerprint(request)
+         if fp in self:
+             self._dupe_count += 1
+             # self.logger.debug(f"Duplicate request found: {fp[:10]}...")
+             return True
+
+         self.add_fingerprint(fp)
+         return False
+
+     def __contains__(self, item: str) -> bool:
+         """
+         Support membership tests via the `in` operator.
+
+         :param item: fingerprint to check
+         :return: whether it already exists
+         """
+         return item in self.fingerprints
+
+     @property
+     def stats_summary(self) -> dict:
+         """Return filter statistics."""
+         return {
+             'capacity': len(self.fingerprints),
+             'duplicates': self._dupe_count,
+             'uniques': self._unique_count,
+             'memory_usage': self._estimate_memory()
+         }
+
+     def _estimate_memory(self) -> str:
+         """Estimate memory usage (approximate)."""
+         avg_item_size = sum(len(x) for x in self.fingerprints) / max(1, len(self.fingerprints))
+         total = len(self.fingerprints) * (avg_item_size + 50)  # 50 bytes of per-item overhead
+         return f"{total / (1024 * 1024):.2f} MB"
+
+     def clear(self) -> None:
+         """Clear all fingerprint data."""
+         self.fingerprints.clear()
+         self._dupe_count = 0
+         self._unique_count = 0
+
+     def close(self) -> None:
+         """Close the filter (release resources)."""
+         self.clear()
+
+     # backwards compatibility with the legacy async interface
+     async def closed(self):
+         """Async-compatible close hook."""
+         self.close()
+
+
+ class MemoryFileFilter(BaseFilter):
+     """In-memory request fingerprint filter with atomic file persistence."""
+
+     def __init__(self, crawler):
+         """
+         Initialize the filter.
+         :param crawler: Scrapy-style Crawler object, used to read settings
+         """
+         self.fingerprints: Set[str] = set()  # primary storage set
+         self._lock = threading.RLock()  # thread-safety lock
+         self._file: Optional[TextIO] = None  # file handle
+
+         debug = crawler.settings.get_bool("FILTER_DEBUG", False)
+         logger = get_logger(
+             self.__class__.__name__,  # use the class name as the log identifier
+             crawler.settings.get("LOG_LEVEL", "INFO")
+         )
+         super().__init__(logger, crawler.stats, debug)
+
+         # initialize file-backed storage
+         request_dir = crawler.settings.get("REQUEST_DIR")
+         if request_dir:
+             self._init_file_store(request_dir)
+
+     def _init_file_store(self, request_dir: str) -> None:
+         """Atomically initialize file-backed storage."""
+         with self._lock:
+             try:
+                 os.makedirs(request_dir, exist_ok=True)
+                 file_path = os.path.join(request_dir, 'request_fingerprints.txt')
+
+                 # atomic step: load any existing fingerprints
+                 if os.path.exists(file_path):
+                     with open(file_path, 'r', encoding='utf-8') as f:
+                         self.fingerprints.update(
+                             line.strip() for line in f
+                             if line.strip()
+                         )
+
+                 # open the file in append mode
+                 self._file = open(file_path, 'a+', encoding='utf-8')
+                 self.logger.info(f"Initialized fingerprint file: {file_path}")
+
+             except Exception as e:
+                 self.logger.error(f"Failed to init file store: {str(e)}")
+                 raise
+
+     def add_fingerprint(self, fp: str) -> None:
+         """
+         Thread-safe fingerprint insertion.
+         :param fp: fingerprint string
+         """
+         with self._lock:
+             if fp not in self.fingerprints:
+                 self.fingerprints.add(fp)
+                 self._persist_fp(fp)
+
+     def _persist_fp(self, fp: str) -> None:
+         """Persist a fingerprint to the file (must be called while holding the lock)."""
+         if self._file:
+             try:
+                 self._file.write(f"{fp}\n")
+                 self._file.flush()
+                 os.fsync(self._file.fileno())  # make sure the write reaches disk
+             except IOError as e:
+                 self.logger.error(f"Failed to persist fingerprint: {str(e)}")
+
+     def __contains__(self, item: str) -> bool:
+         """
+         Thread-safe fingerprint lookup.
+         :param item: fingerprint to check
+         :return: whether it already exists
+         """
+         with self._lock:
+             return item in self.fingerprints
+
+     def close(self) -> None:
+         """Safely release resources (synchronous)."""
+         with self._lock:
+             if self._file and not self._file.closed:
+                 try:
+                     self._file.flush()
+                     os.fsync(self._file.fileno())
+                 finally:
+                     self._file.close()
+                     self.logger.info(f"Closed fingerprint file: {self._file.name}")
+
+     def __del__(self):
+         """Destructor as a last-resort safety net."""
+         self.close()
+
+     # async-compatible interface
+     async def closed(self):
+         """Standard close entry point."""
+         self.close()
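For orientation, here is a minimal usage sketch of MemoryFilter as defined above. The crawler object is a hypothetical stub standing in for a real crawlo Crawler (it only supplies the settings and stats attributes the constructor reads), and the Request keyword signature is assumed rather than taken from this diff:

    # Illustrative stub only; a real crawl would pass an actual Crawler instance.
    from types import SimpleNamespace

    from crawlo import Request
    from crawlo.filters.memory_filter import MemoryFilter

    settings = SimpleNamespace(
        get_bool=lambda key, default=False: default,  # FILTER_DEBUG stays False
        get=lambda key, default=None: default,        # LOG_LEVEL falls back to 'INFO'
    )
    crawler = SimpleNamespace(settings=settings, stats=None)

    flt = MemoryFilter(crawler)
    req = Request(url="https://example.com/page")  # assumed constructor signature

    print(flt.requested(req))  # False: first sighting, fingerprint is stored
    print(flt.requested(req))  # True: same fingerprint, counted as a duplicate
    print(flt.stats_summary)   # {'capacity': 1, 'duplicates': 1, 'uniques': 1, ...}

MemoryFileFilter offers the same contract but additionally appends every new fingerprint to request_fingerprints.txt under REQUEST_DIR with flush plus fsync, so a restarted crawl can reload the set of already-seen requests.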
crawlo/items/__init__.py CHANGED
@@ -1,62 +1,22 @@
- #!/usr/bin/python
- # -*- coding:UTF-8 -*-
- from abc import ABCMeta
- from typing import Any, Optional, Type
-
-
- class Field:
-     def __init__(
-         self,
-         nullable: bool = True,
-         *,
-         default: Any = None,
-         field_type: Optional[Type] = None,
-         max_length: Optional[int] = None,
-         description: str = ""
-     ):
-         self.nullable = nullable
-         self.default = default
-         self.field_type = field_type
-         self.max_length = max_length
-         self.description = description
-
-     def validate(self, value: Any, field_name: str = "") -> Any:
-         if value is None or (isinstance(value, str) and value.strip() == ""):
-             if self.default is not None:
-                 return self.default
-             elif not self.nullable:
-                 raise ValueError(
-                     f"Field '{field_name}' may not be empty."
-                 )
-
-         if value is not None and not (isinstance(value, str) and value.strip() == ""):
-             if self.field_type and not isinstance(value, self.field_type):
-                 raise TypeError(
-                     f"Field '{field_name}' type error: expected {self.field_type}, got {type(value)}, value: {value!r}"
-                 )
-             if self.max_length and len(str(value)) > self.max_length:
-                 raise ValueError(
-                     f"Field '{field_name}' exceeds the length limit: max {self.max_length}, actual {len(str(value))}, value: {value!r}"
-                 )
-
-         return value
-
-     def __repr__(self):
-         return f"<Field required={self.nullable} type={self.field_type} default={self.default}>"
-
-
- class ItemMeta(ABCMeta):
-     """
-     Metaclass
-     """
-     def __new__(mcs, name, bases, attrs):
-         field = {}
-         cls_attr = {}
-         for k, v in attrs.items():
-             if isinstance(v, Field):
-                 field[k] = v
-             else:
-                 cls_attr[k] = v
-         cls_instance = super().__new__(mcs, name, bases, attrs)
-         cls_instance.FIELDS = field
-         return cls_instance
+ #!/usr/bin/python
+ # -*- coding: UTF-8 -*-
+ """
+ crawlo.items
+ ===============
+ Provides the Item and Field classes for data definition and validation.
+ """
+ from .fields import Field
+ from .items import Item
+ from .base import ItemMeta
+ from crawlo.exceptions import ItemInitError, ItemAttributeError
+
+ __all__ = [
+     'Item',
+     'Field',
+     'ItemMeta',
+     'ItemInitError',
+     'ItemAttributeError'
+ ]
+
+
+
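The package's public surface is now the __all__ re-export list above, so user code imports from crawlo.items rather than from the submodules. A short sketch, assuming Item is built on ItemMeta (items.py itself is not part of this diff), so Field attributes declared on a subclass end up in its FIELDS mapping:

    from crawlo.items import Item, Field

    # NewsItem is a hypothetical example class, not part of the package.
    class NewsItem(Item):
        title = Field(nullable=False, field_type=str, max_length=255)
        url = Field(nullable=False, field_type=str)

    print(sorted(NewsItem.FIELDS))  # ['title', 'url'], if Item uses ItemMeta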
crawlo/items/base.py ADDED
@@ -0,0 +1,31 @@
+ #!/usr/bin/python
+ # -*- coding: UTF-8 -*-
+ """
+ Base metaclass definitions
+ """
+ from abc import ABCMeta
+
+ from crawlo.items import Field
+
+
+ class ItemMeta(ABCMeta):
+     """
+     Metaclass that automatically collects the Field definitions on an Item class
+     """
+
+     def __new__(mcs, name, bases, attrs):
+         fields = {}
+         cls_attrs = {}
+
+         # collect all Field instances
+         for attr_name, attr_value in attrs.items():
+             if isinstance(attr_value, Field):
+                 fields[attr_name] = attr_value
+             else:
+                 cls_attrs[attr_name] = attr_value
+
+         # create the class object
+         cls_instance = super().__new__(mcs, name, bases, cls_attrs)
+         cls_instance.FIELDS = fields
+
+         return cls_instance
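A small sketch of what this metaclass does: Field attributes are lifted out of the class namespace into a FIELDS mapping, using only the Field and ItemMeta re-exports shown earlier in this diff:

    from crawlo.items import Field, ItemMeta

    # Product is a hypothetical example class, not part of the package.
    class Product(metaclass=ItemMeta):
        name = Field(nullable=False, field_type=str, max_length=100)
        price = Field(default=0.0, field_type=float)

    print(sorted(Product.FIELDS))             # ['name', 'price']
    print(Product.FIELDS['name'].max_length)  # 100

Because the Field objects are excluded from cls_attrs before the class is created, plain attribute access such as Product.name now raises AttributeError; the descriptors live only in FIELDS.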
crawlo/items/fields.py ADDED
@@ -0,0 +1,54 @@
+ #!/usr/bin/python
+ # -*- coding: UTF-8 -*-
+ """
+ Field class definition
+ """
+
+ from typing import Any, Optional, Type
+
+
+ class Field:
+     """
+     Field definition class: declares an Item field's attributes and validation rules
+     """
+     def __init__(
+         self,
+         nullable: bool = True,
+         *,
+         default: Any = None,
+         field_type: Optional[Type] = None,
+         max_length: Optional[int] = None,
+         description: str = ""
+     ):
+         self.nullable = nullable
+         self.default = default
+         self.field_type = field_type
+         self.max_length = max_length
+         self.description = description
+
+     def validate(self, value: Any, field_name: str = "") -> Any:
+         """
+         Validate that a value satisfies the field's rules
+         """
+         if value is None or (isinstance(value, str) and value.strip() == ""):
+             if self.default is not None:
+                 return self.default
+             elif not self.nullable:
+                 raise ValueError(
+                     f"Field '{field_name}' may not be empty."
+                 )
+
+         if value is not None and not (isinstance(value, str) and value.strip() == ""):
+             if self.field_type and not isinstance(value, self.field_type):
+                 raise TypeError(
+                     f"Field '{field_name}' type error: expected {self.field_type}, got {type(value)}, value: {value!r}"
+                 )
+             if self.max_length and len(str(value)) > self.max_length:
+                 raise ValueError(
+                     f"Field '{field_name}' exceeds the length limit: max {self.max_length}, actual {len(str(value))}, value: {value!r}"
+                 )
+
+         return value
+
+     def __repr__(self):
+         return f"<Field nullable={self.nullable} type={self.field_type} default={self.default}>"