crawlo 1.4.0__py3-none-any.whl → 1.4.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of crawlo might be problematic.

Files changed (45)
  1. crawlo/__init__.py +9 -4
  2. crawlo/__version__.py +1 -1
  3. crawlo/core/__init__.py +8 -2
  4. crawlo/core/scheduler.py +2 -2
  5. crawlo/downloader/aiohttp_downloader.py +7 -2
  6. crawlo/extension/log_interval.py +44 -7
  7. crawlo/initialization/__init__.py +6 -2
  8. crawlo/middleware/middleware_manager.py +1 -1
  9. crawlo/mode_manager.py +13 -7
  10. crawlo/pipelines/bloom_dedup_pipeline.py +5 -15
  11. crawlo/pipelines/database_dedup_pipeline.py +5 -8
  12. crawlo/pipelines/memory_dedup_pipeline.py +5 -15
  13. crawlo/pipelines/redis_dedup_pipeline.py +2 -15
  14. crawlo/project.py +18 -7
  15. crawlo/settings/default_settings.py +114 -150
  16. crawlo/settings/setting_manager.py +14 -9
  17. crawlo/tools/distributed_coordinator.py +4 -8
  18. crawlo/utils/fingerprint.py +123 -0
  19. {crawlo-1.4.0.dist-info → crawlo-1.4.2.dist-info}/METADATA +1 -1
  20. {crawlo-1.4.0.dist-info → crawlo-1.4.2.dist-info}/RECORD +45 -29
  21. examples/test_project/__init__.py +7 -0
  22. examples/test_project/run.py +35 -0
  23. examples/test_project/test_project/__init__.py +4 -0
  24. examples/test_project/test_project/items.py +18 -0
  25. examples/test_project/test_project/middlewares.py +119 -0
  26. examples/test_project/test_project/pipelines.py +97 -0
  27. examples/test_project/test_project/settings.py +170 -0
  28. examples/test_project/test_project/spiders/__init__.py +10 -0
  29. examples/test_project/test_project/spiders/of_week_dis.py +144 -0
  30. tests/debug_framework_logger.py +1 -1
  31. tests/debug_log_levels.py +1 -1
  32. tests/test_all_pipeline_fingerprints.py +134 -0
  33. tests/test_default_header_middleware.py +242 -87
  34. tests/test_fingerprint_consistency.py +136 -0
  35. tests/test_fingerprint_simple.py +52 -0
  36. tests/test_framework_logger.py +1 -1
  37. tests/test_framework_startup.py +1 -1
  38. tests/test_hash_performance.py +100 -0
  39. tests/test_mode_change.py +1 -1
  40. tests/test_offsite_middleware.py +185 -162
  41. tests/test_offsite_middleware_simple.py +204 -0
  42. tests/test_pipeline_fingerprint_consistency.py +87 -0
  43. {crawlo-1.4.0.dist-info → crawlo-1.4.2.dist-info}/WHEEL +0 -0
  44. {crawlo-1.4.0.dist-info → crawlo-1.4.2.dist-info}/entry_points.txt +0 -0
  45. {crawlo-1.4.0.dist-info → crawlo-1.4.2.dist-info}/top_level.txt +0 -0
crawlo/settings/default_settings.py
@@ -6,6 +6,10 @@
  # Import the environment-variable configuration helpers
  from crawlo.utils.env_config import get_redis_config, get_runtime_config, get_version
 
+ # ===========================================================================
+ # 1. Framework base configuration
+ # ===========================================================================
+
  # Framework initialization control
  FRAMEWORK_INIT_ORDER = [
      'log_system',  # logging system
@@ -16,42 +20,26 @@ FRAMEWORK_INIT_ORDER = [
  ]
  FRAMEWORK_INIT_STATE = 'uninitialized'
 
- # ============================== Project base configuration ==============================
-
- # Project name (used to identify logs, Redis keys, etc.)
- PROJECT_NAME = get_runtime_config()['PROJECT_NAME']
-
- # Make sure the project name is not empty
- if not PROJECT_NAME or PROJECT_NAME == 'None':
-     PROJECT_NAME = 'crawlo'
-
- # Project version - read from the framework's __version__.py, falling back to a default if it is missing
- VERSION = get_version()
-
- # Run mode: standalone/distributed/auto
- RUN_MODE = get_runtime_config()['CRAWLO_MODE']
-
- # Concurrency setting - default tuned for better performance
- CONCURRENCY = get_runtime_config()['CONCURRENCY']
-
- # ============================== Crawler core configuration ==============================
-
- # Default downloader
- DOWNLOADER = "crawlo.downloader.httpx_downloader.HttpXDownloader"
-
- # Request delay (seconds) - default tuned for better performance
- DOWNLOAD_DELAY = 0.5
-
- # Random delay configuration
- RANDOMNESS = False  # whether to enable random delays
+ # Project base configuration
+ runtime_config = get_runtime_config()
+ PROJECT_NAME = runtime_config['PROJECT_NAME']  # project name (used to identify logs, Redis keys, etc.)
+ VERSION = get_version()  # project version - read from the framework's __version__.py, falling back to a default if it is missing
+ RUN_MODE = runtime_config['CRAWLO_MODE']  # run mode: standalone/distributed/auto
+ CONCURRENCY = runtime_config['CONCURRENCY']  # concurrency setting
+
+ # ===========================================================================
+ # 2. Crawler core configuration
+ # ===========================================================================
+
+ # Downloader configuration
+ DOWNLOADER = "crawlo.downloader.httpx_downloader.HttpXDownloader"  # default downloader
+ DOWNLOAD_DELAY = 0.5  # request delay (seconds)
+ RANDOMNESS = True  # whether to enable random delays
  RANDOM_RANGE = [0.5, 1.5]  # random delay range factors; the actual delay ranges from DOWNLOAD_DELAY * RANDOM_RANGE[0] to DOWNLOAD_DELAY * RANDOM_RANGE[1]
 
- # Depth priority (negative = depth-first, positive = breadth-first)
- DEPTH_PRIORITY = 1
-
- # Maximum scheduler queue size - default tuned for better performance
- SCHEDULER_MAX_QUEUE_SIZE = 5000
- # Backpressure control - default tuned for better performance
+ # Scheduler configuration
+ DEPTH_PRIORITY = 1  # depth priority (negative = depth-first, positive = breadth-first)
+ SCHEDULER_MAX_QUEUE_SIZE = 5000  # maximum scheduler queue size
  BACKPRESSURE_RATIO = 0.9  # backpressure trigger threshold (kicks in when the queue reaches 90% of its maximum capacity)
 
  # Request generation control
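The RANDOM_RANGE comment above defines how the per-request delay is derived. A minimal sketch of that computation, shown only for illustration (the helper function and the use of random.uniform are assumptions, not the framework's actual code):

```python
import random

DOWNLOAD_DELAY = 0.5        # base delay in seconds (framework default above)
RANDOMNESS = True           # random scaling is enabled by default as of 1.4.2
RANDOM_RANGE = [0.5, 1.5]   # scaling factors applied to the base delay

def effective_delay() -> float:
    """Return an illustrative per-request delay under the settings above."""
    if not RANDOMNESS:
        return DOWNLOAD_DELAY
    low, high = RANDOM_RANGE
    # actual delay = DOWNLOAD_DELAY * factor, with the factor drawn from [low, high]
    return DOWNLOAD_DELAY * random.uniform(low, high)

# With these defaults the delay falls between 0.25 s and 0.75 s per request.
```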
@@ -59,22 +47,17 @@ REQUEST_GENERATION_BATCH_SIZE = 10  # request generation batch size
  REQUEST_GENERATION_INTERVAL = 0.01  # request generation interval (seconds)
  ENABLE_CONTROLLED_REQUEST_GENERATION = False  # whether to enable controlled request generation
 
- # Scheduler queue name (follows the unified naming convention)
- # When using the Redis queue, uncomment and set this value, or set it in the project settings file
- # SCHEDULER_QUEUE_NAME = f"crawlo:{PROJECT_NAME}:queue:requests"
-
- # Queue type: memory/redis/auto
- QUEUE_TYPE = 'auto'
-
  # Queue configuration
+ QUEUE_TYPE = 'auto'  # queue type: memory/redis/auto
+ # SCHEDULER_QUEUE_NAME = f"crawlo:{PROJECT_NAME}:queue:requests"  # scheduler queue name (follows the unified naming convention)
  QUEUE_MAX_RETRIES = 3  # maximum number of retries for queue operations
  QUEUE_TIMEOUT = 300  # queue operation timeout (seconds)
 
- # The memory filter and dedup pipeline are used by default so the framework also works without Redis
- # In auto mode, if Redis is available, the framework automatically switches to the Redis implementations for better deduplication
- DEFAULT_DEDUP_PIPELINE = 'crawlo.pipelines.memory_dedup_pipeline.MemoryDedupPipeline'
- FILTER_CLASS = 'crawlo.filters.memory_filter.MemoryFilter'
+ # ===========================================================================
+ # 3. Database and filter configuration
+ # ===========================================================================
 
+ # MySQL configuration
  MYSQL_HOST = '127.0.0.1'
  MYSQL_PORT = 3306
  MYSQL_USER = 'root'
@@ -84,8 +67,7 @@ MYSQL_TABLE = 'crawlo'
  MYSQL_BATCH_SIZE = 100
  MYSQL_USE_BATCH = False  # whether to enable batch inserts
 
- # --- Redis filter configuration ---
- # Redis settings are obtained via the environment-variable configuration helpers
+ # Redis configuration
  redis_config = get_redis_config()
  REDIS_HOST = redis_config['REDIS_HOST']
  REDIS_PORT = redis_config['REDIS_PORT']
@@ -110,7 +92,17 @@ CLEANUP_FP = 0  # whether to clear fingerprints when the program exits (0 = do not clear)
  FILTER_DEBUG = True  # whether to enable dedup debug logging
  DECODE_RESPONSES = True  # whether Redis responses are decoded to strings
 
- # ============================== Framework default middleware configuration ==============================
+ # Filter configuration
+ DEFAULT_DEDUP_PIPELINE = 'crawlo.pipelines.memory_dedup_pipeline.MemoryDedupPipeline'  # memory filter and dedup pipeline by default
+ FILTER_CLASS = 'crawlo.filters.memory_filter.MemoryFilter'
+
+ # Bloom filter configuration
+ BLOOM_FILTER_CAPACITY = 1000000  # Bloom filter capacity
+ BLOOM_FILTER_ERROR_RATE = 0.001  # Bloom filter error rate
+
+ # ===========================================================================
+ # 4. Middleware configuration
+ # ===========================================================================
 
  # Framework middleware list (framework default middlewares + user-defined middlewares)
  MIDDLEWARES = [
@@ -126,14 +118,18 @@ MIDDLEWARES = [
      'crawlo.middleware.response_filter.ResponseFilterMiddleware',  # 8. response content filtering
  ]
 
- # ============================== Framework default pipeline configuration ==============================
+ # ===========================================================================
+ # 5. Pipeline configuration
+ # ===========================================================================
 
  # Framework data-processing pipeline list (framework default pipelines + user-defined pipelines)
  PIPELINES = [
      'crawlo.pipelines.console_pipeline.ConsolePipeline',
  ]
 
- # ============================== Framework default extension configuration ==============================
+ # ===========================================================================
+ # 6. Extension configuration
+ # ===========================================================================
 
  # Framework extension component list (framework default extensions + user-defined extensions)
  EXTENSIONS = [
@@ -142,56 +138,81 @@ EXTENSIONS = [
      'crawlo.extension.logging_extension.CustomLoggerExtension',  # custom logging
  ]
 
- # ============================== Logging and monitoring ==============================
+ # ===========================================================================
+ # 7. Logging and monitoring configuration
+ # ===========================================================================
 
+ # Logging configuration
  LOG_LEVEL = None  # log level: DEBUG/INFO/WARNING/ERROR; defaults to None and is set by the user in the project settings
  STATS_DUMP = True  # whether to dump statistics periodically
  LOG_FILE = None  # log file path, set in the project configuration
  LOG_FORMAT = '%(asctime)s - [%(name)s] - %(levelname)s: %(message)s'
  LOG_ENCODING = 'utf-8'
 
- # ============================== Proxy configuration ==============================
+ # Log interval configuration
+ INTERVAL = 60  # log output interval (seconds)
 
- # The proxy feature is disabled by default; enable it and configure the related parameters in the project settings file if needed
- PROXY_ENABLED = False  # whether to enable the proxy
+ # Custom logging configuration
+ LOG_ENABLE_CUSTOM = False  # whether to enable custom logging
 
- # Simplified proxy configuration (for SimpleProxyMiddleware)
- PROXY_LIST = []  # proxy list, e.g.: ["http://proxy1:8080", "http://proxy2:8080"]
+ # Memory monitoring configuration
+ MEMORY_MONITOR_ENABLED = False  # whether to enable memory monitoring
+ MEMORY_MONITOR_INTERVAL = 60  # memory monitoring check interval (seconds)
+ MEMORY_WARNING_THRESHOLD = 80.0  # memory usage warning threshold (percent)
+ MEMORY_CRITICAL_THRESHOLD = 90.0  # memory usage critical threshold (percent)
 
- # Advanced proxy configuration (for ProxyMiddleware)
- PROXY_API_URL = ""  # proxy fetch API (replace with a real address)
+ # Performance profiling configuration
+ PERFORMANCE_PROFILER_ENABLED = False  # whether to enable performance profiling
+ PERFORMANCE_PROFILER_OUTPUT_DIR = 'profiling'  # performance profiling output directory
+ PERFORMANCE_PROFILER_INTERVAL = 300  # performance profiling interval (seconds)
+
+ # Health check configuration
+ HEALTH_CHECK_ENABLED = True  # whether to enable health checks
+
+ # ===========================================================================
+ # 8. Network request configuration
+ # ===========================================================================
+
+ # Default request header configuration
+ DEFAULT_REQUEST_HEADERS = {
+     'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',
+     'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8',
+     'Accept-Encoding': 'gzip, deflate, br',
+ }  # default request headers
 
- # Proxy extraction method (supports a field path or a function)
- # Example: "proxy" for {"proxy": "http://1.1.1.1:8080"}
- # Example: "data.proxy" for {"data": {"proxy": "http://1.1.1.1:8080"}}
- PROXY_EXTRACTOR = "proxy"
+ # Default User-Agent (a modern browser User-Agent)
+ USER_AGENT = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/136.0.0.0 Safari/537.36"
 
- # Proxy refresh control
+ # Whether to enable the random User-Agent feature (disabled by default; enable it if needed)
+ RANDOM_USER_AGENT_ENABLED = False  # whether to enable random User-Agents
+
+ # Offsite filtering configuration
+ ALLOWED_DOMAINS = []  # list of allowed domains
+
+ # Proxy configuration
+ PROXY_ENABLED = False  # whether to enable the proxy
+ PROXY_LIST = []  # simplified proxy configuration (for SimpleProxyMiddleware)
+ PROXY_API_URL = ""  # advanced proxy configuration (for ProxyMiddleware)
+ PROXY_EXTRACTOR = "proxy"  # proxy extraction method
  PROXY_REFRESH_INTERVAL = 60  # proxy refresh interval (seconds)
  PROXY_API_TIMEOUT = 10  # proxy API request timeout
  PROXY_POOL_SIZE = 5  # proxy pool size
  PROXY_HEALTH_CHECK_THRESHOLD = 0.5  # proxy health check threshold
 
- # ============================== Curl-Cffi specific configuration ==============================
-
- # Browser fingerprint emulation (only effective with the CurlCffi downloader)
- CURL_BROWSER_TYPE = "chrome"  # options: chrome, edge, safari, firefox, or a version such as chrome136
-
- # Custom browser version map (can override the default behaviour)
- CURL_BROWSER_VERSION_MAP = {
-     "chrome": "chrome136",
-     "edge": "edge101",
-     "safari": "safari184",
-     "firefox": "firefox135",
- }
-
- # ============================== Downloader optimization configuration ==============================
+ # Common downloader configuration
+ DOWNLOAD_TIMEOUT = 30  # download timeout (seconds)
+ VERIFY_SSL = True  # whether to verify SSL certificates
+ CONNECTION_POOL_LIMIT = 100  # connection pool size limit
+ CONNECTION_POOL_LIMIT_PER_HOST = 20  # connection pool size limit per host
+ DOWNLOAD_MAXSIZE = 10 * 1024 * 1024  # maximum download size (bytes)
+ DOWNLOAD_STATS = True  # whether to enable download statistics
+ DOWNLOAD_WARN_SIZE = 1024 * 1024  # download warning size (bytes)
+ DOWNLOAD_RETRY_TIMES = 3  # number of download retries
+ MAX_RETRY_TIMES = 3  # maximum number of retries
 
  # Downloader health check
  DOWNLOADER_HEALTH_CHECK = True  # whether to enable downloader health checks
  HEALTH_CHECK_INTERVAL = 60  # health check interval (seconds)
-
- # Request statistics configuration
  REQUEST_STATS_ENABLED = True  # whether to enable request statistics
  STATS_RESET_ON_START = False  # whether to reset statistics on startup
 
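The consolidated proxy settings above drop the old per-setting examples, but PROXY_EXTRACTOR still works as those removed comments described: it is a field path into the proxy API's JSON response. A minimal illustrative sketch of that extraction (the helper name and logic are assumptions, not Crawlo's actual implementation):

```python
from typing import Any

def extract_proxy(payload: dict, extractor: str = "proxy") -> Any:
    """Walk a dotted field path such as 'data.proxy' through a nested dict (illustrative only)."""
    value: Any = payload
    for key in extractor.split("."):
        value = value[key]
    return value

# "proxy" suits a flat response, "data.proxy" a nested one:
extract_proxy({"proxy": "http://1.1.1.1:8080"})                           # -> "http://1.1.1.1:8080"
extract_proxy({"data": {"proxy": "http://1.1.1.1:8080"}}, "data.proxy")   # -> "http://1.1.1.1:8080"
```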
@@ -203,20 +224,16 @@ HTTPX_FOLLOW_REDIRECTS = True  # whether to follow redirects automatically
  AIOHTTP_AUTO_DECOMPRESS = True  # whether to decompress responses automatically
  AIOHTTP_FORCE_CLOSE = False  # whether to force-close connections
 
- # Common downloader configuration
- DOWNLOAD_TIMEOUT = 30  # download timeout (seconds)
- VERIFY_SSL = True  # whether to verify SSL certificates
- CONNECTION_POOL_LIMIT = 100  # connection pool size limit
- CONNECTION_POOL_LIMIT_PER_HOST = 20  # connection pool size limit per host
- DOWNLOAD_MAXSIZE = 10 * 1024 * 1024  # maximum download size (bytes)
- DOWNLOAD_STATS = True  # whether to enable download statistics
- DOWNLOAD_WARN_SIZE = 1024 * 1024  # download warning size (bytes)
- DOWNLOAD_RETRY_TIMES = 3  # number of download retries
- MAX_RETRY_TIMES = 3  # maximum number of retries
-
- # ============================== Selenium downloader configuration ==============================
+ # Curl-Cffi specific configuration
+ CURL_BROWSER_TYPE = "chrome"  # browser fingerprint emulation (only effective with the CurlCffi downloader)
+ CURL_BROWSER_VERSION_MAP = {  # custom browser version map (can override the default behaviour)
+     "chrome": "chrome136",
+     "edge": "edge101",
+     "safari": "safari184",
+     "firefox": "firefox135",
+ }
 
- # Selenium base configuration
+ # Selenium downloader configuration
  SELENIUM_BROWSER_TYPE = "chrome"  # browser type: chrome, firefox, edge
  SELENIUM_HEADLESS = True  # whether to run headless
  SELENIUM_TIMEOUT = 30  # timeout (seconds)
@@ -229,9 +246,7 @@ SELENIUM_PROXY = None  # proxy setting
  SELENIUM_SINGLE_BROWSER_MODE = True  # single-browser, multi-tab mode
  SELENIUM_MAX_TABS_PER_BROWSER = 10  # maximum number of tabs per browser
 
- # ============================== Playwright downloader configuration ==============================
-
- # Playwright base configuration
+ # Playwright downloader configuration
  PLAYWRIGHT_BROWSER_TYPE = "chromium"  # browser type: chromium, firefox, webkit
  PLAYWRIGHT_HEADLESS = True  # whether to run headless
  PLAYWRIGHT_TIMEOUT = 30000  # timeout (milliseconds)
@@ -247,58 +262,9 @@ PLAYWRIGHT_MAX_PAGES_PER_BROWSER = 10  # maximum number of pages per browser
  CONNECTION_TTL_DNS_CACHE = 300  # DNS cache TTL (seconds)
  CONNECTION_KEEPALIVE_TIMEOUT = 15  # keep-alive timeout (seconds)
 
- # ============================== Memory monitoring configuration ==============================
-
- # The memory monitoring extension is disabled by default; enable it in the project settings file if needed
- MEMORY_MONITOR_ENABLED = False  # whether to enable memory monitoring
- MEMORY_MONITOR_INTERVAL = 60  # memory monitoring check interval (seconds)
- MEMORY_WARNING_THRESHOLD = 80.0  # memory usage warning threshold (percent)
- MEMORY_CRITICAL_THRESHOLD = 90.0  # memory usage critical threshold (percent)
-
- # ============================== Performance profiling configuration ==============================
-
- # The performance profiling extension is disabled by default
- PERFORMANCE_PROFILER_ENABLED = False  # whether to enable performance profiling
- PERFORMANCE_PROFILER_OUTPUT_DIR = 'profiling'  # performance profiling output directory
- PERFORMANCE_PROFILER_INTERVAL = 300  # performance profiling interval (seconds)
-
- # ============================== Health check configuration ==============================
-
- # The health check extension is enabled by default
- HEALTH_CHECK_ENABLED = True  # whether to enable health checks
-
- # ============================== Log interval configuration ==============================
-
- # Log interval extension configuration
- INTERVAL = 60  # log output interval (seconds)
-
- # ============================== Custom logging configuration ==============================
-
- # Custom logging extension configuration
- LOG_ENABLE_CUSTOM = False  # whether to enable custom logging
-
- # ============================== Default request header configuration ==============================
-
- # Default request header configuration
- DEFAULT_REQUEST_HEADERS = {}  # default request headers
- USER_AGENT = None  # User-Agent
- USER_AGENTS = []  # User-Agent list
- RANDOM_HEADERS = {}  # random request headers
- RANDOM_USER_AGENT_ENABLED = False  # whether to enable random User-Agents
- USER_AGENT_DEVICE_TYPE = "all"  # User-Agent device type
-
- # ============================== Offsite filtering configuration ==============================
-
- # Offsite filtering configuration
- ALLOWED_DOMAINS = []  # list of allowed domains
-
- # ============================== Bloom filter configuration ==============================
-
- # Bloom filter configuration
- BLOOM_FILTER_CAPACITY = 1000000  # Bloom filter capacity
- BLOOM_FILTER_ERROR_RATE = 0.001  # Bloom filter error rate
-
- # ============================== CSV pipeline configuration ==============================
+ # ===========================================================================
+ # 9. Data storage configuration
+ # ===========================================================================
 
  # CSV pipeline configuration
  CSV_DELIMITER = ','  # CSV delimiter
@@ -311,11 +277,9 @@ CSV_DICT_FILE = None  # CSV dictionary file path
  CSV_BATCH_SIZE = 100  # CSV batch size
  CSV_BATCH_FILE = None  # CSV batch file path
 
- # ============================== Database dedup pipeline configuration ==============================
-
  # Database dedup pipeline configuration
  DB_HOST = 'localhost'  # database host
  DB_PORT = 3306  # database port
  DB_USER = 'root'  # database user
  DB_PASSWORD = ''  # database password
- DB_NAME = 'crawlo'  # database name
+ DB_NAME = 'crawlo'  # database name
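The reorganised defaults above are all meant to be overridden per project. As a rough illustration, a project-level settings module might override a handful of them like this (the values and the my_project name are made up; only the setting names come from the diff above):

```python
# my_project/settings.py (illustrative values, not recommendations)
PROJECT_NAME = "my_project"
CONCURRENCY = 8
DOWNLOAD_DELAY = 1.0        # slower than the 0.5 s framework default
RANDOMNESS = True
QUEUE_TYPE = "redis"        # pin the Redis queue instead of 'auto'
DEFAULT_DEDUP_PIPELINE = "crawlo.pipelines.redis_dedup_pipeline.RedisDedupPipeline"
LOG_LEVEL = "INFO"
ALLOWED_DOMAINS = ["example.com"]
PROXY_ENABLED = False
```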
crawlo/settings/setting_manager.py
@@ -53,15 +53,7 @@ class SettingManager(MutableMapping):
              merged_pipelines.append(pipeline)
          self.attributes['PIPELINES'] = merged_pipelines
 
-         # Special handling for PIPELINES: make sure the dedup pipeline comes first
-         dedup_pipeline = self.attributes.get('DEFAULT_DEDUP_PIPELINE')
-         if dedup_pipeline:
-             pipelines = self.attributes.get('PIPELINES', [])
-             # Remove any existing dedup pipeline instances
-             pipelines = [item for item in pipelines if item != dedup_pipeline]
-             # Insert the dedup pipeline at the front
-             pipelines.insert(0, dedup_pipeline)
-             self.attributes['PIPELINES'] = pipelines
+
 
 
          # Merge the extension configuration
@@ -82,6 +74,19 @@ class SettingManager(MutableMapping):
          for key, value in user_config.items():
              if key not in ['MIDDLEWARES', 'PIPELINES', 'EXTENSIONS']:
                  self.attributes[key] = value
+
+         # Special handling for PIPELINES: make sure the dedup pipeline comes first (runs after all config updates)
+         dedup_pipeline = self.attributes.get('DEFAULT_DEDUP_PIPELINE')
+         if dedup_pipeline:
+             pipelines = self.attributes.get('PIPELINES', [])
+             # Remove any existing dedup pipeline instances
+             # (both the memory and the Redis dedup pipelines)
+             pipelines = [item for item in pipelines
+                          if item not in ('crawlo.pipelines.memory_dedup_pipeline.MemoryDedupPipeline',
+                                          'crawlo.pipelines.redis_dedup_pipeline.RedisDedupPipeline')]
+             # Insert the dedup pipeline at the front
+             pipelines.insert(0, dedup_pipeline)
+             self.attributes['PIPELINES'] = pipelines
 
      def set_settings(self, module):
          if isinstance(module, str):
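The relocated block now runs after the user settings have been merged, so the configured dedup pipeline always ends up at the head of PIPELINES. A small sketch of the intended outcome under assumed inputs (the my_project pipeline name is hypothetical):

```python
# PIPELINES after merging a user config (illustrative input)
pipelines = [
    'crawlo.pipelines.console_pipeline.ConsolePipeline',
    'my_project.pipelines.MySQLStoragePipeline',
    'crawlo.pipelines.memory_dedup_pipeline.MemoryDedupPipeline',
]
DEFAULT_DEDUP_PIPELINE = 'crawlo.pipelines.redis_dedup_pipeline.RedisDedupPipeline'

# The block above strips any memory/Redis dedup entries and re-inserts the
# configured dedup pipeline at index 0, so items are deduplicated first:
# ['crawlo.pipelines.redis_dedup_pipeline.RedisDedupPipeline',
#  'crawlo.pipelines.console_pipeline.ConsolePipeline',
#  'my_project.pipelines.MySQLStoragePipeline']
```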
crawlo/tools/distributed_coordinator.py
@@ -13,6 +13,8 @@ from datetime import datetime
  from typing import Dict, Any, Optional, Tuple, List, Set
  from urllib.parse import urlparse
 
+ from crawlo.utils.fingerprint import FingerprintGenerator
+
 
  class TaskDistributor:
      """Task distribution helper class"""
@@ -99,15 +101,9 @@ class DeduplicationTool:
              data (Any): the data
 
          Returns:
-             str: data fingerprint (MD5 hash)
+             str: data fingerprint (SHA256 hash)
          """
-         if isinstance(data, dict):
-             # For dicts, sort the keys to ensure consistency
-             data_str = str(sorted(data.items()))
-         else:
-             data_str = str(data)
-
-         return hashlib.md5(data_str.encode('utf-8')).hexdigest()
+         return FingerprintGenerator.data_fingerprint(data)
 
      def is_duplicate(self, data: Any) -> bool:
          """
crawlo/utils/fingerprint.py (new file)
@@ -0,0 +1,123 @@
+ #!/usr/bin/python
+ # -*- coding:UTF-8 -*-
+ """
+ Unified fingerprint generation utilities
+ ========================================
+ Provide consistent fingerprint generation so that fingerprints produced across framework components stay identical.
+
+ Features:
+ - Unified algorithm: all fingerprints use the same algorithm (SHA256)
+ - Consistent format: the same data yields the same fingerprint in every context
+ - High performance: an optimised implementation keeps generation efficient
+ - Extensible: supports fingerprinting different data types
+ """
+
+ import hashlib
+ import json
+ from typing import Any, Dict, Union
+ from collections import namedtuple
+
+
+ def generate_data_fingerprint(data: Any) -> str:
+     """
+     Generate a data fingerprint.
+
+     Builds a unique fingerprint from the data content, used for deduplication.
+     Uses the SHA256 algorithm.
+
+     :param data: the data to fingerprint (dict, Item, namedtuple, str, etc.)
+     :return: the data fingerprint (hex string)
+     """
+     # Convert the data to a serialisable dict
+     if hasattr(data, 'to_dict'):
+         # Objects such as Item that implement a to_dict method
+         data_dict = data.to_dict()
+     elif hasattr(data, '_asdict'):
+         # namedtuple objects
+         data_dict = data._asdict()
+     elif isinstance(data, dict):
+         data_dict = data
+     else:
+         # Any other type is handled as a string
+         data_dict = {'__data__': str(data)}
+
+     # Sort the dict items to ensure consistency
+     sorted_items = sorted(data_dict.items())
+
+     # Build the fingerprint string
+     fingerprint_string = '|'.join([f"{k}={v}" for k, v in sorted_items if v is not None])
+
+     # Use SHA256 to produce a fixed-length fingerprint
+     return hashlib.sha256(fingerprint_string.encode('utf-8')).hexdigest()
+
+
+ def generate_request_fingerprint(
+     method: str,
+     url: str,
+     body: bytes = b'',
+     headers: Dict[str, str] = None
+ ) -> str:
+     """
+     Generate a request fingerprint.
+
+     Builds a unique fingerprint from the request method, URL, body and optional headers.
+     Uses the SHA256 algorithm.
+
+     :param method: HTTP method
+     :param url: request URL
+     :param body: request body
+     :param headers: request headers
+     :return: the request fingerprint (hex string)
+     """
+     hash_func = hashlib.sha256()
+
+     # Basic fields
+     hash_func.update(method.encode('utf-8'))
+     hash_func.update(url.encode('utf-8'))
+     hash_func.update(body or b'')
+
+     # Optional headers
+     if headers:
+         # Sort the headers to ensure consistency
+         sorted_headers = sorted(headers.items())
+         for name, value in sorted_headers:
+             hash_func.update(f"{name}:{value}".encode('utf-8'))
+
+     return hash_func.hexdigest()
+
+
+ class FingerprintGenerator:
+     """Fingerprint generator class"""
+
+     @staticmethod
+     def item_fingerprint(item) -> str:
+         """
+         Generate an item fingerprint.
+
+         :param item: the item
+         :return: fingerprint string
+         """
+         return generate_data_fingerprint(item)
+
+     @staticmethod
+     def request_fingerprint(method: str, url: str, body: bytes = b'', headers: Dict[str, str] = None) -> str:
+         """
+         Generate a request fingerprint.
+
+         :param method: HTTP method
+         :param url: request URL
+         :param body: request body
+         :param headers: request headers
+         :return: fingerprint string
+         """
+         return generate_request_fingerprint(method, url, body, headers)
+
+     @staticmethod
+     def data_fingerprint(data: Any) -> str:
+         """
+         Generate a generic data fingerprint.
+
+         :param data: arbitrary data
+         :return: fingerprint string
+         """
+         return generate_data_fingerprint(data)
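Given the module above, downstream components (the dedup pipelines and DeduplicationTool) can rely on equivalent records hashing to the same value. A short usage sketch based only on the code shown above (the example record and URL are made up):

```python
from collections import namedtuple

from crawlo.utils.fingerprint import FingerprintGenerator

Article = namedtuple("Article", ["title", "url"])

as_dict = {"title": "Hello", "url": "https://example.com/a"}
as_tuple = Article(title="Hello", url="https://example.com/a")

# A dict and a namedtuple carrying the same fields produce the same SHA256 fingerprint,
# because both are normalised to sorted key=value pairs before hashing.
assert FingerprintGenerator.data_fingerprint(as_dict) == FingerprintGenerator.data_fingerprint(as_tuple)

# Request fingerprints hash method + URL + body, plus sorted headers when given.
fp = FingerprintGenerator.request_fingerprint("GET", "https://example.com/a")
print(len(fp))  # 64-character hex digest
```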
crawlo-1.4.0.dist-info/METADATA → crawlo-1.4.2.dist-info/METADATA
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: crawlo
- Version: 1.4.0
+ Version: 1.4.2
  Summary: Crawlo is a high-performance Python crawler framework based on asynchronous IO, with support for distributed crawling.
  Home-page: https://github.com/crawl-coder/Crawlo.git
  Author: crawl-coder