crawlo 1.4.0__py3-none-any.whl → 1.4.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of crawlo might be problematic.

Files changed (45)
  1. crawlo/__init__.py +9 -4
  2. crawlo/__version__.py +1 -1
  3. crawlo/core/__init__.py +8 -2
  4. crawlo/core/scheduler.py +2 -2
  5. crawlo/downloader/aiohttp_downloader.py +7 -2
  6. crawlo/extension/log_interval.py +44 -7
  7. crawlo/initialization/__init__.py +6 -2
  8. crawlo/middleware/middleware_manager.py +1 -1
  9. crawlo/mode_manager.py +13 -7
  10. crawlo/pipelines/bloom_dedup_pipeline.py +5 -15
  11. crawlo/pipelines/database_dedup_pipeline.py +5 -8
  12. crawlo/pipelines/memory_dedup_pipeline.py +5 -15
  13. crawlo/pipelines/redis_dedup_pipeline.py +2 -15
  14. crawlo/project.py +18 -7
  15. crawlo/settings/default_settings.py +114 -150
  16. crawlo/settings/setting_manager.py +14 -9
  17. crawlo/tools/distributed_coordinator.py +4 -8
  18. crawlo/utils/fingerprint.py +123 -0
  19. {crawlo-1.4.0.dist-info → crawlo-1.4.2.dist-info}/METADATA +1 -1
  20. {crawlo-1.4.0.dist-info → crawlo-1.4.2.dist-info}/RECORD +45 -29
  21. examples/test_project/__init__.py +7 -0
  22. examples/test_project/run.py +35 -0
  23. examples/test_project/test_project/__init__.py +4 -0
  24. examples/test_project/test_project/items.py +18 -0
  25. examples/test_project/test_project/middlewares.py +119 -0
  26. examples/test_project/test_project/pipelines.py +97 -0
  27. examples/test_project/test_project/settings.py +170 -0
  28. examples/test_project/test_project/spiders/__init__.py +10 -0
  29. examples/test_project/test_project/spiders/of_week_dis.py +144 -0
  30. tests/debug_framework_logger.py +1 -1
  31. tests/debug_log_levels.py +1 -1
  32. tests/test_all_pipeline_fingerprints.py +134 -0
  33. tests/test_default_header_middleware.py +242 -87
  34. tests/test_fingerprint_consistency.py +136 -0
  35. tests/test_fingerprint_simple.py +52 -0
  36. tests/test_framework_logger.py +1 -1
  37. tests/test_framework_startup.py +1 -1
  38. tests/test_hash_performance.py +100 -0
  39. tests/test_mode_change.py +1 -1
  40. tests/test_offsite_middleware.py +185 -162
  41. tests/test_offsite_middleware_simple.py +204 -0
  42. tests/test_pipeline_fingerprint_consistency.py +87 -0
  43. {crawlo-1.4.0.dist-info → crawlo-1.4.2.dist-info}/WHEEL +0 -0
  44. {crawlo-1.4.0.dist-info → crawlo-1.4.2.dist-info}/entry_points.txt +0 -0
  45. {crawlo-1.4.0.dist-info → crawlo-1.4.2.dist-info}/top_level.txt +0 -0
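
The headline change in this release is the new shared fingerprint module (crawlo/utils/fingerprint.py, +123 lines): judging by the line counts and the new tests, the four dedup pipelines (memory, Redis, Bloom, database) each drop their private fingerprint code in favor of the shared FingerprintGenerator, and the added test files verify that all of them now produce identical fingerprints for the same item. The module's internals are not shown in this diff; the following is only a minimal sketch of the deterministic hashing the tests imply — the method names item_fingerprint/data_fingerprint come from the tests, but the sorted-key JSON + SHA-256 scheme is an assumption:

    import hashlib
    import json


    class FingerprintGenerator:
        """Sketch only; the real module's internals are not in this diff."""

        @staticmethod
        def data_fingerprint(data: dict) -> str:
            # Sort keys so logically equal dicts serialize to identical
            # bytes before hashing; default=str covers non-JSON types.
            payload = json.dumps(data, sort_keys=True, ensure_ascii=False, default=str)
            return hashlib.sha256(payload.encode('utf-8')).hexdigest()

        @staticmethod
        def item_fingerprint(item) -> str:
            # Items in the tests expose to_dict(); fall back to public attributes.
            data = item.to_dict() if hasattr(item, 'to_dict') else {
                k: v for k, v in vars(item).items() if not k.startswith('_')
            }
            return FingerprintGenerator.data_fingerprint(data)

Whatever the real scheme is, the design point the tests enforce is the same: a single canonical serialization step, so that switching dedup backends (memory, Redis, Bloom, database) never changes which items count as duplicates.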
examples/test_project/test_project/spiders/__init__.py ADDED
@@ -0,0 +1,10 @@
+ # -*- coding: UTF-8 -*-
+ """
+ test_project.spiders
+ ========================
+ Holds all of the project's spiders.
+ """
+
+ # Automatically import all spiders here to ensure they are registered.
+ # Example:
+ # from .YourSpider import YourSpider
examples/test_project/test_project/spiders/of_week_dis.py ADDED
@@ -0,0 +1,144 @@
+ # -*- coding: UTF-8 -*-
+ """
+ test_project.spiders.of_week_dis
+ =======================================
+ Spider generated by the `crawlo genspider` command.
+ Built on the Crawlo framework, with support for async concurrency, distributed crawling, and more.
+
+ Usage example:
+     crawlo crawl of_week_dis
+ """
+
+ from crawlo.spider import Spider
+ from crawlo import Request
+ from ..items import ExampleItem
+
+
+ class OfweekdisSpider(Spider):
+     """
+     Spider: of_week_dis
+
+     Features:
+     - Concurrent crawling
+     - Automatic dedup filtering
+     - Retry on errors
+     - Data pipeline processing
+     """
+     name = 'of_week_dis'
+     allowed_domains = ['ee.ofweek.com']
+     start_urls = ['https://ee.ofweek.com/']
+
+     # Advanced configuration (optional)
+     # custom_settings = {
+     #     'DOWNLOAD_DELAY': 2.0,
+     #     'CONCURRENCY': 4,
+     #     'RETRY_HTTP_CODES': [500, 502, 503, 504, 408, 429],
+     #     'ALLOWED_RESPONSE_CODES': [200, 301, 302],  # only accept specific status codes
+     #     'DENIED_RESPONSE_CODES': [403, 404],  # reject specific status codes
+     # }
+
+     def start_requests(self):
+         """
+         Generate the initial requests.
+
+         Supports custom headers, proxies, priorities, and more.
+         """
+         headers = {
+             'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
+             'Accept-Language': 'zh-CN,zh;q=0.8,en-US;q=0.5,en;q=0.3',
+         }
+
+         for url in self.start_urls:
+             yield Request(
+                 url=url,
+                 callback=self.parse,
+                 headers=headers,
+                 # meta={'proxy': 'http://proxy.example.com:8080'},  # custom proxy
+                 # priority=10,  # request priority (higher number = higher priority)
+             )
+
+     def parse(self, response):
+         """
+         Main parsing method for responses.
+
+         Args:
+             response: the response object, carrying page content and metadata
+
+         Yields:
+             Request: new request objects (for deeper crawling)
+             Item: data items (for storage)
+         """
+         self.logger.info(f'Parsing page: {response.url}')
+
+         # ================== Data extraction example ==================
+
+         # Extract data into an Item
+         # item = ExampleItem()
+         # item['title'] = response.xpath('//title/text()').get(default='')
+         # item['url'] = response.url
+         # item['content'] = response.xpath('//div[@class="content"]//text()').getall()
+         # yield item
+
+         # Yield a plain dict (for simple data)
+         yield {
+             'title': response.xpath('//title/text()').get(default=''),
+             'url': response.url,
+             'status_code': response.status_code,
+             # 'description': response.xpath('//meta[@name="description"]/@content').get(),
+             # 'keywords': response.xpath('//meta[@name="keywords"]/@content').get(),
+         }
+
+         # ================== Link extraction example ==================
+
+         # Extract and follow links
+         # links = response.xpath('//a/@href').getall()
+         # for link in links:
+         #     # Keep only valid links
+         #     if link and not link.startswith(('javascript:', 'mailto:', '#')):
+         #         yield response.follow(
+         #             link,
+         #             callback=self.parse_detail,  # or self.parse to keep recursing
+         #             meta={'parent_url': response.url}  # pass the parent page along
+         #         )
+
+         # Extract links with CSS selectors
+         # for link in response.css('a.item-link::attr(href)').getall():
+         #     yield response.follow(link, callback=self.parse_detail)
+
+         # ================== Pagination example ==================
+
+         # Follow a "next page" link
+         # next_page = response.xpath('//a[@class="next"]/@href').get()
+         # if next_page:
+         #     yield response.follow(next_page, callback=self.parse)
+
+         # Numeric pagination
+         # current_page = int(response.meta.get('page', 1))
+         # max_pages = 100  # cap the number of pages
+         # if current_page < max_pages:
+         #     next_url = f'https://ee.ofweek.com/page/{current_page + 1}'
+         #     yield Request(
+         #         url=next_url,
+         #         callback=self.parse,
+         #         meta={'page': current_page + 1}
+         #     )
+
+     def parse_detail(self, response):
+         """
+         Parse a detail page (optional).
+
+         Handles detail pages reached from a listing page.
+         """
+         self.logger.info(f'Parsing detail page: {response.url}')
+
+         # parent_url = response.meta.get('parent_url', '')
+         #
+         # yield {
+         #     'title': response.xpath('//h1/text()').get(default=''),
+         #     'content': '\n'.join(response.xpath('//div[@class="content"]//text()').getall()),
+         #     'url': response.url,
+         #     'parent_url': parent_url,
+         #     'publish_time': response.xpath('//time/@datetime').get(),
+         # }
+
+         pass
tests/debug_framework_logger.py CHANGED
@@ -7,7 +7,7 @@ import sys
  import os
  sys.path.insert(0, '/')

- from crawlo.core.framework_initializer import initialize_framework, get_framework_initializer
+ from crawlo.initialization import initialize_framework, get_framework_initializer
  from crawlo.utils.log import get_logger, LoggerManager
  import logging

tests/debug_log_levels.py CHANGED
@@ -7,7 +7,7 @@ import sys
  import os
  sys.path.insert(0, '/')

- from crawlo.core.framework_initializer import initialize_framework
+ from crawlo.initialization import initialize_framework
  from crawlo.utils.log import LoggerManager, get_logger
  import logging

tests/test_all_pipeline_fingerprints.py ADDED
@@ -0,0 +1,134 @@
+ #!/usr/bin/python
+ # -*- coding:UTF-8 -*-
+ """
+ Fingerprint consistency tests for all dedup pipelines
+ ====================
+ Verify that every dedup pipeline generates an identical fingerprint for the same data.
+ """
+
+ import sys
+ import os
+
+ # Add the project root to the Python path
+ sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
+
+ from crawlo.utils.fingerprint import FingerprintGenerator
+
+
+ class MockItem:
+     """Mock data item"""
+
+     def __init__(self, **kwargs):
+         for key, value in kwargs.items():
+             setattr(self, key, value)
+
+     def to_dict(self):
+         """Convert to a dict"""
+         return {k: v for k, v in self.__dict__.items() if not k.startswith('_')}
+
+
+ def test_all_pipeline_fingerprints():
+     """Test fingerprint consistency across all pipelines"""
+     # Create a test item
+     test_item = MockItem(
+         title="Test Title",
+         url="https://example.com",
+         content="Test content",
+         price=99.99
+     )
+
+     # Generate the reference fingerprint with the shared generator
+     expected_fingerprint = FingerprintGenerator.item_fingerprint(test_item)
+
+     print(f"Expected fingerprint: {expected_fingerprint}")
+
+     # Test the memory dedup pipeline's fingerprint generation
+     try:
+         from crawlo.pipelines.memory_dedup_pipeline import MemoryDedupPipeline
+         memory_pipeline = MemoryDedupPipeline()
+         memory_fingerprint = memory_pipeline._generate_item_fingerprint(test_item)
+         print(f"Memory pipeline fingerprint: {memory_fingerprint}")
+         assert memory_fingerprint == expected_fingerprint, "Memory pipeline fingerprint mismatch"
+         print("✓ Memory pipeline fingerprint matches")
+     except Exception as e:
+         print(f"✗ Memory pipeline test failed: {e}")
+
+     # Test the Redis dedup pipeline's fingerprint generation
+     try:
+         from crawlo.pipelines.redis_dedup_pipeline import RedisDedupPipeline
+         redis_pipeline = RedisDedupPipeline()
+         redis_fingerprint = redis_pipeline._generate_item_fingerprint(test_item)
+         print(f"Redis pipeline fingerprint: {redis_fingerprint}")
+         assert redis_fingerprint == expected_fingerprint, "Redis pipeline fingerprint mismatch"
+         print("✓ Redis pipeline fingerprint matches")
+     except Exception as e:
+         print(f"✗ Redis pipeline test failed: {e}")
+
+     # Test the Bloom dedup pipeline's fingerprint generation
+     try:
+         from crawlo.pipelines.bloom_dedup_pipeline import BloomDedupPipeline
+         bloom_pipeline = BloomDedupPipeline()
+         bloom_fingerprint = bloom_pipeline._generate_item_fingerprint(test_item)
+         print(f"Bloom pipeline fingerprint: {bloom_fingerprint}")
+         assert bloom_fingerprint == expected_fingerprint, "Bloom pipeline fingerprint mismatch"
+         print("✓ Bloom pipeline fingerprint matches")
+     except Exception as e:
+         print(f"✗ Bloom pipeline test failed: {e}")
+
+     # Test the database dedup pipeline's fingerprint generation
+     try:
+         from crawlo.pipelines.database_dedup_pipeline import DatabaseDedupPipeline
+         database_pipeline = DatabaseDedupPipeline()
+         database_fingerprint = database_pipeline._generate_item_fingerprint(test_item)
+         print(f"Database pipeline fingerprint: {database_fingerprint}")
+         assert database_fingerprint == expected_fingerprint, "Database pipeline fingerprint mismatch"
+         print("✓ Database pipeline fingerprint matches")
+     except Exception as e:
+         print(f"✗ Database pipeline test failed: {e}")
+
+     # Test the distributed coordinator's fingerprint generation
+     try:
+         from crawlo.tools.distributed_coordinator import DeduplicationTool
+         dedup_tool = DeduplicationTool()
+         tool_fingerprint = dedup_tool.generate_fingerprint(test_item.to_dict())
+         print(f"Deduplication tool fingerprint: {tool_fingerprint}")
+         # Note: a dict is passed here because the tool's generate_fingerprint works on raw data
+         expected_tool_fingerprint = FingerprintGenerator.data_fingerprint(test_item.to_dict())
+         assert tool_fingerprint == expected_tool_fingerprint, "Deduplication tool fingerprint mismatch"
+         print("✓ Deduplication tool fingerprint matches")
+     except Exception as e:
+         print(f"✗ Deduplication tool test failed: {e}")
+
+
+ def test_fingerprint_stability():
+     """Test fingerprint stability"""
+     # Create the same test item twice
+     item1 = MockItem(
+         title="Test Title",
+         url="https://example.com",
+         content="Test content",
+         price=99.99
+     )
+
+     item2 = MockItem(
+         title="Test Title",
+         url="https://example.com",
+         content="Test content",
+         price=99.99
+     )
+
+     # Generate fingerprints
+     fingerprint1 = FingerprintGenerator.item_fingerprint(item1)
+     fingerprint2 = FingerprintGenerator.item_fingerprint(item2)
+
+     # Verify that identical data yields identical fingerprints
+     print(f"\nFirst fingerprint: {fingerprint1}")
+     print(f"Second fingerprint: {fingerprint2}")
+     assert fingerprint1 == fingerprint2, "Same items should generate same fingerprints"
+     print("✓ Identical data produces identical fingerprints")
+
+
+ if __name__ == '__main__':
+     test_all_pipeline_fingerprints()
+     test_fingerprint_stability()
+     print("\n🎉 All tests passed!")