cobweb-launcher 1.1.13__tar.gz → 1.1.14__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of cobweb-launcher might be problematic.

Files changed (38)
  1. {cobweb-launcher-1.1.13/cobweb_launcher.egg-info → cobweb-launcher-1.1.14}/PKG-INFO +1 -1
  2. cobweb-launcher-1.1.14/cobweb/crawlers/base_crawler.py +128 -0
  3. {cobweb-launcher-1.1.13 → cobweb-launcher-1.1.14}/cobweb/launchers/launcher.py +1 -1
  4. {cobweb-launcher-1.1.13 → cobweb-launcher-1.1.14}/cobweb/launchers/launcher_pro.py +1 -1
  5. {cobweb-launcher-1.1.13 → cobweb-launcher-1.1.14/cobweb_launcher.egg-info}/PKG-INFO +1 -1
  6. {cobweb-launcher-1.1.13 → cobweb-launcher-1.1.14}/setup.py +1 -1
  7. cobweb-launcher-1.1.13/cobweb/crawlers/base_crawler.py +0 -122
  8. {cobweb-launcher-1.1.13 → cobweb-launcher-1.1.14}/LICENSE +0 -0
  9. {cobweb-launcher-1.1.13 → cobweb-launcher-1.1.14}/README.md +0 -0
  10. {cobweb-launcher-1.1.13 → cobweb-launcher-1.1.14}/cobweb/__init__.py +0 -0
  11. {cobweb-launcher-1.1.13 → cobweb-launcher-1.1.14}/cobweb/base/__init__.py +0 -0
  12. {cobweb-launcher-1.1.13 → cobweb-launcher-1.1.14}/cobweb/base/common_queue.py +0 -0
  13. {cobweb-launcher-1.1.13 → cobweb-launcher-1.1.14}/cobweb/base/decorators.py +0 -0
  14. {cobweb-launcher-1.1.13 → cobweb-launcher-1.1.14}/cobweb/base/item.py +0 -0
  15. {cobweb-launcher-1.1.13 → cobweb-launcher-1.1.14}/cobweb/base/log.py +0 -0
  16. {cobweb-launcher-1.1.13 → cobweb-launcher-1.1.14}/cobweb/base/request.py +0 -0
  17. {cobweb-launcher-1.1.13 → cobweb-launcher-1.1.14}/cobweb/base/response.py +0 -0
  18. {cobweb-launcher-1.1.13 → cobweb-launcher-1.1.14}/cobweb/base/seed.py +0 -0
  19. {cobweb-launcher-1.1.13 → cobweb-launcher-1.1.14}/cobweb/constant.py +0 -0
  20. {cobweb-launcher-1.1.13 → cobweb-launcher-1.1.14}/cobweb/crawlers/__init__.py +0 -0
  21. {cobweb-launcher-1.1.13 → cobweb-launcher-1.1.14}/cobweb/crawlers/file_crawler.py +0 -0
  22. {cobweb-launcher-1.1.13 → cobweb-launcher-1.1.14}/cobweb/db/__init__.py +0 -0
  23. {cobweb-launcher-1.1.13 → cobweb-launcher-1.1.14}/cobweb/db/redis_db.py +0 -0
  24. {cobweb-launcher-1.1.13 → cobweb-launcher-1.1.14}/cobweb/exceptions/__init__.py +0 -0
  25. {cobweb-launcher-1.1.13 → cobweb-launcher-1.1.14}/cobweb/exceptions/oss_db_exception.py +0 -0
  26. {cobweb-launcher-1.1.13 → cobweb-launcher-1.1.14}/cobweb/launchers/__init__.py +0 -0
  27. {cobweb-launcher-1.1.13 → cobweb-launcher-1.1.14}/cobweb/pipelines/__init__.py +0 -0
  28. {cobweb-launcher-1.1.13 → cobweb-launcher-1.1.14}/cobweb/pipelines/base_pipeline.py +0 -0
  29. {cobweb-launcher-1.1.13 → cobweb-launcher-1.1.14}/cobweb/pipelines/loghub_pipeline.py +0 -0
  30. {cobweb-launcher-1.1.13 → cobweb-launcher-1.1.14}/cobweb/setting.py +0 -0
  31. {cobweb-launcher-1.1.13 → cobweb-launcher-1.1.14}/cobweb/utils/__init__.py +0 -0
  32. {cobweb-launcher-1.1.13 → cobweb-launcher-1.1.14}/cobweb/utils/oss.py +0 -0
  33. {cobweb-launcher-1.1.13 → cobweb-launcher-1.1.14}/cobweb/utils/tools.py +0 -0
  34. {cobweb-launcher-1.1.13 → cobweb-launcher-1.1.14}/cobweb_launcher.egg-info/SOURCES.txt +0 -0
  35. {cobweb-launcher-1.1.13 → cobweb-launcher-1.1.14}/cobweb_launcher.egg-info/dependency_links.txt +0 -0
  36. {cobweb-launcher-1.1.13 → cobweb-launcher-1.1.14}/cobweb_launcher.egg-info/requires.txt +0 -0
  37. {cobweb-launcher-1.1.13 → cobweb-launcher-1.1.14}/cobweb_launcher.egg-info/top_level.txt +0 -0
  38. {cobweb-launcher-1.1.13 → cobweb-launcher-1.1.14}/setup.cfg +0 -0
{cobweb-launcher-1.1.13/cobweb_launcher.egg-info → cobweb-launcher-1.1.14}/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: cobweb-launcher
- Version: 1.1.13
+ Version: 1.1.14
  Summary: spider_hole
  Home-page: https://github.com/Juannie-PP/cobweb
  Author: Juannie-PP
cobweb-launcher-1.1.14/cobweb/crawlers/base_crawler.py
@@ -0,0 +1,128 @@
+ import threading
+
+ from inspect import isgenerator
+ from typing import Union, Callable, Mapping
+
+ from cobweb.base import Queue, Seed, BaseItem, Request, Response, logger
+ from cobweb.constant import DealModel, LogTemplate
+ from cobweb.utils import download_log_info
+ from cobweb import setting
+
+
+ class Crawler(threading.Thread):
+
+     def __init__(
+         self,
+         upload_queue: Queue,
+         custom_func: Union[Mapping[str, Callable]],
+         launcher_queue: Union[Mapping[str, Queue]],
+     ):
+         super().__init__()
+
+         self.upload_queue = upload_queue
+         for func_name, _callable in custom_func.items():
+             if isinstance(_callable, Callable):
+                 self.__setattr__(func_name, _callable)
+
+         self.launcher_queue = launcher_queue
+
+         self.spider_thread_num = setting.SPIDER_THREAD_NUM
+         self.max_retries = setting.SPIDER_MAX_RETRIES
+
+     @staticmethod
+     def request(seed: Seed) -> Union[Request, BaseItem]:
+         stream = True if setting.DOWNLOAD_MODEL else False
+         yield Request(seed.url, seed, stream=stream, timeout=5)
+
+     @staticmethod
+     def download(item: Request) -> Union[Seed, BaseItem, Response, str]:
+         response = item.download()
+         yield Response(item.seed, response, **item.to_dict)
+
+     @staticmethod
+     def parse(item: Response) -> BaseItem:
+         pass
+
+     def get_seed(self) -> Seed:
+         return self.launcher_queue['todo'].pop()
+
+     def distribute(self, item, seed):
+         if isinstance(item, BaseItem):
+             self.upload_queue.push(item)
+         elif isinstance(item, Seed):
+             self.launcher_queue['new'].push(item)
+         elif isinstance(item, str) and item == DealModel.poll:
+             self.launcher_queue['todo'].push(seed)
+         elif isinstance(item, str) and item == DealModel.done:
+             self.launcher_queue['done'].push(seed)
+         elif isinstance(item, str) and item == DealModel.fail:
+             seed.identifier = DealModel.fail
+             self.launcher_queue['done'].push(seed)
+         else:
+             raise TypeError("yield value type error!")
+
+     def spider(self):
+         while True:
+             seed = self.get_seed()
+
+             if not seed:
+                 continue
+
+             elif seed.params.retry >= self.max_retries:
+                 seed.params.identifier = DealModel.fail
+                 self.launcher_queue['done'].push(seed)
+                 continue
+
+             seed_detail_log_info = download_log_info(seed.to_dict)
+
+             request_iterators = self.request(seed)
+
+             if not isgenerator(request_iterators):
+                 raise TypeError("request function isn't a generator!")
+
+             for request_item in request_iterators:
+
+                 if isinstance(request_item, BaseItem):
+                     self.upload_queue.push(request_item)
+
+                 elif isinstance(request_item, Request):
+                     try:
+                         download_iterators = self.download(request_item)
+                         if not isgenerator(download_iterators):
+                             raise TypeError("download function isn't a generator")
+                         for download_item in download_iterators:
+                             if isinstance(download_item, Response):
+                                 response_detail_log_info = download_log_info(download_item.to_dict)
+                                 logger.info(LogTemplate.download_info.format(
+                                     detail=seed_detail_log_info,
+                                     retry=seed.params.retry,
+                                     priority=seed.params.priority,
+                                     seed_version=seed.params.seed_version,
+                                     identifier=seed.identifier or "",
+                                     status=download_item.response,
+                                     response=response_detail_log_info
+                                 ))
+                                 parse_iterators = self.parse(download_item)
+                                 if not isgenerator(parse_iterators):
+                                     raise TypeError("parse function isn't a generator")
+                                 for parse_item in parse_iterators:
+                                     if isinstance(parse_item, Response):
+                                         raise TypeError("upload_item can't be a Response instance")
+                                     self.distribute(parse_item, seed)
+                             else:
+                                 self.distribute(download_item, seed)
+                     except Exception as e:
+                         logger.info(LogTemplate.download_exception.format(
+                             detail=seed_detail_log_info,
+                             retry=seed.params.retry,
+                             priority=seed.params.priority,
+                             seed_version=seed.params.seed_version,
+                             identifier=seed.identifier or "", exception=e
+                         ))
+                         seed.params.retry += 1
+                         self.launcher_queue['todo'].push(seed)
+
+     def run(self):
+         for index in range(self.spider_thread_num):
+             threading.Thread(name=f"spider_{index}", target=self.spider).start()
+
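The rewritten base_crawler.py above replaces the single-pass request/download flow of 1.1.13 with three generator hooks (request, download, parse) whose yielded values are routed by distribute(). A minimal sketch of a subclass built on that pipeline follows; the class name, follow-up URL, and the Seed constructor call are illustrative assumptions, not part of the package:

# Illustrative sketch only; names marked as assumptions are not from the released package.
from cobweb.base import Request, Response, Seed
from cobweb.constant import DealModel
from cobweb.crawlers.base_crawler import Crawler


class MyCrawler(Crawler):

    @staticmethod
    def request(seed: Seed):
        # Must be a generator: spider() raises TypeError for non-generator hooks.
        yield Request(seed.url, seed, timeout=10)

    @staticmethod
    def download(item: Request):
        # Mirrors the base implementation: perform the request, wrap it in a Response.
        response = item.download()
        yield Response(item.seed, response, **item.to_dict)

    @staticmethod
    def parse(item: Response):
        # Each yielded value is routed by distribute():
        #   Seed        -> launcher_queue['new']
        #   BaseItem    -> upload_queue
        #   DealModel.* -> todo/done queues
        yield Seed("https://example.com/next")  # assumed Seed(url) signature, for illustration
        yield DealModel.done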
{cobweb-launcher-1.1.13 → cobweb-launcher-1.1.14}/cobweb/launchers/launcher.py
@@ -93,7 +93,7 @@ class Launcher(threading.Thread):
      @launcher.request
      def request(seed: Seed) -> Union[Request, BaseItem]:
          ...
-         return Request(seed.url, seed)
+         yield Request(seed.url, seed)
  """
  def decorator(func):
      self.__CUSTOM_FUNC__["request"] = func
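The corrected docstring tracks the new contract: a custom request hook registered through the launcher decorator is now consumed as a generator, so it must yield Request objects rather than return one. A brief usage sketch, assuming LauncherPro is importable from cobweb.launchers and that its constructor accepts the keyword arguments shown (both assumptions):

# Sketch of the updated registration pattern; launcher construction details are assumed.
from typing import Union

from cobweb.base import BaseItem, Request, Seed
from cobweb.launchers import LauncherPro  # assumed to be re-exported by cobweb.launchers

launcher = LauncherPro(task="demo", project="demo")  # assumed keyword arguments, for illustration


@launcher.request
def request(seed: Seed) -> Union[Request, BaseItem]:
    # Under 1.1.14 the hook is iterated as a generator, so yield instead of return.
    yield Request(seed.url, seed, timeout=10)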
{cobweb-launcher-1.1.13 → cobweb-launcher-1.1.14}/cobweb/launchers/launcher_pro.py
@@ -117,7 +117,7 @@ class LauncherPro(Launcher):
  seed = self.__LAUNCHER_QUEUE__['done'].pop()
  if not seed:
      break
- if seed.params.identifier == DealModel.fail:
+ if seed.identifier == DealModel.fail:
      f_seeds.append(seed.to_string)
  elif self._done_model == 1:
      s_seeds.append(seed.to_string)
{cobweb-launcher-1.1.13 → cobweb-launcher-1.1.14/cobweb_launcher.egg-info}/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: cobweb-launcher
- Version: 1.1.13
+ Version: 1.1.14
  Summary: spider_hole
  Home-page: https://github.com/Juannie-PP/cobweb
  Author: Juannie-PP
{cobweb-launcher-1.1.13 → cobweb-launcher-1.1.14}/setup.py
@@ -5,7 +5,7 @@ with open("README.md", "r", encoding="utf-8") as fh:

  setup(
      name="cobweb-launcher",
-     version="1.1.13",
+     version="1.1.14",
      packages=find_packages(),
      url="https://github.com/Juannie-PP/cobweb",
      license="MIT",
cobweb-launcher-1.1.13/cobweb/crawlers/base_crawler.py
@@ -1,122 +0,0 @@
- import threading
-
- from inspect import isgenerator
- from typing import Union, Callable, Mapping
-
- from cobweb.base import Queue, Seed, BaseItem, Request, Response, logger
- from cobweb.constant import DealModel, LogTemplate
- from cobweb.utils import download_log_info
- from cobweb import setting
-
-
- class Crawler(threading.Thread):
-
-     def __init__(
-         self,
-         upload_queue: Queue,
-         custom_func: Union[Mapping[str, Callable]],
-         launcher_queue: Union[Mapping[str, Queue]],
-     ):
-         super().__init__()
-
-         self.upload_queue = upload_queue
-         for func_name, _callable in custom_func.items():
-             if isinstance(_callable, Callable):
-                 self.__setattr__(func_name, _callable)
-
-         self.launcher_queue = launcher_queue
-
-         self.spider_thread_num = setting.SPIDER_THREAD_NUM
-         self.max_retries = setting.SPIDER_MAX_RETRIES
-
-     @staticmethod
-     def request(seed: Seed) -> Union[Request, BaseItem]:
-         stream = True if setting.DOWNLOAD_MODEL else False
-         return Request(seed.url, seed, stream=stream, timeout=5)
-
-     @staticmethod
-     def download(item: Request) -> Union[Seed, BaseItem, Response, str]:
-         response = item.download()
-         yield Response(item.seed, response, **item.to_dict)
-
-     @staticmethod
-     def parse(item: Response) -> BaseItem:
-         pass
-
-     def get(self) -> Seed:
-         return self.launcher_queue['todo'].pop()
-
-     def spider(self):
-         while True:
-             seed = self.get()
-
-             if not seed:
-                 continue
-
-             elif seed.params.retry >= self.max_retries:
-                 seed.params.identifier = DealModel.fail
-                 self.launcher_queue['done'].push(seed)
-                 continue
-
-             item = self.request(seed)
-
-             if isinstance(item, Request):
-
-                 download_iterators = self.download(item)
-
-                 if not isgenerator(download_iterators):
-                     raise TypeError("download function isn't a generator")
-
-                 seed_detail_log_info = download_log_info(seed.to_dict)
-
-                 try:
-                     for it in download_iterators:
-                         if isinstance(it, Response):
-                             response_detail_log_info = download_log_info(it.to_dict)
-                             logger.info(LogTemplate.download_info.format(
-                                 detail=seed_detail_log_info, retry=item.seed.params.retry,
-                                 priority=item.seed.params.priority,
-                                 seed_version=item.seed.params.seed_version,
-                                 identifier=item.seed.identifier or "",
-                                 status=it.response, response=response_detail_log_info
-                             ))
-                             parse_iterators = self.parse(it)
-                             if not isgenerator(parse_iterators):
-                                 raise TypeError("parse function isn't a generator")
-                             for upload_item in parse_iterators:
-                                 if not isinstance(upload_item, BaseItem):
-                                     raise TypeError("upload_item isn't BaseItem subclass")
-                                 self.upload_queue.push(upload_item)
-                         elif isinstance(it, BaseItem):
-                             self.upload_queue.push(it)
-                         elif isinstance(it, Seed):
-                             self.launcher_queue['new'].push(it)
-                         elif isinstance(it, str) and it == DealModel.poll:
-                             self.launcher_queue['todo'].push(item)
-                             break
-                         elif isinstance(it, str) and it == DealModel.done:
-                             self.launcher_queue['done'].push(seed)
-                             break
-                         elif isinstance(it, str) and it == DealModel.fail:
-                             seed.params.identifier = DealModel.fail
-                             self.launcher_queue['done'].push(seed)
-                             break
-                         else:
-                             raise TypeError("yield value type error!")
-
-                 except Exception as e:
-                     logger.info(LogTemplate.download_exception.format(
-                         detail=seed_detail_log_info, retry=seed.params.retry,
-                         priority=seed.params.priority, seed_version=seed.params.seed_version,
-                         identifier=seed.identifier or "", exception=e
-                     ))
-                     seed.params.retry += 1
-                     self.launcher_queue['todo'].push(seed)
-
-             elif isinstance(item, BaseItem):
-                 self.upload_queue.push(item)
-
-     def run(self):
-         for index in range(self.spider_thread_num):
-             threading.Thread(name=f"spider_{index}", target=self.spider).start()
-