cobweb-launcher 1.1.11__py3-none-any.whl → 1.1.13__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of cobweb-launcher might be problematic.

cobweb/base/response.py CHANGED
@@ -20,3 +20,4 @@ class Response:
         _dict.pop('seed')
         _dict.pop('response')
         return _dict
+
cobweb/base/seed.py CHANGED
@@ -5,11 +5,10 @@ import hashlib

 class SeedParams:

-    def __init__(self, retry, priority, seed_version, identifier=None):
+    def __init__(self, retry, priority, seed_version):
         self.retry = retry or 0
         self.priority = priority or 300
         self.seed_version = seed_version or int(time.time())
-        self.identifier = identifier


 class Seed:
@@ -18,7 +17,6 @@ class Seed:
         "retry",
         "priority",
         "seed_version",
-        "identifier"
     ]

     def __init__(
@@ -28,7 +26,6 @@ class Seed:
            retry=None,
            priority=None,
            seed_version=None,
-            identifier=None,
            **kwargs
     ):
         if any(isinstance(seed, t) for t in (str, bytes)):
@@ -50,7 +47,6 @@ class Seed:
            "retry": retry,
            "priority": priority,
            "seed_version": seed_version,
-            "identifier": identifier
         }

         if kwargs:
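
Editor's note: `identifier` disappears from `SeedParams` (constructor argument, `__slots__` entry, and the params dict), yet the crawler hunks below still read `item.seed.identifier`. Since `Seed.__init__` keeps `**kwargs`, the field now appears to live directly on the seed object. A minimal before/after sketch of the read path, assuming extra kwargs become seed attributes; the `Seed(seed_dict, identifier="merge")` call shape is taken from this diff:

```python
# Hedged sketch: how identifier is read in 1.1.11 vs 1.1.13.
from cobweb.base import Seed

seed = Seed({"url": "https://example.com/data.bin"}, identifier="merge")

# 1.1.11 (removed in this release): identifier lived on the params object.
#   seed.params.identifier          -> "merge"

# 1.1.13: callers read it off the seed itself, defaulting to "",
# exactly as base_crawler.py now does.
identifier = seed.identifier or ""
print(identifier)  # "merge" (assumes **kwargs land on the seed)
```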
cobweb/crawlers/__init__.py CHANGED
@@ -1,2 +1,2 @@
 from .base_crawler import Crawler
-from .file_crawler import FileCrawlerAir, FileCrawlerPro
+from .file_crawler import FileCrawlerAir
cobweb/crawlers/base_crawler.py CHANGED
@@ -58,18 +58,18 @@ class Crawler(threading.Thread):
                 self.launcher_queue['done'].push(seed)
                 continue

-            seed_detail_log_info = download_log_info(seed.to_dict)
+            item = self.request(seed)

-            try:
-                item = self.request(seed)
+            if isinstance(item, Request):

-                if isinstance(item, Request):
+                download_iterators = self.download(item)

-                    download_iterators = self.download(item)
+                if not isgenerator(download_iterators):
+                    raise TypeError("download function isn't a generator")

-                    if not isgenerator(download_iterators):
-                        raise TypeError("download function isn't a generator")
+                seed_detail_log_info = download_log_info(seed.to_dict)

+                try:
                     for it in download_iterators:
                         if isinstance(it, Response):
                             response_detail_log_info = download_log_info(it.to_dict)
@@ -77,7 +77,7 @@ class Crawler(threading.Thread):
                                 detail=seed_detail_log_info, retry=item.seed.params.retry,
                                 priority=item.seed.params.priority,
                                 seed_version=item.seed.params.seed_version,
-                                identifier=item.seed.params.identifier,
+                                identifier=item.seed.identifier or "",
                                 status=it.response, response=response_detail_log_info
                             ))
                             parse_iterators = self.parse(it)
@@ -104,22 +104,17 @@ class Crawler(threading.Thread):
                                 else:
                                     raise TypeError("yield value type error!")

-                elif isinstance(item, BaseItem):
-                    self.upload_queue.push(item)
-                else:
-                    raise TypeError(
-                        f"request func return value type error!"
-                        f"item.__class__ is {item.__class__.__name__}"
-                    )
-            except Exception as e:
-                logger.info(LogTemplate.download_exception.format(
-                    detail=seed_detail_log_info, retry=seed.params.retry,
-                    priority=seed.params.priority, seed_version=seed.params.seed_version,
-                    identifier=seed.params.identifier, exception=e
-                ))
-                seed.params.retry += 1
-                self.launcher_queue['todo'].push(seed)
-
+                except Exception as e:
+                    logger.info(LogTemplate.download_exception.format(
+                        detail=seed_detail_log_info, retry=seed.params.retry,
+                        priority=seed.params.priority, seed_version=seed.params.seed_version,
+                        identifier=seed.identifier or "", exception=e
+                    ))
+                    seed.params.retry += 1
+                    self.launcher_queue['todo'].push(seed)
+
+            elif isinstance(item, BaseItem):
+                self.upload_queue.push(item)

     def run(self):
         for index in range(self.spider_thread_num):
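
Editor's note: the `try` block no longer wraps `self.request(seed)`, so exceptions raised there are no longer caught, logged, and requeued; only failures during the `download()` iteration increment `seed.params.retry` and push the seed back to `todo`. The old `else: raise TypeError(...)` guard for unexpected `request()` return types is also gone, so such values now fall through silently. The generator contract on `download()` is unchanged; a standalone illustration (hypothetical function names, standard library only):

```python
# Demonstrates the isgenerator() check that base_crawler.py applies to
# the return value of download() before iterating it.
from inspect import isgenerator

def good_download(url):
    yield f"chunk of {url}"          # generator function: passes the check

def bad_download(url):
    return [f"chunk of {url}"]       # plain list: fails the check

for fn in (good_download, bad_download):
    result = fn("https://example.com")
    if not isgenerator(result):
        print(fn.__name__, "-> TypeError: download function isn't a generator")
    else:
        print(fn.__name__, "->", next(result))
```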
cobweb/crawlers/file_crawler.py CHANGED
@@ -1,4 +1,4 @@
-
+import os
 from typing import Union
 from cobweb import setting
 from cobweb.utils import OssUtil
@@ -7,7 +7,7 @@ from cobweb.base import Seed, BaseItem, Request, Response
 from cobweb.exceptions import OssDBPutPartError, OssDBMergeError


-oss_util = OssUtil()
+oss_util = OssUtil(is_path_style=bool(int(os.getenv("PRIVATE_LINK", 0))))


 class FileCrawlerAir(Crawler):
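
Editor's note: the module-level `OssUtil` is now configured from a `PRIVATE_LINK` environment variable, parsed as an integer flag into `is_path_style`. A standalone re-creation of just that expression (only the variable name and the `is_path_style` keyword come from the diff; the helper is illustrative):

```python
import os

def use_path_style() -> bool:
    # Unset or "0" -> False; any non-zero integer string -> True.
    # A non-integer value such as "true" would raise ValueError here.
    return bool(int(os.getenv("PRIVATE_LINK", 0)))

os.environ["PRIVATE_LINK"] = "1"
print(use_path_style())  # True
```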
@@ -15,159 +15,84 @@ class FileCrawlerAir(Crawler):
     @staticmethod
     def download(item: Request) -> Union[Seed, BaseItem, Response, str]:
         seed_dict = item.seed.to_dict
-        bucket_name = oss_util.bucket
+        seed_dict["bucket_name"] = oss_util.bucket
         try:
-            key = item.seed.oss_path or getattr(item, "oss_path", None)
-            if oss_util.exists(key):
-                content_length = oss_util.head(key).content_length
-                yield Response(item.seed, "exists", bucket_name=bucket_name, data_size=content_length, **seed_dict)
-
-            end = seed_dict.get("end", "")
-            start = seed_dict.get("start", "0")
-
-            if end or int(start):
-                item.request_setting["headers"]['Range'] = f'bytes={start}-{end}'
-
-            if not item.seed.params.identifier:
-                content = b""
-                chunk_size = oss_util.chunk_size
-                min_upload_size = oss_util.min_upload_size
-                position = seed_dict.get("position", 1)
-
-                response = item.download()
-
-                content_length = int(response.headers.get("content-length", 0))
-                content_type = response.headers.get("content-type", "").split(";")[0]
-                if content_type and content_type in setting.FILE_FILTER_CONTENT_TYPE:
-                    yield Response(
-                        item.seed, response, filter=True, msg=f"response content type is {content_type}",
-                        bucket_name=bucket_name, data_size=content_length, **seed_dict
-                    )
-                elif position == 1 and min_upload_size >= content_length > 0:
-                    """过小文件标识返回"""
-                    yield Response(
-                        item.seed, response, filter=True, msg="file size is too small",
-                        bucket_name=bucket_name, data_size=content_length, **seed_dict
-                    )
-                elif position == 1 and chunk_size > content_length > min_upload_size:
-                    """小文件直接下载"""
-                    for part_data in response.iter_content(chunk_size):
-                        content += part_data
-                    oss_util.put(key, content)
-                    yield Response(item.seed, response, bucket_name=bucket_name, data_size=content_length, **seed_dict)
-                    response.close()
-                else:
-                    """中大文件同步分片下载"""
-                    upload_content_length = 0
-                    if not seed_dict.get("upload_id"):
-                        seed_dict["upload_id"] = oss_util.init_part(key).upload_id
-                    upload_id = seed_dict["upload_id"]
-                    for part_data in response.iter_content(chunk_size):
-                        content += part_data
-                        if len(content) >= chunk_size:
-                            upload_data = content[:chunk_size]
-                            content = content[chunk_size:]
-                            oss_util.put_part(key, upload_id, position, upload_data)
-                            upload_content_length += len(upload_data)
-                            position += 1
-                            seed_dict['position'] = position
-                            seed_dict['start'] = upload_content_length
-
-                    response.close()
-                    if content:
-                        oss_util.put_part(key, upload_id, position, content)
-                        content_length += len(content)
-                    oss_util.merge(key, upload_id)
-                    yield Response(item.seed, response, bucket_name=bucket_name, data_size=content_length, **seed_dict)
-
-            elif item.seed.params.identifier == "merge":
-                oss_util.merge(key, seed_dict["upload_id"])
-                content_length = oss_util.head(key).content_length
-                yield Response(item.seed, "merge", bucket_name=bucket_name, data_size=content_length, **seed_dict)
-        except OssDBPutPartError:
-            yield Seed(seed_dict)
-        except OssDBMergeError:
-            yield Seed(seed_dict, identifier="merge")
-
+            seed_dict["oss_path"] = key = item.seed.oss_path or getattr(item, "oss_path")

-class FileCrawlerPro(FileCrawlerAir):
-
-    @staticmethod
-    def download(item: Request) -> Union[Seed, BaseItem, Response, str]:
-        seed_dict = item.seed.to_dict
-        bucket_name = oss_util.bucket
-        try:
-            key = item.seed.oss_path or getattr(item, "oss_path", None)
             if oss_util.exists(key):
-                content_length = oss_util.head(key).content_length
-                yield Response(item.seed, "exists", bucket_name=bucket_name, data_size=content_length, **seed_dict)
-
-            end = seed_dict.get("end", "")
-            start = seed_dict.get("start", "0")
-
-            if end or int(start):
-                item.request_setting["headers"]['Range'] = f'bytes={start}-{end}'
-
-            if not item.seed.params.identifier:
-                content = b""
-                chunk_size = oss_util.chunk_size
-                min_upload_size = oss_util.min_upload_size
-                position = seed_dict.get("position", 1)
-
-                response = item.download()
-
-                content_length = int(response.headers.get("content-length", 0))
-                content_type = response.headers.get("content-type", "").split(";")[0]
-                if content_type and content_type in setting.FILE_FILTER_CONTENT_TYPE:
-                    yield Response(
-                        item.seed, response, filter=True, msg=f"response content type is {content_type}",
-                        bucket_name=bucket_name, data_size=content_length, **seed_dict
-                    )
-                    response.close()
-                elif position == 1 and min_upload_size >= content_length > 0:
-                    """过小文件标识返回"""
-                    yield Response(
-                        item.seed, response, filter=True, msg="file size is too small",
-                        bucket_name=bucket_name, data_size=content_length, **seed_dict
-                    )
-                    response.close()
-                elif position == 1 and chunk_size > content_length > min_upload_size:
-                    """小文件直接下载"""
-                    for part_data in response.iter_content(chunk_size):
-                        content += part_data
-                    oss_util.put(key, content)
-                    yield Response(item.seed, response, bucket_name=bucket_name, data_size=content_length, **seed_dict)
-                    response.close()
-                else:
-                    """中大文件同步分片下载"""
-                    upload_content_length = 0
-                    if not seed_dict.get("upload_id"):
-                        seed_dict["upload_id"] = oss_util.init_part(key).upload_id
-                    upload_id = seed_dict["upload_id"]
-                    for part_data in response.iter_content(chunk_size):
-                        content += part_data
-                        if len(content) >= chunk_size:
-                            upload_data = content[:chunk_size]
-                            content = content[chunk_size:]
-                            oss_util.put_part(key, upload_id, position, upload_data)
-                            upload_content_length += len(upload_data)
-                            position += 1
-                            seed_dict['position'] = position
-                            seed_dict['start'] = upload_content_length
-
-                    if content:
-                        oss_util.put_part(key, upload_id, position, content)
-                        content_length += len(content)
-                    oss_util.merge(key, upload_id)
-                    yield Response(item.seed, response, bucket_name=bucket_name, data_size=content_length, **seed_dict)
-                    response.close()
-
-            elif item.seed.params.identifier == "merge":
-                oss_util.merge(key, seed_dict["upload_id"])
-                content_length = oss_util.head(key).content_length
-                yield Response(item.seed, "merge", bucket_name=bucket_name, data_size=content_length, **seed_dict)
+                seed_dict["data_size"] = oss_util.head(key).content_length
+                yield Response(item.seed, "exists", **seed_dict)
+
+            else:
+                seed_dict.setdefault("end", "")
+                seed_dict.setdefault("start", 0)
+
+                if seed_dict["end"] or seed_dict["start"]:
+                    start, end = seed_dict["start"], seed_dict["end"]
+                    item.request_setting["headers"]['Range'] = f'bytes={start}-{end}'
+
+                if not item.seed.identifier:
+                    content = b""
+                    chunk_size = oss_util.chunk_size
+                    min_upload_size = oss_util.min_upload_size
+                    seed_dict.setdefault("position", 1)
+
+                    response = item.download()
+
+                    content_type = response.headers.get("content-type", "").split(";")[0]
+                    seed_dict["data_size"] = content_length = int(response.headers.get("content-length", 0))
+
+                    if content_type and content_type in setting.FILE_FILTER_CONTENT_TYPE:
+                        """过滤响应文件类型"""
+                        response.close()
+                        seed_dict["filter"] = True
+                        seed_dict["msg"] = f"response content type is {content_type}"
+                        yield Response(item.seed, response, **seed_dict)
+
+                    elif seed_dict['position'] == 1 and min_upload_size >= content_length > 0:
+                        """过小文件标识返回"""
+                        response.close()
+                        seed_dict["filter"] = True
+                        seed_dict["msg"] = "file size is too small"
+                        yield Response(item.seed, response, **seed_dict)
+
+                    elif seed_dict['position'] == 1 and chunk_size > content_length > min_upload_size:
+                        """小文件直接下载"""
+                        for part_data in response.iter_content(chunk_size):
+                            content += part_data
+                        response.close()
+                        oss_util.put(key, content)
+                        yield Response(item.seed, response, **seed_dict)
+
+                    else:
+                        """中大文件同步分片下载"""
+                        seed_dict.setdefault("upload_id", oss_util.init_part(key).upload_id)
+
+                        for part_data in response.iter_content(chunk_size):
+                            content += part_data
+                            if len(content) >= chunk_size:
+                                upload_data = content[:chunk_size]
+                                content = content[chunk_size:]
+                                oss_util.put_part(key, seed_dict["upload_id"], seed_dict['position'], content)
+                                seed_dict['start'] += len(upload_data)
+                                seed_dict['position'] += 1
+
+                        response.close()
+
+                        if content:
+                            oss_util.put_part(key, seed_dict["upload_id"], seed_dict['position'], content)
+                        oss_util.merge(key, seed_dict["upload_id"])
+                        seed_dict["data_size"] = oss_util.head(key).content_length
+                        yield Response(item.seed, response, **seed_dict)
+
+                elif item.seed.identifier == "merge":
+                    oss_util.merge(key, seed_dict["upload_id"])
+                    seed_dict["data_size"] = oss_util.head(key).content_length
+                    yield Response(item.seed, "merge", **seed_dict)

         except OssDBPutPartError:
             yield Seed(seed_dict)
         except OssDBMergeError:
             yield Seed(seed_dict, identifier="merge")
+
+
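
Editor's note: the rewritten `download()` collapses `FileCrawlerAir` and `FileCrawlerPro` into one generator and keeps all resume state (`start`, `position`, `upload_id`, `data_size`) inside `seed_dict`, so an `OssDBPutPartError` or `OssDBMergeError` re-seeds the mutated dict and a retry can continue from the recorded offset via the `Range` header. One apparent regression: inside the chunk loop, the new code passes `content` to `put_part()` where 1.1.11 passed the sliced `upload_data`. A self-contained sketch of the intended part bookkeeping against a stubbed OSS client (the stub names are illustrative, not the package's API):

```python
# Hedged sketch of multipart resume bookkeeping; uploads the sliced
# upload_data per part, which is what the 1.1.11 code did.
CHUNK = 4  # bytes per part, tiny for illustration

class StubOss:
    def __init__(self):
        self.parts = {}
    def init_part(self, key):
        return type("Init", (), {"upload_id": "upload-1"})()
    def put_part(self, key, upload_id, position, data):
        self.parts[position] = bytes(data)
    def merge(self, key, upload_id):
        return b"".join(self.parts[p] for p in sorted(self.parts))

oss = StubOss()
seed_dict = {"start": 0, "position": 1}
seed_dict.setdefault("upload_id", oss.init_part("key").upload_id)

content = b""
for part_data in (b"abcd", b"efgh", b"ij"):      # stands in for iter_content()
    content += part_data
    if len(content) >= CHUNK:
        upload_data, content = content[:CHUNK], content[CHUNK:]
        oss.put_part("key", seed_dict["upload_id"], seed_dict["position"], upload_data)
        seed_dict["start"] += len(upload_data)   # resume offset for the Range header
        seed_dict["position"] += 1
if content:                                      # trailing partial part
    oss.put_part("key", seed_dict["upload_id"], seed_dict["position"], content)
print(oss.merge("key", seed_dict["upload_id"]))  # b'abcdefghij'
```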
cobweb_launcher-1.1.13.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: cobweb-launcher
-Version: 1.1.11
+Version: 1.1.13
 Summary: spider_hole
 Home-page: https://github.com/Juannie-PP/cobweb
 Author: Juannie-PP
cobweb_launcher-1.1.13.dist-info/RECORD CHANGED
@@ -7,11 +7,11 @@ cobweb/base/decorators.py,sha256=wDCaQ94aAZGxks9Ljc0aXq6omDXT1_yzFy83ZW6VbVI,930
 cobweb/base/item.py,sha256=pMriHStzUXtSvIf5Z3KXsP-bCvjlG1gM3z33wWeuoH8,966
 cobweb/base/log.py,sha256=L01hXdk3L2qEm9X1FOXQ9VmWIoHSELe0cyZvrdAN61A,2003
 cobweb/base/request.py,sha256=tEkgMVUfdQI-kZuzWuiit9P_q4Q9-_RZh9aXXpc0314,2352
-cobweb/base/response.py,sha256=7h9TwCNqRlwM_fvNmid9zOoRfHbKB8ABSU0eaVUJdVo,405
-cobweb/base/seed.py,sha256=XswH16eEd6iwIBpt71E2S_AsV5UVCcOEOBFoP0r5QRo,2900
-cobweb/crawlers/__init__.py,sha256=_HAXBg7Sq8fsDGSjDm3AQz9aQtLZONpt5b8dSe607mI,91
-cobweb/crawlers/base_crawler.py,sha256=uR1wQ2sJpFovNoAK52293rF03O-jNbv24P5QoNt1tW0,5169
-cobweb/crawlers/file_crawler.py,sha256=AuKu2ZoDoccsI2COnsUWGxKQf67Y-87lL7xqiPv0LRI,8532
+cobweb/base/response.py,sha256=eB1DWMXFCpn3cJ3yzgCRU1WeZAdayGDohRgdjdMUFN4,406
+cobweb/base/seed.py,sha256=QxlXztWjV8VvcHu-cTzyoDHaDvoX26iyoJKiWk64HnE,2759
+cobweb/crawlers/__init__.py,sha256=Rr3DTjD-abMA1_FYcQJZYNvQvcWMuEVcsIU6duqHrw4,75
+cobweb/crawlers/base_crawler.py,sha256=H-cUfrn79PAI8Z7Xv_NhW-9J0G-SiBU4wtzymzAJMYc,4961
+cobweb/crawlers/file_crawler.py,sha256=2Sjbdgxzqd41WykKUQE3QQlGai3T8k-pmHNmPlTchjQ,4454
 cobweb/db/__init__.py,sha256=ut0iEyBLjcJL06WNG_5_d4hO5PJWvDrKWMkDOdmgh2M,30
 cobweb/db/redis_db.py,sha256=NNI2QkRV1hEZI-z-COEncXt88z3pZN6wusKlcQzc8V4,4304
 cobweb/exceptions/__init__.py,sha256=E9SHnJBbhD7fOgPFMswqyOf8SKRDrI_i25L0bSpohvk,32
@@ -25,8 +25,8 @@ cobweb/pipelines/loghub_pipeline.py,sha256=cjPO6w6UJ0jNw2fVvdX0BCdlm58T7dmYXlxzX
 cobweb/utils/__init__.py,sha256=JTE4sBfHnKHhD6w9Auk0MIT7O9BMOamCeryhlHNx3Zg,47
 cobweb/utils/oss.py,sha256=qAl05ybL2Jp6KFjHDHVMfmeBHQmDKPtZleHjHAY7LZc,3277
 cobweb/utils/tools.py,sha256=bVd3iRGBvwhohQAH7AXTTjbmQ54Z35K0O-fatEyhePU,1249
-cobweb_launcher-1.1.11.dist-info/LICENSE,sha256=z1rxSIGOyzcSb3orZxFPxzx-0C1vTocmswqBNxpKfEk,1063
-cobweb_launcher-1.1.11.dist-info/METADATA,sha256=ArmQqXdIsFaO_6ynAgM_UpKaYux3iOcyHXASUWzECn8,1246
-cobweb_launcher-1.1.11.dist-info/WHEEL,sha256=ewwEueio1C2XeHTvT17n8dZUJgOvyCWCt0WVNLClP9o,92
-cobweb_launcher-1.1.11.dist-info/top_level.txt,sha256=4GETBGNsKqiCUezmT-mJn7tjhcDlu7nLIV5gGgHBW4I,7
-cobweb_launcher-1.1.11.dist-info/RECORD,,
+cobweb_launcher-1.1.13.dist-info/LICENSE,sha256=z1rxSIGOyzcSb3orZxFPxzx-0C1vTocmswqBNxpKfEk,1063
+cobweb_launcher-1.1.13.dist-info/METADATA,sha256=ShOTXcWNMBOaLR50TU0QFjR25Ae9aZ-PNyBgwTkxckE,1246
+cobweb_launcher-1.1.13.dist-info/WHEEL,sha256=ewwEueio1C2XeHTvT17n8dZUJgOvyCWCt0WVNLClP9o,92
+cobweb_launcher-1.1.13.dist-info/top_level.txt,sha256=4GETBGNsKqiCUezmT-mJn7tjhcDlu7nLIV5gGgHBW4I,7
+cobweb_launcher-1.1.13.dist-info/RECORD,,