cobweb-launcher 1.0.6__tar.gz → 1.0.8__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of cobweb-launcher might be problematic. Click here for more details.

Files changed (37)
  1. {cobweb-launcher-1.0.6/cobweb_launcher.egg-info → cobweb-launcher-1.0.8}/PKG-INFO +1 -1
  2. {cobweb-launcher-1.0.6 → cobweb-launcher-1.0.8}/cobweb/crawlers/file_crawler.py +7 -16
  3. {cobweb-launcher-1.0.6 → cobweb-launcher-1.0.8}/cobweb/setting.py +1 -1
  4. {cobweb-launcher-1.0.6 → cobweb-launcher-1.0.8}/cobweb/utils/tools.py +1 -1
  5. {cobweb-launcher-1.0.6 → cobweb-launcher-1.0.8/cobweb_launcher.egg-info}/PKG-INFO +1 -1
  6. {cobweb-launcher-1.0.6 → cobweb-launcher-1.0.8}/setup.py +1 -1
  7. {cobweb-launcher-1.0.6 → cobweb-launcher-1.0.8}/LICENSE +0 -0
  8. {cobweb-launcher-1.0.6 → cobweb-launcher-1.0.8}/README.md +0 -0
  9. {cobweb-launcher-1.0.6 → cobweb-launcher-1.0.8}/cobweb/__init__.py +0 -0
  10. {cobweb-launcher-1.0.6 → cobweb-launcher-1.0.8}/cobweb/base/__init__.py +0 -0
  11. {cobweb-launcher-1.0.6 → cobweb-launcher-1.0.8}/cobweb/base/common_queue.py +0 -0
  12. {cobweb-launcher-1.0.6 → cobweb-launcher-1.0.8}/cobweb/base/decorators.py +0 -0
  13. {cobweb-launcher-1.0.6 → cobweb-launcher-1.0.8}/cobweb/base/item.py +0 -0
  14. {cobweb-launcher-1.0.6 → cobweb-launcher-1.0.8}/cobweb/base/log.py +0 -0
  15. {cobweb-launcher-1.0.6 → cobweb-launcher-1.0.8}/cobweb/base/request.py +0 -0
  16. {cobweb-launcher-1.0.6 → cobweb-launcher-1.0.8}/cobweb/base/response.py +0 -0
  17. {cobweb-launcher-1.0.6 → cobweb-launcher-1.0.8}/cobweb/base/seed.py +0 -0
  18. {cobweb-launcher-1.0.6 → cobweb-launcher-1.0.8}/cobweb/constant.py +0 -0
  19. {cobweb-launcher-1.0.6 → cobweb-launcher-1.0.8}/cobweb/crawlers/__init__.py +0 -0
  20. {cobweb-launcher-1.0.6 → cobweb-launcher-1.0.8}/cobweb/crawlers/base_crawler.py +0 -0
  21. {cobweb-launcher-1.0.6 → cobweb-launcher-1.0.8}/cobweb/db/__init__.py +0 -0
  22. {cobweb-launcher-1.0.6 → cobweb-launcher-1.0.8}/cobweb/db/redis_db.py +0 -0
  23. {cobweb-launcher-1.0.6 → cobweb-launcher-1.0.8}/cobweb/exceptions/__init__.py +0 -0
  24. {cobweb-launcher-1.0.6 → cobweb-launcher-1.0.8}/cobweb/exceptions/oss_db_exception.py +0 -0
  25. {cobweb-launcher-1.0.6 → cobweb-launcher-1.0.8}/cobweb/launchers/__init__.py +0 -0
  26. {cobweb-launcher-1.0.6 → cobweb-launcher-1.0.8}/cobweb/launchers/launcher.py +0 -0
  27. {cobweb-launcher-1.0.6 → cobweb-launcher-1.0.8}/cobweb/launchers/launcher_pro.py +0 -0
  28. {cobweb-launcher-1.0.6 → cobweb-launcher-1.0.8}/cobweb/pipelines/__init__.py +0 -0
  29. {cobweb-launcher-1.0.6 → cobweb-launcher-1.0.8}/cobweb/pipelines/base_pipeline.py +0 -0
  30. {cobweb-launcher-1.0.6 → cobweb-launcher-1.0.8}/cobweb/pipelines/loghub_pipeline.py +0 -0
  31. {cobweb-launcher-1.0.6 → cobweb-launcher-1.0.8}/cobweb/utils/__init__.py +0 -0
  32. {cobweb-launcher-1.0.6 → cobweb-launcher-1.0.8}/cobweb/utils/oss.py +0 -0
  33. {cobweb-launcher-1.0.6 → cobweb-launcher-1.0.8}/cobweb_launcher.egg-info/SOURCES.txt +0 -0
  34. {cobweb-launcher-1.0.6 → cobweb-launcher-1.0.8}/cobweb_launcher.egg-info/dependency_links.txt +0 -0
  35. {cobweb-launcher-1.0.6 → cobweb-launcher-1.0.8}/cobweb_launcher.egg-info/requires.txt +0 -0
  36. {cobweb-launcher-1.0.6 → cobweb-launcher-1.0.8}/cobweb_launcher.egg-info/top_level.txt +0 -0
  37. {cobweb-launcher-1.0.6 → cobweb-launcher-1.0.8}/setup.cfg +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: cobweb-launcher
3
- Version: 1.0.6
3
+ Version: 1.0.8
4
4
  Summary: spider_hole
5
5
  Home-page: https://github.com/Juannie-PP/cobweb
6
6
  Author: Juannie-PP
@@ -20,8 +20,6 @@ class CrawlerAir(Crawler):
20
20
  if CrawlerAir.oss_util.exists(key):
21
21
  content_length = CrawlerAir.oss_util.head(key).content_length
22
22
  yield Response(item.seed, "exists", bucket_name=bucket_name, data_size=content_length, **seed_dict)
23
- # data, cols = download_meta(item.seed, bucket_name=bucket_name, data_size=content_length, **seed_dict)
24
- # yield DownloadItem(item.seed, sid=item.seed.sid, cols=cols, data=data)
25
23
 
26
24
  end = seed_dict.get("end", "")
27
25
  start = seed_dict.get("start", "0")
@@ -37,20 +35,20 @@ class CrawlerAir(Crawler):
37
35
 
38
36
  response = item.download()
39
37
 
40
- content_length = response.headers.get("content-length") or 0
38
+ content_length = int(response.headers.get("content-length", 0))
41
39
  content_type = response.headers.get("content-type", "").split(";")[0]
42
40
  if content_type and content_type in setting.FILE_FILTER_CONTENT_TYPE:
43
41
  yield Response(
44
42
  item.seed, response, filter=True, msg=f"response content type is {content_type}",
45
43
  bucket_name=bucket_name, data_size=content_length, **seed_dict
46
44
  )
47
- elif position == 1 and min_upload_size >= int(content_length) > 0:
45
+ elif position == 1 and min_upload_size >= content_length > 0:
48
46
  """过小文件标识返回"""
49
47
  yield Response(
50
48
  item.seed, response, filter=True, msg="file size is too small",
51
49
  bucket_name=bucket_name, data_size=content_length, **seed_dict
52
50
  )
53
- elif position == 1 and chunk_size > int(content_length) > min_upload_size:
51
+ elif position == 1 and chunk_size > content_length > min_upload_size:
54
52
  """小文件直接下载"""
55
53
  for part_data in response.iter_content(chunk_size):
56
54
  content += part_data
@@ -80,15 +78,11 @@ class CrawlerAir(Crawler):
80
78
  content_length += len(content)
81
79
  CrawlerAir.oss_util.merge(key, upload_id)
82
80
  yield Response(item.seed, response, bucket_name=bucket_name, data_size=content_length, **seed_dict)
83
- # data, cols = download_meta(item.seed, bucket_name, data_size=content_length, **seed_dict)
84
- # yield DownloadItem(item.seed, sid=item.seed.sid, cols=cols, data=data)
85
81
 
86
82
  elif item.seed.params.identifier == "merge":
87
83
  CrawlerAir.oss_util.merge(key, seed_dict["upload_id"])
88
84
  content_length = CrawlerAir.oss_util.head(key).content_length
89
85
  yield Response(item.seed, "merge", bucket_name=bucket_name, data_size=content_length, **seed_dict)
90
- # data, cols = download_meta(item.seed, bucket_name, data_size=content_length, **seed_dict)
91
- # yield DownloadItem(item.seed, sid=item.seed.sid, cols=cols, data=data)
92
86
  except OssDBPutPartError:
93
87
  yield Seed(seed_dict)
94
88
  except OssDBMergeError:
@@ -123,20 +117,20 @@ class CrawlerPro(Crawler):
123
117
 
124
118
  response = item.download()
125
119
 
126
- content_length = response.headers.get("content-length") or 0
120
+ content_length = int(response.headers.get("content-length", 0))
127
121
  content_type = response.headers.get("content-type", "").split(";")[0]
128
122
  if content_type and content_type in setting.FILE_FILTER_CONTENT_TYPE:
129
123
  yield Response(
130
124
  item.seed, response, filter=True, msg=f"response content type is {content_type}",
131
125
  bucket_name=bucket_name, data_size=content_length, **seed_dict
132
126
  )
133
- elif position == 1 and min_upload_size >= int(content_length) > 0:
127
+ elif position == 1 and min_upload_size >= content_length > 0:
134
128
  """过小文件标识返回"""
135
129
  yield Response(
136
130
  item.seed, response, filter=True, msg="file size is too small",
137
131
  bucket_name=bucket_name, data_size=content_length, **seed_dict
138
132
  )
139
- elif position == 1 and chunk_size > int(content_length) > min_upload_size:
133
+ elif position == 1 and chunk_size > content_length > min_upload_size:
140
134
  """小文件直接下载"""
141
135
  for part_data in response.iter_content(chunk_size):
142
136
  content += part_data
@@ -166,15 +160,12 @@ class CrawlerPro(Crawler):
166
160
  content_length += len(content)
167
161
  CrawlerAir.oss_util.merge(key, upload_id)
168
162
  yield Response(item.seed, response, bucket_name=bucket_name, data_size=content_length, **seed_dict)
169
- # data, cols = download_meta(item.seed, bucket_name, data_size=content_length, **seed_dict)
170
- # yield DownloadItem(item.seed, sid=item.seed.sid, cols=cols, data=data)
171
163
 
172
164
  elif item.seed.params.identifier == "merge":
173
165
  CrawlerAir.oss_util.merge(key, seed_dict["upload_id"])
174
166
  content_length = CrawlerAir.oss_util.head(key).content_length
175
167
  yield Response(item.seed, "merge", bucket_name=bucket_name, data_size=content_length, **seed_dict)
176
- # data, cols = download_meta(item.seed, bucket_name, data_size=content_length, **seed_dict)
177
- # yield DownloadItem(item.seed, sid=item.seed.sid, cols=cols, data=data)
168
+
178
169
  except OssDBPutPartError:
179
170
  yield Seed(seed_dict)
180
171
  except OssDBMergeError:
@@ -23,7 +23,7 @@ OSS_BUCKET = os.getenv("OSS_BUCKET")
23
23
  OSS_ENDPOINT = os.getenv("OSS_ENDPOINT")
24
24
  OSS_ACCESS_KEY = os.getenv("OSS_ACCESS_KEY")
25
25
  OSS_SECRET_KEY = os.getenv("OSS_SECRET_KEY")
26
- OSS_MIN_UPLOAD_SIZE = 1024 * 100
26
+ OSS_MIN_UPLOAD_SIZE = 1024
27
27
  OSS_CHUNK_SIZE = 1024 ** 2
28
28
 
29
29
  # 采集器选择
@@ -39,4 +39,4 @@ def dynamic_load_class(model_info):
39
39
 
40
40
 
41
41
  def download_log_info(item:dict) -> str:
42
- return "\n".join([" " * 12 + f"{k.ljust(14)}: {v}" for k, v in item.items()])
42
+ return "\n".join([" " * 12 + f"{str(k).ljust(14)}: {str(v)}" for k, v in item.items()])
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: cobweb-launcher
3
- Version: 1.0.6
3
+ Version: 1.0.8
4
4
  Summary: spider_hole
5
5
  Home-page: https://github.com/Juannie-PP/cobweb
6
6
  Author: Juannie-PP
@@ -5,7 +5,7 @@ with open("README.md", "r", encoding="utf-8") as fh:
5
5
 
6
6
  setup(
7
7
  name="cobweb-launcher",
8
- version="1.0.6",
8
+ version="1.0.8",
9
9
  packages=find_packages(),
10
10
  url="https://github.com/Juannie-PP/cobweb",
11
11
  license="MIT",
File without changes