cobweb-launcher 1.2.11__py3-none-any.whl → 1.2.14__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of cobweb-launcher might be problematic; see the advisory on the package's registry page for more details.

@@ -23,25 +23,33 @@ class Crawler(threading.Thread):
23
23
  self,
24
24
  stop: threading.Event,
25
25
  pause: threading.Event,
26
- launcher_queue: Union[Mapping[str, Queue]],
26
+ # launcher_queue: Union[Mapping[str, Queue]],
27
+ get_seed: Callable,
28
+ set_seed: Callable,
29
+ add_seed: Callable,
30
+ delete_seed: Callable,
31
+ upload_data: Callable,
27
32
  custom_func: Union[Mapping[str, Callable]],
28
33
  thread_num: int,
29
- max_retries: int
34
+ max_retries: int,
35
+ time_sleep: int,
30
36
  ):
31
37
  super().__init__()
32
38
 
33
39
  self._stop = stop
34
40
  self._pause = pause
35
- self._new = launcher_queue["new"]
36
- self._todo = launcher_queue["todo"]
37
- self._done = launcher_queue["done"]
38
- self._upload = launcher_queue["upload"]
41
+ self._get_seed = get_seed
42
+ self._set_seed = set_seed
43
+ self._add_seed = add_seed
44
+ self._delete_seed = delete_seed
45
+ self._upload_data = upload_data
39
46
 
40
47
  for func_name, _callable in custom_func.items():
41
48
  if isinstance(_callable, Callable):
42
49
  self.__setattr__(func_name, _callable)
43
50
 
44
51
  self.thread_num = thread_num
52
+ self.time_sleep = time_sleep
45
53
  self.max_retries = max_retries
46
54
 
47
55
  @staticmethod
@@ -64,23 +72,23 @@ class Crawler(threading.Thread):
64
72
 
65
73
  def distribute(self, item, seed):
66
74
  if isinstance(item, BaseItem):
67
- self._upload.push(item)
75
+ self._upload_data(item)
68
76
  elif isinstance(item, Seed):
69
- self._new.push(item)
77
+ self._add_seed(item)
70
78
  elif isinstance(item, str) and item == DealModel.poll:
71
- self._todo.push(seed)
79
+ self._set_seed(seed)
72
80
  elif isinstance(item, str) and item == DealModel.done:
73
- self._done.push(seed)
81
+ self._delete_seed(seed)
74
82
  elif isinstance(item, str) and item == DealModel.fail:
75
83
  seed.params.seed_status = DealModel.fail
76
- self._done.push(seed)
84
+ self._delete_seed(seed)
77
85
  else:
78
86
  raise TypeError("yield value type error!")
79
87
 
80
88
  def spider(self):
81
89
  while not self._stop.is_set():
82
90
 
83
- seed = self._todo.pop()
91
+ seed = self._get_seed()
84
92
 
85
93
  if not seed:
86
94
  time.sleep(1)
@@ -88,7 +96,7 @@ class Crawler(threading.Thread):
88
96
 
89
97
  elif seed.params.retry > self.max_retries:
90
98
  seed.params.seed_status = DealModel.fail
91
- self._done.push(seed)
99
+ self._delete_seed(seed)
92
100
  continue
93
101
 
94
102
  seed_detail_log_info = LogTemplate.log_info(seed.to_dict)
@@ -149,7 +157,9 @@ class Crawler(threading.Thread):
149
157
  exception=''.join(traceback.format_exception(type(e), e, e.__traceback__))
150
158
  ))
151
159
  seed.params.retry += 1
152
- self._todo.push(seed)
160
+ # self._todo.push(seed)
161
+ self._set_seed(seed)
162
+ time.sleep(self.time_sleep * seed.params.retry)
153
163
  finally:
154
164
  time.sleep(0.1)
155
165
  logger.info("spider thread close")
cobweb/db/redis_db.py CHANGED
@@ -63,7 +63,7 @@ class RedisDB:
63
63
 
64
64
  def lock(self, key, t=15) -> bool:
65
65
  lua_script = """
66
- local status = redis.call('setnx', KEYS[1], ARGV[1])
66
+ local status = redis.call('setnx', KEYS[1], 1)
67
67
  if ( status == 1 ) then
68
68
  redis.call('expire', KEYS[1], ARGV[1])
69
69
  end
@@ -17,6 +17,8 @@ def check_pause(func):
17
17
  func(self, *args, **kwargs)
18
18
  except Exception as e:
19
19
  logger.info(f"{func.__name__}: " + str(e))
20
+ finally:
21
+ time.sleep(0.1)
20
22
 
21
23
  return wrapper
22
24
 
@@ -94,10 +96,14 @@ class Launcher(threading.Thread):
94
96
 
95
97
  self._spider_max_retries = setting.SPIDER_MAX_RETRIES
96
98
  self._spider_thread_num = setting.SPIDER_THREAD_NUM
99
+ self._spider_time_sleep = setting.SPIDER_TIME_SLEEP
100
+ self._spider_max_speed = setting.SPIDER_MAX_SPEED
97
101
 
98
102
  self._done_model = setting.DONE_MODEL
99
103
  self._task_model = setting.TASK_MODEL
100
104
 
105
+ self._filter_field = setting.FILTER_FIELD
106
+
101
107
  @property
102
108
  def request(self):
103
109
  """
@@ -151,7 +157,22 @@ class Launcher(threading.Thread):
151
157
  def _remove_doing_seeds(self, seeds):
152
158
  for seed in seeds:
153
159
  self.__DOING__.pop(seed, None)
154
- logger.info("remove %s seeds from __DOING__" % len(seeds))
160
+ # logger.info("remove %s seeds from __DOING__" % len(seeds))
161
+
162
+ def _get_seed(self) -> Seed:
163
+ return self.__LAUNCHER_QUEUE__["todo"].pop()
164
+
165
+ def _set_seed(self, seed, **kwargs):
166
+ self.__LAUNCHER_QUEUE__["todo"].push(seed, **kwargs)
167
+
168
+ def _upload_data(self, data, **kwargs):
169
+ self.__LAUNCHER_QUEUE__["upload"].push(data, **kwargs)
170
+
171
+ def _add_seed(self, seed, **kwargs):
172
+ self.__LAUNCHER_QUEUE__["new"].push(seed, **kwargs)
173
+
174
+ def _delete_seed(self, seed, **kwargs):
175
+ self.__LAUNCHER_QUEUE__["done"].push(seed, **kwargs)
155
176
 
156
177
  def _execute(self):
157
178
  for func_name in self.__LAUNCHER_FUNC__:
@@ -165,10 +186,16 @@ class Launcher(threading.Thread):
165
186
 
166
187
  self._Crawler(
167
188
  stop=self._stop, pause=self._pause,
168
- launcher_queue=self.__LAUNCHER_QUEUE__,
189
+ # launcher_queue=self.__LAUNCHER_QUEUE__,
190
+ get_seed=self._get_seed,
191
+ set_seed=self._set_seed,
192
+ add_seed=self._add_seed,
193
+ delete_seed=self._delete_seed,
194
+ upload_data=self._upload_data,
169
195
  custom_func=self.__CUSTOM_FUNC__,
170
196
  thread_num = self._spider_thread_num,
171
- max_retries = self._spider_max_retries
197
+ max_retries = self._spider_max_retries,
198
+ time_sleep=self._spider_time_sleep
172
199
  ).start()
173
200
 
174
201
  self._Pipeline(
@@ -3,6 +3,7 @@ import threading
3
3
 
4
4
  from cobweb.db import RedisDB
5
5
  from cobweb.base import Seed, logger
6
+ from cobweb.utils import BloomFilter
6
7
  from cobweb.constant import DealModel, LogTemplate
7
8
  from .launcher import Launcher, check_pause
8
9
 
@@ -15,11 +16,19 @@ class LauncherPro(Launcher):
15
16
  self._done_key = "{%s:%s}:done" % (project, task)
16
17
  self._fail_key = "{%s:%s}:fail" % (project, task)
17
18
  self._heartbeat_key = "heartbeat:%s_%s" % (project, task)
18
- self._reset_lock_key = "lock:reset:%s_%s" % (project, task)
19
+
19
20
  self._statistics_done_key = "statistics:%s:%s:done" % (project, task)
20
21
  self._statistics_fail_key = "statistics:%s:%s:fail" % (project, task)
22
+ self._speed_control_key = "speed_control:%s_%s" % (project, task)
23
+
24
+ self._reset_lock_key = "lock:reset:%s_%s" % (project, task)
25
+
26
+ self._bf_key = "bloom_%s_%s" % (project, task)
27
+
21
28
  self._db = RedisDB()
22
29
 
30
+ self._bf = BloomFilter(self._bf_key)
31
+
23
32
  self._heartbeat_start_event = threading.Event()
24
33
  self._redis_queue_empty_event = threading.Event()
25
34
 
@@ -33,6 +42,21 @@ class LauncherPro(Launcher):
33
42
  else:
34
43
  self._db._client.incrby(key, count)
35
44
 
45
+ def _get_seed(self) -> Seed:
46
+ spider_speed = self._db._client.get(self._speed_control_key)
47
+ if int(spider_speed or 0) > self._spider_max_speed:
48
+ expire_time = self._db.ttl(self._speed_control_key)
49
+ if expire_time == -1:
50
+ self._db.delete(self._speed_control_key)
51
+ else:
52
+ logger.info(f"Too fast! Please wait {expire_time} seconds...")
53
+ time.sleep(expire_time / 2)
54
+ return None
55
+ seed = self.__LAUNCHER_QUEUE__["todo"].pop()
56
+ if seed and not self._db.lock(self._speed_control_key, t=60):
57
+ self._db._client.incrby(self._speed_control_key, 1)
58
+ return seed
59
+
36
60
  @check_pause
37
61
  def _execute_heartbeat(self):
38
62
  if self._heartbeat_start_event.is_set():
@@ -110,7 +134,7 @@ class LauncherPro(Launcher):
110
134
  """
111
135
  删除队列种子,根据状态添加至成功或失败队列,移除doing字典种子索引
112
136
  """
113
- seeds, s_seeds, f_seeds = [], [], []
137
+ seed_info = {"count": 0, "failed": [], "succeed": [], "common": []}
114
138
  status = self.__LAUNCHER_QUEUE__['done'].length < self._done_queue_max_size
115
139
 
116
140
  for _ in range(self._done_queue_max_size):
@@ -118,26 +142,25 @@ class LauncherPro(Launcher):
118
142
  if not seed:
119
143
  break
120
144
  if seed.params.seed_status == DealModel.fail:
121
- f_seeds.append(seed.to_string)
145
+ seed_info["failed"].append(seed.to_string)
122
146
  elif self._done_model == 1:
123
- s_seeds.append(seed.to_string)
147
+ seed_info["succeed"].append(seed.to_string)
124
148
  else:
125
- seeds.append(seed.to_string)
126
- if seeds:
127
- count = self._db.zrem(self._todo_key, *seeds)
128
- if count:
129
- self.statistics(self._statistics_done_key, count)
130
- self._remove_doing_seeds(seeds)
131
- if s_seeds:
132
- count = self._db.done([self._todo_key, self._done_key], *s_seeds)
133
- if count:
134
- self.statistics(self._statistics_done_key, count)
135
- self._remove_doing_seeds(s_seeds)
136
- if f_seeds:
137
- count = self._db.done([self._todo_key, self._fail_key], *f_seeds)
138
- if count:
139
- self.statistics(self._statistics_fail_key, count)
140
- self._remove_doing_seeds(f_seeds)
149
+ seed_info["common"].append(seed.to_string)
150
+ seed_info['count'] += 1
151
+
152
+ if seed_info["count"]:
153
+
154
+ succeed_count = self._db.zrem(self._todo_key, *seed_info["common"])
155
+ succeed_count += self._db.done([self._todo_key, self._done_key], *seed_info["succeed"])
156
+ failed_count = self._db.done([self._todo_key, self._fail_key], *seed_info["failed"])
157
+
158
+ if failed_count:
159
+ self.statistics(self._statistics_fail_key, failed_count)
160
+ if succeed_count:
161
+ self.statistics(self._statistics_done_key, succeed_count)
162
+
163
+ self._remove_doing_seeds(seed_info["common"] + seed_info["succeed"] + seed_info["failed"])
141
164
 
142
165
  if status:
143
166
  time.sleep(self._done_queue_wait_seconds)
cobweb/setting.py CHANGED
@@ -57,9 +57,16 @@ DONE_MODEL = 0 # 0:种子消费成功直接从队列移除,失败则添加
57
57
  # spider
58
58
  SPIDER_THREAD_NUM = 10
59
59
  SPIDER_MAX_RETRIES = 5
60
+ SPIDER_TIME_SLEEP = 10
61
+ SPIDER_MAX_SPEED = 1000 # 一分钟最大采集数
60
62
 
61
63
  # 任务模式
62
64
  TASK_MODEL = 0 # 0:单次,1:常驻
63
65
 
66
+
67
+ # bloom过滤器
68
+ CAPACITY = 100000000
69
+ ERROR_RATE = 0.001
70
+ FILTER_FIELD = "url"
64
71
  # 文件下载响应类型过滤
65
72
  # FILE_FILTER_CONTENT_TYPE = ["text/html", "application/xhtml+xml"]
cobweb/utils/__init__.py CHANGED
@@ -1,3 +1,4 @@
1
1
  from .oss import OssUtil
2
2
  from .tools import *
3
+ from .bloom import BloomFilter
3
4
 
cobweb/utils/bloom.py ADDED
@@ -0,0 +1,76 @@
1
+ import math
2
+ import time
3
+
4
+ import mmh3
5
+ import redis
6
+ from cobweb import setting
7
+
8
+
9
+ class BloomFilter:
10
+
11
+ def __init__(self, key, redis_config=None, capacity=None, error_rate=None):
12
+ redis_config = redis_config or setting.REDIS_CONFIG
13
+ capacity = capacity or setting.CAPACITY
14
+ error_rate = error_rate or setting.ERROR_RATE
15
+ redis_config['db'] = 3
16
+
17
+ self.key = key
18
+
19
+ pool = redis.ConnectionPool(**redis_config)
20
+ self._client = redis.Redis(connection_pool=pool)
21
+ self.bit_size = self.get_bit_size(capacity, error_rate)
22
+ self.hash_count = self.get_hash_count(self.bit_size, capacity)
23
+ self._init_bloom_key()
24
+
25
+ def add(self, value):
26
+ for seed in range(self.hash_count):
27
+ result = mmh3.hash(value, seed) % self.bit_size
28
+ self._client.setbit(self.key, result, 1)
29
+ return True
30
+
31
+ def exists(self, value):
32
+ if not self._client.exists(self.key):
33
+ return False
34
+ for seed in range(self.hash_count):
35
+ result = mmh3.hash(value, seed) % self.bit_size
36
+ if not self._client.getbit(self.key, result):
37
+ return False
38
+ return True
39
+
40
+ def _init_bloom_key(self):
41
+ lua_script = """
42
+ redis.call("SETBIT", KEYS[1], ARGV[1], ARGV[2])
43
+ redis.call("EXPIRE", KEYS[1], 604800)
44
+ """
45
+ if self._client.exists(self.key):
46
+ return True
47
+ execute = self._client.register_script(lua_script)
48
+ execute(keys=[self.key], args=[self.bit_size-1, 1])
49
+
50
+ @classmethod
51
+ def get_bit_size(cls, n, p):
52
+ return int(-(n * math.log(p)) / (math.log(2) ** 2))
53
+
54
+ @classmethod
55
+ def get_hash_count(cls, m, n):
56
+ return int((m / n) * math.log(2))
57
+
58
+
59
+ if __name__ == '__main__':
60
+ testBLF = BloomFilter("test", {
61
+ "host": "r-j6c1t3etiefpmz7cwdpd.redis.rds.aliyuncs.com",
62
+ "password": "SpiderLinux666",
63
+ })
64
+
65
+ print("start")
66
+ start_time = time.time()
67
+ testBLF.add("test")
68
+ add_time = time.time()
69
+ print("add time::: ")
70
+ print(add_time - start_time)
71
+ print("get::: ")
72
+ print(testBLF.exists("test"))
73
+ exist_time = time.time()
74
+ print("get time::: ")
75
+ print(exist_time - add_time)
76
+
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: cobweb-launcher
3
- Version: 1.2.11
3
+ Version: 1.2.14
4
4
  Summary: spider_hole
5
5
  Home-page: https://github.com/Juannie-PP/cobweb
6
6
  Author: Juannie-PP
@@ -16,6 +16,7 @@ Requires-Dist: requests (>=2.19.1)
16
16
  Requires-Dist: oss2 (>=2.18.1)
17
17
  Requires-Dist: redis (>=4.4.4)
18
18
  Requires-Dist: aliyun-log-python-sdk
19
+ Requires-Dist: mmh3
19
20
 
20
21
  # cobweb
21
22
  cobweb是一个基于python的分布式爬虫调度框架,目前支持分布式爬虫,单机爬虫,支持自定义数据库,支持自定义数据存储,支持自定义数据处理等操作。
@@ -1,6 +1,6 @@
1
1
  cobweb/__init__.py,sha256=uMHyf4Fekbyw2xBCbkA8R0LwCpBJf5p_7pWbh60ZWYk,83
2
2
  cobweb/constant.py,sha256=zy3XYsc1qp2B76_Fn_hVQ8eGHlPBd3OFlZK2cryE6FY,2839
3
- cobweb/setting.py,sha256=Wev0clo4ZETI5cRvBnzTWnJWo0Nowv_uvNCqlzYPSiE,1990
3
+ cobweb/setting.py,sha256=MGe4QGnE5XOTh9Z7NhakaTFK7f-lZtzlA9PFcuc1qoY,2145
4
4
  cobweb/base/__init__.py,sha256=4gwWWQ0Q8cYG9cD7Lwf4XMqRGc5M_mapS3IczR6zeCE,222
5
5
  cobweb/base/common_queue.py,sha256=W7PPZZFl52j3Mc916T0imHj7oAUelA6aKJwW-FecDPE,872
6
6
  cobweb/base/decorators.py,sha256=wDCaQ94aAZGxks9Ljc0aXq6omDXT1_yzFy83ZW6VbVI,930
@@ -11,27 +11,28 @@ cobweb/base/response.py,sha256=eB1DWMXFCpn3cJ3yzgCRU1WeZAdayGDohRgdjdMUFN4,406
11
11
  cobweb/base/seed.py,sha256=Uz_VBRlAxNYQcFHk3tsZFMlU96yPOedHaWGTvk-zKd8,2908
12
12
  cobweb/crawlers/__init__.py,sha256=msvkB9mTpsgyj8JfNMsmwAcpy5kWk_2NrO1Adw2Hkw0,29
13
13
  cobweb/crawlers/base_crawler.py,sha256=ee_WSDnPQpPTk6wlFuY2UEx5L3hcsAZFcr6i3GLSry8,5751
14
- cobweb/crawlers/crawler.py,sha256=MJ2rFTrVjQ4Q9TmX0dpbOvT4tdokQwh7dKeFY_W7pPE,6002
14
+ cobweb/crawlers/crawler.py,sha256=xiFNM0t69f5xlm59hPbO2MpqtdirVAUhD84-CLpyHPM,6349
15
15
  cobweb/crawlers/file_crawler.py,sha256=2Sjbdgxzqd41WykKUQE3QQlGai3T8k-pmHNmPlTchjQ,4454
16
16
  cobweb/db/__init__.py,sha256=ut0iEyBLjcJL06WNG_5_d4hO5PJWvDrKWMkDOdmgh2M,30
17
- cobweb/db/redis_db.py,sha256=NNI2QkRV1hEZI-z-COEncXt88z3pZN6wusKlcQzc8V4,4304
17
+ cobweb/db/redis_db.py,sha256=fumNZJiio-uQqRcSrymx8eJ1PqsdOwITe_Y-9JOXxrQ,4298
18
18
  cobweb/exceptions/__init__.py,sha256=E9SHnJBbhD7fOgPFMswqyOf8SKRDrI_i25L0bSpohvk,32
19
19
  cobweb/exceptions/oss_db_exception.py,sha256=iP_AImjNHT3-Iv49zCFQ3rdLnlvuHa3h2BXApgrOYpA,636
20
20
  cobweb/launchers/__init__.py,sha256=af0Y6wrGX8SQZ7w7XL2sOtREjCT3dwad-uCc3nIontY,76
21
- cobweb/launchers/launcher.py,sha256=mBpq0CmxXXv-KdXQ2x7vwOkkAvaKECLkZLGhraafkQA,5953
21
+ cobweb/launchers/launcher.py,sha256=AbkrytfJEyj8FhTbLgjmOOIuvOYV3cpVknE9yt31WbM,6930
22
22
  cobweb/launchers/launcher_air.py,sha256=KAk_M8F3029cXYe7m4nn3Nzyi89lbxJ2cqZjqW8iZ0E,2832
23
- cobweb/launchers/launcher_pro.py,sha256=OmlC5o3BdcsTFwOVAKNJdnwdlTl-fUiXC2kViWEpcoU,7677
23
+ cobweb/launchers/launcher_pro.py,sha256=8QKhToKoD2WonIaqRQAhUWRhbNOIgYXzGFRK1id_3yM,8638
24
24
  cobweb/pipelines/__init__.py,sha256=zSUsGtx6smbs2iXBXvYynReKSgky-3gjqaAtKVnA_OU,105
25
25
  cobweb/pipelines/base_pipeline.py,sha256=fYnWf79GmhufXpcnMa3te18SbmnVeYLwxfyo-zLd9CY,1577
26
26
  cobweb/pipelines/loghub_pipeline.py,sha256=cjPO6w6UJ0jNw2fVvdX0BCdlm58T7dmYXlxzXOBpvfY,1027
27
27
  cobweb/pipelines/pipeline.py,sha256=4TJLX0sUHRxYndF5A4Vs5btUGI-wigkOcFvhTW1hLXI,2009
28
28
  cobweb/pipelines/pipeline_console.py,sha256=NEh-4zhuVAQOqwXLsqeb-rcNZ9_KXFUpL3otUTL5qBs,754
29
29
  cobweb/pipelines/pipeline_loghub.py,sha256=xZ6D55BGdiM71WUv83jyLGbEyUwhBHLJRZoXthBxxTs,1019
30
- cobweb/utils/__init__.py,sha256=JTE4sBfHnKHhD6w9Auk0MIT7O9BMOamCeryhlHNx3Zg,47
30
+ cobweb/utils/__init__.py,sha256=vBtZTy3EfRE0MmH43URhmr7nw6_oOWTEbGOM9xR_9o8,78
31
+ cobweb/utils/bloom.py,sha256=K3ACqQOIw9drxeVb6DKLA2iZYFn_rE16I781gXAsyqA,2228
31
32
  cobweb/utils/oss.py,sha256=gyt8-UB07tVphZLQXMOf-JTJwU-mWq8KZkOXKkAf3uk,3513
32
33
  cobweb/utils/tools.py,sha256=5JEaaAwYoV9Sdla2UBIJn6faUBuXmxUMagm9ck6FVqs,1253
33
- cobweb_launcher-1.2.11.dist-info/LICENSE,sha256=z1rxSIGOyzcSb3orZxFPxzx-0C1vTocmswqBNxpKfEk,1063
34
- cobweb_launcher-1.2.11.dist-info/METADATA,sha256=GSDZ3aWvattuW3Md-faK0cSBPyGDA8bJcVZRlSVp4ag,6490
35
- cobweb_launcher-1.2.11.dist-info/WHEEL,sha256=ewwEueio1C2XeHTvT17n8dZUJgOvyCWCt0WVNLClP9o,92
36
- cobweb_launcher-1.2.11.dist-info/top_level.txt,sha256=4GETBGNsKqiCUezmT-mJn7tjhcDlu7nLIV5gGgHBW4I,7
37
- cobweb_launcher-1.2.11.dist-info/RECORD,,
34
+ cobweb_launcher-1.2.14.dist-info/LICENSE,sha256=z1rxSIGOyzcSb3orZxFPxzx-0C1vTocmswqBNxpKfEk,1063
35
+ cobweb_launcher-1.2.14.dist-info/METADATA,sha256=ofgY9uUtCBfQn-LkEc0eZelQpJxWoB8Aza_u53qrytk,6510
36
+ cobweb_launcher-1.2.14.dist-info/WHEEL,sha256=ewwEueio1C2XeHTvT17n8dZUJgOvyCWCt0WVNLClP9o,92
37
+ cobweb_launcher-1.2.14.dist-info/top_level.txt,sha256=4GETBGNsKqiCUezmT-mJn7tjhcDlu7nLIV5gGgHBW4I,7
38
+ cobweb_launcher-1.2.14.dist-info/RECORD,,