cobweb-launcher 1.3.14__py3-none-any.whl → 3.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
Files changed (36)
  1. cobweb/__init__.py +1 -1
  2. cobweb/base/__init__.py +4 -149
  3. cobweb/base/common_queue.py +0 -13
  4. cobweb/base/request.py +2 -14
  5. cobweb/base/seed.py +16 -12
  6. cobweb/constant.py +0 -16
  7. cobweb/crawlers/crawler.py +3 -85
  8. cobweb/db/redis_db.py +109 -52
  9. cobweb/launchers/__init__.py +8 -2
  10. cobweb/launchers/distributor.py +171 -0
  11. cobweb/launchers/launcher.py +87 -131
  12. cobweb/launchers/uploader.py +65 -0
  13. cobweb/pipelines/pipeline.py +3 -36
  14. cobweb/schedulers/__init__.py +1 -3
  15. cobweb/schedulers/launcher_air.py +93 -0
  16. cobweb/schedulers/launcher_api.py +225 -0
  17. cobweb/schedulers/scheduler.py +85 -0
  18. cobweb/schedulers/scheduler_with_redis.py +177 -0
  19. cobweb/setting.py +15 -32
  20. cobweb/utils/__init__.py +2 -1
  21. cobweb/utils/decorators.py +43 -0
  22. cobweb/utils/dotting.py +55 -0
  23. cobweb/utils/oss.py +28 -9
  24. {cobweb_launcher-1.3.14.dist-info → cobweb_launcher-3.1.0.dist-info}/METADATA +1 -1
  25. cobweb_launcher-3.1.0.dist-info/RECORD +41 -0
  26. cobweb/base/basic.py +0 -295
  27. cobweb/base/dotting.py +0 -35
  28. cobweb/launchers/launcher_air.py +0 -88
  29. cobweb/launchers/launcher_api.py +0 -88
  30. cobweb/launchers/launcher_pro.py +0 -88
  31. cobweb/schedulers/scheduler_api.py +0 -72
  32. cobweb/schedulers/scheduler_redis.py +0 -72
  33. cobweb_launcher-1.3.14.dist-info/RECORD +0 -40
  34. {cobweb_launcher-1.3.14.dist-info → cobweb_launcher-3.1.0.dist-info}/LICENSE +0 -0
  35. {cobweb_launcher-1.3.14.dist-info → cobweb_launcher-3.1.0.dist-info}/WHEEL +0 -0
  36. {cobweb_launcher-1.3.14.dist-info → cobweb_launcher-3.1.0.dist-info}/top_level.txt +0 -0
cobweb/__init__.py CHANGED
@@ -1,2 +1,2 @@
1
- from .launchers import LauncherPro, LauncherApi
1
+ from .launchers import Launcher
2
2
  from .constant import CrawlerModel
cobweb/base/__init__.py CHANGED
@@ -1,154 +1,9 @@
1
- import os
2
- import time
3
- import traceback
4
- import threading
5
-
6
- from functools import wraps
7
- from inspect import isgenerator
8
- from typing import Callable, Union
9
-
10
1
  from .common_queue import Queue
11
2
  from .response import Response
12
- from .basic import Seed, Request, Response
3
+ from .request import Request
13
4
  from .item import BaseItem, ConsoleItem
14
- # from .seed import Seed
15
- from .log import logger
16
- from .dotting import LoghubDot
17
-
18
-
19
- class TaskQueue:
20
- TODO = Queue() # 任务种子队列
21
- DOWNLOAD = Queue() # 下载任务队列
22
-
23
- SEED = Queue() # 添加任务种子队列
24
- REQUEST = Queue() # 请求队列
25
- RESPONSE = Queue() # 响应队列
26
- DONE = Queue() # 下载完成队列
27
- UPLOAD = Queue() # 任务上传队列
28
- DELETE = Queue() # 任务删除队列
29
- DOT = LoghubDot()
30
-
31
- @staticmethod
32
- def is_empty():
33
- total_length = TaskQueue.SEED.length
34
- total_length += TaskQueue.TODO.length
35
- total_length += TaskQueue.REQUEST.length
36
- total_length += TaskQueue.DOWNLOAD.length
37
- total_length += TaskQueue.RESPONSE.length
38
- total_length += TaskQueue.UPLOAD.length
39
- total_length += TaskQueue.DONE.length
40
- total_length += TaskQueue.DELETE.length
41
- return not bool(total_length)
42
-
43
- @staticmethod
44
- def process_task(it: Union[Seed, Request, Response, BaseItem], crawler_func: Callable):
45
- try:
46
- start_time = time.time()
47
- iterators = crawler_func(it)
48
- if not isgenerator(iterators):
49
- raise TypeError(f"{crawler_func.__name__} function isn't a generator")
50
- for tk in iterators:
51
- if isinstance(tk, Request):
52
- TaskQueue.REQUEST.push(tk)
53
- elif isinstance(tk, Response):
54
- TaskQueue.RESPONSE.push(tk)
55
- elif isinstance(tk, BaseItem):
56
- TaskQueue.UPLOAD.push(tk)
57
- elif isinstance(tk, Seed):
58
- TaskQueue.SEED.push(tk)
59
- else:
60
- raise TypeError(f"{crawler_func.__name__} function return type isn't supported")
61
- TaskQueue.DOT.build(
62
- topic=f"{os.getenv('PROJECT')}:{os.getenv('TASK')}",
63
- cost_time=round(time.time() - start_time, 2),
64
- process_task_type=tk.__class__.__name__,
65
- **tk.to_dict
66
- )
67
- except Exception as e:
68
- it.params.retry += 1
69
- if isinstance(it, Request):
70
- TaskQueue.DOWNLOAD.push(it)
71
- elif isinstance(it, Response):
72
- TaskQueue.RESPONSE.push(it)
73
- elif isinstance(it, Seed):
74
- TaskQueue.TODO.push(it)
75
- elif isinstance(it, BaseItem):
76
- TaskQueue.UPLOAD.push(it)
77
- logger.info(
78
- f"{crawler_func.__name__} failed: "
79
- f"{''.join(traceback.format_exception(type(e), e, e.__traceback__))}"
80
- )
81
- time.sleep(1)
82
-
83
-
84
- class Decorators:
5
+ from .seed import Seed
85
6
 
86
- @staticmethod
87
- def add_thread(num=1):
88
- def decorator(func):
89
- @wraps(func)
90
- def wrapper(self, *args):
91
- for i in range(num):
92
- name = func.__name__ + "_" + str(i) if num > 1 else func.__name__
93
- self._threads.append(threading.Thread(name=name, target=func, args=(self,) + args))
94
-
95
- return wrapper
96
-
97
- return decorator
98
-
99
- @staticmethod
100
- def pause(func):
101
- @wraps(func)
102
- def wrapper(self, *args, **kwargs):
103
- while not self.stop.is_set():
104
- while not self.pause.is_set():
105
- try:
106
- func(self)
107
- except Exception as e:
108
- logger.info(f"{func.__name__}: " + str(e))
109
- finally:
110
- time.sleep(0.1)
111
- # logger.info(f"{func.__name__}: close!")
112
-
113
- return wrapper
114
-
115
- @staticmethod
116
- def stop(func):
117
- @wraps(func)
118
- def wrapper(self, *args, **kwargs):
119
- while not self.stop.is_set():
120
- try:
121
- func(self, *args, **kwargs)
122
- except Exception as e:
123
- logger.info(
124
- f"{func.__name__} exception: \n" +
125
- ''.join(traceback.format_exception(type(e), e, e.__traceback__))
126
- )
127
- finally:
128
- time.sleep(0.1)
129
-
130
- return wrapper
131
-
132
- @staticmethod
133
- def decorator_oss_db(exception, retries=3):
134
- def decorator(func):
135
- @wraps(func)
136
- def wrapper(callback_func, *args, **kwargs):
137
- result = None
138
- for i in range(retries):
139
- msg = None
140
- try:
141
- return func(callback_func, *args, **kwargs)
142
- except Exception as e:
143
- result = None
144
- msg = e
145
- finally:
146
- if result:
147
- return result
148
-
149
- if i >= 2 and msg:
150
- raise exception(msg)
151
-
152
- return wrapper
7
+ from .log import logger
8
+ # from .decorators import decorator_oss_db
153
9
 
154
- return decorator
@@ -1,4 +1,3 @@
1
- import time
2
1
  from collections import deque
3
2
 
4
3
 
@@ -29,15 +28,3 @@ class Queue:
29
28
  return None
30
29
  except AttributeError:
31
30
  return None
32
-
33
- def clear(self):
34
- self._queue.clear()
35
-
36
- def get(self):
37
- try:
38
- yield self._queue.popleft()
39
- except IndexError:
40
- time.sleep(1)
41
- yield None
42
- except AttributeError:
43
- yield None
cobweb/base/request.py CHANGED
@@ -1,4 +1,3 @@
1
- import json
2
1
  import random
3
2
  import requests
4
3
 
@@ -31,6 +30,7 @@ class Request:
31
30
  **kwargs
32
31
  ):
33
32
  self.url = url
33
+ self.seed = seed
34
34
  self.check_status_code = check_status_code
35
35
  self.request_setting = {}
36
36
 
@@ -46,12 +46,6 @@ class Request:
46
46
  if random_ua:
47
47
  self._build_header()
48
48
 
49
- if isinstance(seed, Seed):
50
- self.seed = seed.to_string
51
- else:
52
- kwargs.update(**seed.to_dict)
53
- self.seed = self.to_string
54
-
55
49
  @property
56
50
  def _random_ua(self) -> str:
57
51
  v1 = random.randint(4, 15)
@@ -79,16 +73,10 @@ class Request:
79
73
  @property
80
74
  def to_dict(self):
81
75
  _dict = self.__dict__.copy()
76
+ _dict.pop('url')
82
77
  _dict.pop('seed')
83
78
  _dict.pop('check_status_code')
84
79
  _dict.pop('request_setting')
85
80
  return _dict
86
81
 
87
- @property
88
- def to_string(self) -> str:
89
- return json.dumps(
90
- self.to_dict,
91
- ensure_ascii=False,
92
- separators=(",", ":")
93
- )
94
82
 
cobweb/base/seed.py CHANGED
@@ -5,11 +5,13 @@ import hashlib
5
5
 
6
6
  class SeedParams:
7
7
 
8
- def __init__(self, retry, priority, seed_version, seed_status=None):
8
+ def __init__(self, retry, priority, seed_version, seed_status=None, proxy_type=None, proxy=None):
9
9
  self.retry = retry or 0
10
10
  self.priority = priority or 300
11
11
  self.seed_version = seed_version or int(time.time())
12
12
  self.seed_status = seed_status
13
+ self.proxy_type = proxy_type
14
+ self.proxy = proxy
13
15
 
14
16
 
15
17
  class Seed:
@@ -18,7 +20,9 @@ class Seed:
18
20
  "retry",
19
21
  "priority",
20
22
  "seed_version",
21
- "seed_status"
23
+ "seed_status",
24
+ "proxy_type",
25
+ "proxy"
22
26
  ]
23
27
 
24
28
  def __init__(
@@ -29,6 +33,8 @@ class Seed:
29
33
  priority=None,
30
34
  seed_version=None,
31
35
  seed_status=None,
36
+ proxy_type=None,
37
+ proxy=None,
32
38
  **kwargs
33
39
  ):
34
40
  if any(isinstance(seed, t) for t in (str, bytes)):
@@ -51,6 +57,8 @@ class Seed:
51
57
  "priority": priority,
52
58
  "seed_version": seed_version,
53
59
  "seed_status": seed_status,
60
+ "proxy_type": proxy_type,
61
+ "proxy": proxy
54
62
  }
55
63
 
56
64
  if kwargs:
@@ -104,15 +112,11 @@ class Seed:
104
112
  separators=(",", ":")
105
113
  )
106
114
 
107
- # @property
108
- # def get_all(self):
109
- # return json.dumps(
110
- # self.__dict__,
111
- # ensure_ascii=False,
112
- # separators=(",", ":")
113
- # )
114
-
115
115
  @property
116
- def seed(self):
117
- return self.to_string
116
+ def get_all(self):
117
+ return json.dumps(
118
+ self.__dict__,
119
+ ensure_ascii=False,
120
+ separators=(",", ":")
121
+ )
118
122
 
cobweb/constant.py CHANGED
@@ -37,22 +37,6 @@ class LogTemplate:
37
37
  ----------------------- end - console pipeline ------------------
38
38
  """
39
39
 
40
- launcher_polling = """
41
- ----------------------- start - 轮训日志: {task} -----------------
42
- 正在运行任务
43
- 构造请求任务数: {memory_todo_count}
44
- 正在下载任务数: {memory_download_count}
45
- 任务内存队列
46
- 待构造请求队列: {todo_queue_len}
47
- 待删除请求队列: {delete_queue_len}
48
- 待进行下载队列: {request_queue_len}
49
- 待解析响应队列: {response_queue_len}
50
- 待删除下载队列: {done_queue_len}
51
- 存储队列
52
- 待上传数据队列: {upload_queue_len}
53
- ----------------------- end - 轮训日志: {task} ------------------
54
- """
55
-
56
40
  launcher_air_polling = """
57
41
  ----------------------- start - 轮训日志: {task} -----------------
58
42
  内存队列
@@ -1,40 +1,19 @@
1
1
  import json
2
- import os
3
- import time
4
- import threading
5
- from typing import Union, Callable, Mapping
6
-
7
- from cobweb import setting
2
+ from typing import Union
8
3
  from cobweb.base import (
9
4
  Seed,
10
5
  BaseItem,
11
6
  Request,
12
7
  Response,
13
8
  ConsoleItem,
14
- Decorators,
15
- TaskQueue,
16
9
  )
17
- from cobweb.constant import DealModel
18
-
19
10
 
20
- class Crawler(threading.Thread):
21
11
 
22
- def __init__(self, stop, pause, custom_func: Union[Mapping[str, Callable]]):
23
- super().__init__()
24
- self.stop = stop
25
- self.pause = pause
26
- for func_name, _callable in custom_func.items():
27
- if isinstance(_callable, Callable):
28
- self.__setattr__(func_name, _callable)
29
-
30
- self.spider_max_retries = setting.SPIDER_MAX_RETRIES
31
- self.request_queue_size = setting.REQUEST_QUEUE_SIZE
32
- self.download_queue_size = setting.DOWNLOAD_QUEUE_SIZE
33
- self.upload_queue_size = setting.UPLOAD_QUEUE_SIZE
12
+ class Crawler:
34
13
 
35
14
  @staticmethod
36
15
  def request(seed: Seed) -> Union[Request, BaseItem]:
37
- yield Request(seed, timeout=5)
16
+ yield Request(seed.url, seed, timeout=5)
38
17
 
39
18
  @staticmethod
40
19
  def download(item: Request) -> Union[Seed, BaseItem, Response, str]:
@@ -47,64 +26,3 @@ class Crawler(threading.Thread):
47
26
  upload_item["text"] = item.response.text
48
27
  yield ConsoleItem(item.seed, data=json.dumps(upload_item, ensure_ascii=False))
49
28
 
50
- # @decorators.add_thread()
51
- @Decorators.pause
52
- def build_request_item(self):
53
- thread_sleep = 0.1
54
- if TaskQueue.REQUEST.length >= self.request_queue_size:
55
- thread_sleep = 5
56
- elif seed := TaskQueue.TODO.pop():
57
- # member, priority = seed_info
58
- # seed = Seed(member, priority=priority)
59
- if seed.params.retry > self.spider_max_retries:
60
- TaskQueue.DOT.build(
61
- topic=f"{os.getenv('PROJECT')}:{os.getenv('TASK')}",
62
- process_task_type=seed.__class__.__name__,
63
- seed_status=DealModel.fail,
64
- retries=seed.params.retry,
65
- **seed.to_dict
66
- )
67
- else:
68
- TaskQueue.process_task(seed, self.request)
69
- TaskQueue.DELETE.push(seed.seed)
70
- time.sleep(thread_sleep)
71
-
72
- @Decorators.pause
73
- def build_download_item(self):
74
- thread_sleep = 0.1
75
- if TaskQueue.RESPONSE.length >= self.download_queue_size:
76
- thread_sleep = 5
77
- elif request_item := TaskQueue.DOWNLOAD.pop():
78
- if request_item.params.retry > self.spider_max_retries:
79
- TaskQueue.DOT.build(
80
- topic=f"{os.getenv('PROJECT')}:{os.getenv('TASK')}",
81
- process_task_type=request_item.__class__.__name__,
82
- retries=request_item.params.retry,
83
- seed_status=DealModel.fail,
84
- **request_item.to_dict
85
- )
86
- TaskQueue.DONE.push(request_item.seed)
87
- else:
88
- TaskQueue.process_task(request_item, self.download)
89
- time.sleep(thread_sleep)
90
-
91
- @Decorators.pause
92
- def build_parse_item(self):
93
- thread_sleep = 0.1
94
- if TaskQueue.UPLOAD.length >= self.upload_queue_size:
95
- thread_sleep = 5
96
- elif response_item := TaskQueue.RESPONSE.pop():
97
- if response_item.params.retry > self.spider_max_retries:
98
- TaskQueue.DOT.build(
99
- topic=f"{os.getenv('PROJECT')}:{os.getenv('TASK')}",
100
- process_task_type=response_item.__class__.__name__,
101
- seed_status=DealModel.fail,
102
- retries=response_item.params.retry,
103
- **response_item.to_dict
104
- )
105
- TaskQueue.DONE.push(response_item.seed)
106
- else:
107
- TaskQueue.process_task(response_item, self.parse)
108
- time.sleep(thread_sleep)
109
-
110
-
cobweb/db/redis_db.py CHANGED
@@ -1,67 +1,152 @@
1
1
  import redis
2
+ import time
2
3
  from cobweb import setting
4
+ from redis.exceptions import ConnectionError, TimeoutError
3
5
 
4
6
 
5
7
  class RedisDB:
6
-
7
8
  def __init__(self, **kwargs):
8
9
  redis_config = kwargs or setting.REDIS_CONFIG
9
- pool = redis.ConnectionPool(**redis_config)
10
- self._client = redis.Redis(connection_pool=pool)
10
+ self.host = redis_config['host']
11
+ self.password = redis_config['password']
12
+ self.port = redis_config['port']
13
+ self.db = redis_config['db']
14
+
15
+ self.max_retries = 5
16
+ self.retry_delay = 5
17
+ self.client = None
18
+ self.connect()
19
+
20
+ def connect(self):
21
+ """尝试连接 Redis"""
22
+ retries = 0
23
+ while retries < self.max_retries:
24
+ try:
25
+ self.client = redis.Redis(
26
+ host=self.host,
27
+ port=self.port,
28
+ password=self.password,
29
+ db=self.db,
30
+ socket_timeout=5, # 设置连接超时时间
31
+ socket_connect_timeout=5 # 设置连接超时时间
32
+ )
33
+ # 测试连接是否成功
34
+ self.client.ping()
35
+ return
36
+ except (ConnectionError, TimeoutError) as e:
37
+ retries += 1
38
+ if retries < self.max_retries:
39
+ time.sleep(self.retry_delay)
40
+ else:
41
+ raise Exception("达到最大重试次数,无法连接 Redis")
42
+
43
+ def is_connected(self):
44
+ try:
45
+ self.client.ping()
46
+ return True
47
+ except (ConnectionError, TimeoutError):
48
+ return False
49
+
50
+ def reconnect(self):
51
+ self.connect()
52
+
53
+ def execute_command(self, command, *args, **kwargs):
54
+ retries = 0
55
+ while retries < self.max_retries:
56
+ try:
57
+ if not self.is_connected():
58
+ self.reconnect()
59
+ return getattr(self.client, command)(*args, **kwargs)
60
+ except (ConnectionError, TimeoutError) as e:
61
+ retries += 1
62
+ if retries < self.max_retries:
63
+ time.sleep(self.retry_delay)
64
+ else:
65
+ raise Exception("达到最大重试次数,无法执行命令")
66
+
67
+ def get(self, name):
68
+ # with self.get_connection() as client:
69
+ # return client.get(name)
70
+ return self.execute_command("get", name)
71
+
72
+ def incrby(self, name, value):
73
+ # with self.get_connection() as client:
74
+ # client.incrby(name, value)
75
+ self.execute_command("incrby", name, value)
11
76
 
12
77
  def setnx(self, name, value=""):
13
- return self._client.setnx(name, value)
78
+ # with self.get_connection() as client:
79
+ # client.setnx(name, value)
80
+ self.execute_command("setnx", name, value)
14
81
 
15
82
  def setex(self, name, t, value=""):
16
- return self._client.setex(name, t, value)
83
+ # with self.get_connection() as client:
84
+ # client.setex(name, t, value)
85
+ self.execute_command("setex", name, t, value)
17
86
 
18
87
  def expire(self, name, t, nx: bool = False, xx: bool = False, gt: bool = False, lt: bool = False):
19
- return self._client.expire(name, t, nx, xx, gt, lt)
88
+ # with self.get_connection() as client:
89
+ # client.expire(name, t, nx, xx, gt, lt)
90
+ self.execute_command("expire", name, t, nx, xx, gt, lt)
20
91
 
21
92
  def ttl(self, name):
22
- return self._client.ttl(name)
93
+ # with self.get_connection() as client:
94
+ # return client.ttl(name)
95
+ return self.execute_command("ttl", name)
23
96
 
24
97
  def delete(self, name):
25
- return self._client.delete(name)
98
+ # with self.get_connection() as client:
99
+ # return client.delete(name)
100
+ return self.execute_command("delete", name)
26
101
 
27
102
  def exists(self, *name) -> bool:
28
- return self._client.exists(*name)
29
-
30
- def incrby(self, name, value):
31
- return self._client.incrby(name, value)
103
+ # with self.get_connection() as client:
104
+ # return client.exists(*name)
105
+ return self.execute_command("exists", *name)
32
106
 
33
107
  def sadd(self, name, value):
34
- return self._client.sadd(name, value)
108
+ # with self.get_connection() as client:
109
+ # return client.sadd(name, value)
110
+ return self.execute_command("sadd", name, value)
35
111
 
36
112
  def zcard(self, name) -> bool:
37
- return self._client.zcard(name)
113
+ # with self.get_connection() as client:
114
+ # return client.zcard(name)
115
+ return self.execute_command("zcard", name)
38
116
 
39
117
  def zadd(self, name, item: dict, **kwargs):
40
- return self._client.zadd(name, item, **kwargs)
118
+ # with self.get_connection() as client:
119
+ # return client.zadd(name, item, **kwargs)
120
+ return self.execute_command("zadd", name, item, **kwargs)
41
121
 
42
122
  def zrem(self, name, *value):
43
- return self._client.zrem(name, *value)
123
+ # with self.get_connection() as client:
124
+ # return client.zrem(name, *value)
125
+ return self.execute_command("zrem", name, *value)
44
126
 
45
127
  def zcount(self, name, _min, _max):
46
- return self._client.zcount(name, _min, _max)
128
+ # with self.get_connection() as client:
129
+ # return client.zcount(name, _min, _max)
130
+ return self.execute_command("zcount", name, _min, _max)
47
131
 
48
132
  # def zrangebyscore(self, name, _min, _max, start, num, withscores: bool = False, *args):
49
- # return self._client.zrangebyscore(name, _min, _max, start, num, withscores, *args)
133
+ # with self.get_connection() as client:
134
+ # return client.zrangebyscore(name, _min, _max, start, num, withscores, *args)
50
135
 
51
136
  def lua(self, script: str, keys: list = None, args: list = None):
52
137
  keys = keys or []
53
138
  args = args or []
54
139
  keys_count = len(keys)
55
- return self._client.eval(script, keys_count, *keys, *args)
140
+ return self.execute_command("eval", script, keys_count, *keys, *args)
56
141
 
57
142
  def lua_sha(self, sha1: str, keys: list = None, args: list = None):
58
143
  keys = keys or []
59
144
  args = args or []
60
145
  keys_count = len(keys)
61
- return self._client.evalsha(sha1, keys_count, *keys, *args)
146
+ return self.execute_command("evalsha", sha1, keys_count, *keys, *args)
62
147
 
63
148
  def execute_lua(self, lua_script: str, keys: list, *args):
64
- execute = self._client.register_script(lua_script)
149
+ execute = self.execute_command("register_script", lua_script)
65
150
  return execute(keys=keys, args=args)
66
151
 
67
152
  def lock(self, key, t=15) -> bool:
@@ -75,32 +160,7 @@ class RedisDB:
75
160
  status = self.execute_lua(lua_script, [key], t)
76
161
  return bool(status)
77
162
 
78
- def auto_incr(self, name, t=15, limit=1000):
79
- lua_script = """
80
- local count = 0
81
- local status = false
82
- local limit = ARGV[2]
83
- local expire = redis.call('ttl', KEYS[1])
84
-
85
- if ( expire == -2 ) then
86
- redis.call('setnx', KEYS[1], 1)
87
- elseif ( expire == -1) then
88
- redis.call('expire', KEYS[1], ARGV[1])
89
- else
90
- count = redis.call('get', KEYS[1])
91
- end
92
-
93
- if ( count + 0 < limit + 0 ) then
94
- status = true
95
- redis.call('incr', KEYS[1])
96
- end
97
-
98
- return status
99
- """
100
- status = self.execute_lua(lua_script, [name], t, limit)
101
- return bool(status)
102
-
103
- def members(self, key, score, start=0, count=5000, _min="-inf", _max="+inf") -> list:
163
+ def members(self, key, score, start=0, count=1000, _min="-inf", _max="+inf") -> list:
104
164
  lua_script = """
105
165
  local min = ARGV[1]
106
166
  local max = ARGV[2]
@@ -114,7 +174,7 @@ class RedisDB:
114
174
  else
115
175
  members = redis.call('zrangebyscore', KEYS[1], min, max, 'WITHSCORES', 'limit', start, count)
116
176
  end
117
-
177
+
118
178
  local result = {}
119
179
 
120
180
  for i = 1, #members, 2 do
@@ -126,7 +186,7 @@ class RedisDB:
126
186
  else
127
187
  originPriority = math.floor(members[i+1])
128
188
  end
129
-
189
+
130
190
  if ( score + 0 >= 1000 ) then
131
191
  priority = -score - originPriority / 1000
132
192
  elseif ( score + 0 == 0 ) then
@@ -153,6 +213,3 @@ class RedisDB:
153
213
  end
154
214
  """
155
215
  self.execute_lua(lua_script, keys, *args)
156
-
157
-
158
-
@@ -1,3 +1,9 @@
1
1
  # from .launcher_air import LauncherAir
2
- from .launcher_pro import LauncherPro
3
- from .launcher_api import LauncherApi
2
+ # from .launcher_pro import LauncherPro
3
+ # from .launcher_api import LauncherApi
4
+
5
+
6
+ from .launcher import Launcher
7
+ from .uploader import Uploader
8
+ from .distributor import Distributor
9
+