cobweb-launcher 1.2.49__py3-none-any.whl → 1.3.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (93) hide show
  1. cobweb/base/__init__.py +141 -4
  2. cobweb/base/basic.py +28 -82
  3. cobweb/base/common_queue.py +13 -0
  4. cobweb/base/dotting.py +1 -1
  5. cobweb/base/request.py +14 -2
  6. cobweb/base/seed.py +10 -6
  7. cobweb/constant.py +16 -0
  8. cobweb/crawlers/crawler.py +51 -181
  9. cobweb/db/redis_db.py +28 -0
  10. cobweb/launchers/__init__.py +2 -2
  11. cobweb/launchers/launcher.py +110 -141
  12. cobweb/launchers/launcher_api.py +66 -114
  13. cobweb/launchers/launcher_pro.py +76 -194
  14. cobweb/pipelines/base_pipeline.py +54 -0
  15. cobweb/pipelines/loghub_pipeline.py +34 -0
  16. cobweb/pipelines/pipeline.py +25 -49
  17. cobweb/schedulers/__init__.py +0 -2
  18. cobweb/schedulers/scheduler_redis.py +5 -8
  19. cobweb/setting.py +29 -6
  20. cobweb/utils/dotting.py +10 -42
  21. cobweb_/__init__.py +2 -0
  22. cobweb_/base/__init__.py +9 -0
  23. cobweb_/base/common_queue.py +30 -0
  24. cobweb_/base/decorators.py +40 -0
  25. cobweb_/base/item.py +46 -0
  26. cobweb_/base/log.py +94 -0
  27. cobweb_/base/request.py +82 -0
  28. cobweb_/base/response.py +23 -0
  29. cobweb_/base/seed.py +114 -0
  30. cobweb_/constant.py +94 -0
  31. cobweb_/crawlers/__init__.py +1 -0
  32. cobweb_/crawlers/crawler.py +184 -0
  33. cobweb_/db/__init__.py +2 -0
  34. cobweb_/db/api_db.py +82 -0
  35. cobweb_/db/redis_db.py +130 -0
  36. cobweb_/exceptions/__init__.py +1 -0
  37. cobweb_/exceptions/oss_db_exception.py +28 -0
  38. cobweb_/launchers/__init__.py +3 -0
  39. cobweb_/launchers/launcher.py +235 -0
  40. cobweb_/launchers/launcher_air.py +88 -0
  41. cobweb_/launchers/launcher_api.py +221 -0
  42. cobweb_/launchers/launcher_pro.py +222 -0
  43. cobweb_/pipelines/__init__.py +3 -0
  44. cobweb_/pipelines/pipeline.py +69 -0
  45. cobweb_/pipelines/pipeline_console.py +22 -0
  46. cobweb_/pipelines/pipeline_loghub.py +34 -0
  47. cobweb_/setting.py +74 -0
  48. cobweb_/utils/__init__.py +5 -0
  49. cobweb_/utils/bloom.py +58 -0
  50. cobweb_/utils/dotting.py +32 -0
  51. cobweb_/utils/oss.py +94 -0
  52. cobweb_/utils/tools.py +42 -0
  53. {cobweb_launcher-1.2.49.dist-info → cobweb_launcher-1.3.2.dist-info}/METADATA +1 -1
  54. cobweb_launcher-1.3.2.dist-info/RECORD +110 -0
  55. cobweb_launcher-1.3.2.dist-info/top_level.txt +2 -0
  56. cobweb_new/__init__.py +2 -0
  57. cobweb_new/base/__init__.py +72 -0
  58. cobweb_new/base/common_queue.py +53 -0
  59. cobweb_new/base/decorators.py +72 -0
  60. cobweb_new/base/item.py +46 -0
  61. cobweb_new/base/log.py +94 -0
  62. cobweb_new/base/request.py +82 -0
  63. cobweb_new/base/response.py +23 -0
  64. cobweb_new/base/seed.py +118 -0
  65. cobweb_new/constant.py +105 -0
  66. cobweb_new/crawlers/__init__.py +1 -0
  67. cobweb_new/crawlers/crawler-new.py +85 -0
  68. cobweb_new/crawlers/crawler.py +170 -0
  69. cobweb_new/db/__init__.py +2 -0
  70. cobweb_new/db/api_db.py +82 -0
  71. cobweb_new/db/redis_db.py +158 -0
  72. cobweb_new/exceptions/__init__.py +1 -0
  73. cobweb_new/exceptions/oss_db_exception.py +28 -0
  74. cobweb_new/launchers/__init__.py +3 -0
  75. cobweb_new/launchers/launcher.py +237 -0
  76. cobweb_new/launchers/launcher_air.py +88 -0
  77. cobweb_new/launchers/launcher_api.py +161 -0
  78. cobweb_new/launchers/launcher_pro.py +96 -0
  79. cobweb_new/launchers/tesss.py +47 -0
  80. cobweb_new/pipelines/__init__.py +3 -0
  81. cobweb_new/pipelines/pipeline.py +68 -0
  82. cobweb_new/pipelines/pipeline_console.py +22 -0
  83. cobweb_new/pipelines/pipeline_loghub.py +34 -0
  84. cobweb_new/setting.py +95 -0
  85. cobweb_new/utils/__init__.py +5 -0
  86. cobweb_new/utils/bloom.py +58 -0
  87. cobweb_new/utils/oss.py +94 -0
  88. cobweb_new/utils/tools.py +42 -0
  89. cobweb/schedulers/scheduler_api.py +0 -72
  90. cobweb_launcher-1.2.49.dist-info/RECORD +0 -44
  91. cobweb_launcher-1.2.49.dist-info/top_level.txt +0 -1
  92. {cobweb_launcher-1.2.49.dist-info → cobweb_launcher-1.3.2.dist-info}/LICENSE +0 -0
  93. {cobweb_launcher-1.2.49.dist-info → cobweb_launcher-1.3.2.dist-info}/WHEEL +0 -0
cobweb_/base/seed.py ADDED
@@ -0,0 +1,114 @@
1
+ import json
2
+ import time
3
+ import hashlib
4
+
5
+
6
class SeedParams:
    """Scheduling metadata carried alongside a Seed.

    Falsy inputs (None, 0, "") fall back to the defaults:
    retry -> 0, priority -> 300, seed_version -> current unix time.
    """

    def __init__(self, retry, priority, seed_version, seed_status=None):
        self.retry = retry if retry else 0
        self.priority = priority if priority else 300
        self.seed_version = seed_version if seed_version else int(time.time())
        self.seed_status = seed_status
13
+
14
+
15
class Seed:
    """A crawl seed: arbitrary payload attributes plus scheduling params.

    ``seed`` may be a JSON string/bytes (parsed into attributes), a plain
    URL string (stored as ``self.url``), or a dict.  The scheduling fields
    listed in ``__SEED_PARAMS__`` are kept on ``self.params`` (a
    ``SeedParams``) rather than on the instance itself.
    """

    # Attribute names that belong to SeedParams, not to the seed body.
    __SEED_PARAMS__ = [
        "retry",
        "priority",
        "seed_version",
        "seed_status"
    ]

    def __init__(
        self,
        seed,
        sid=None,
        retry=None,
        priority=None,
        seed_version=None,
        seed_status=None,
        **kwargs
    ):
        if isinstance(seed, (str, bytes)):
            try:
                item = json.loads(seed)
                self._init_seed(item)
            except json.JSONDecodeError:
                # Not JSON: treat the raw string as the url.
                self.__setattr__("url", seed)
        elif isinstance(seed, dict):
            self._init_seed(seed)
        else:
            # Bug fix: previously raised TypeError(Exception(...)), which
            # nested an Exception inside the TypeError and garbled the
            # message shown to the caller.
            raise TypeError(
                f"seed type error, "
                f"must be str or dict! "
                f"seed: {seed}"
            )

        seed_params = {
            "retry": retry,
            "priority": priority,
            "seed_version": seed_version,
            "seed_status": seed_status,
        }

        if kwargs:
            self._init_seed(kwargs)
            # Scheduling keys passed through kwargs override the named args.
            seed_params.update({
                k: v for k, v in kwargs.items()
                if k in self.__SEED_PARAMS__
            })
        if sid or not getattr(self, "sid", None):
            self._init_id(sid)
        self.params = SeedParams(**seed_params)

    def __getattr__(self, name):
        # Missing attributes read as None instead of raising AttributeError.
        return None

    def __setitem__(self, key, value):
        setattr(self, key, value)

    def __getitem__(self, item):
        return getattr(self, item)

    def __str__(self):
        # Bug fix: self.__dict__ contains a SeedParams instance, which is
        # not JSON-serializable; serialize nested objects via their __dict__.
        return json.dumps(
            self.__dict__,
            ensure_ascii=False,
            default=lambda o: o.__dict__,
        )

    def __repr__(self):
        chars = [f"{k}={v}" for k, v in self.__dict__.items()]
        return f'{self.__class__.__name__}({", ".join(chars)})'

    def _init_seed(self, seed_info: dict):
        """Copy every non-scheduling key of *seed_info* onto the instance."""
        for k, v in seed_info.items():
            if k not in self.__SEED_PARAMS__:
                self.__setattr__(k, v)

    def _init_id(self, sid):
        """Set ``self.sid``; defaults to the md5 of the serialized seed body."""
        if not sid:
            sid = hashlib.md5(self.to_string.encode()).hexdigest()
        self.__setattr__("sid", sid)

    @property
    def to_dict(self) -> dict:
        """Seed body as a dict, without the scheduling ``params``."""
        seed = self.__dict__.copy()
        if seed.get("params"):
            del seed["params"]
        return seed

    @property
    def to_string(self) -> str:
        """Compact JSON of the seed body (no ``params``)."""
        return json.dumps(
            self.to_dict,
            ensure_ascii=False,
            separators=(",", ":")
        )

    @property
    def get_all(self):
        """Compact JSON of the whole instance, including ``params``."""
        # Bug fix: same as __str__ — SeedParams needs a default serializer,
        # otherwise json.dumps raised TypeError.
        return json.dumps(
            self.__dict__,
            ensure_ascii=False,
            separators=(",", ":"),
            default=lambda o: o.__dict__,
        )
114
+
cobweb_/constant.py ADDED
@@ -0,0 +1,94 @@
1
+
2
class CrawlerModel:
    """Dotted import paths of the built-in crawler implementations."""

    default = "cobweb.crawlers.Crawler"
    file_air = "cobweb.crawlers.FileCrawlerAir"
    file_pro = "cobweb.crawlers.FileCrawlerPro"
7
+
8
+
9
class LauncherModel:
    """Launcher run-mode marker strings."""
    task = "launcher model: task"
    resident = "launcher model: resident"
12
+
13
+
14
class DownloadModel:
    """Download-mode marker strings."""
    common = "download model: common"
    file = "download model: file"
17
+
18
+
19
class LogModel:
    """Log-verbosity marker strings."""
    simple = "log model: simple"
    common = "log model: common"
    detailed = "log model: detailed"
23
+
24
+
25
class DealModel:
    """Seed-disposition marker strings yielded by crawler stages."""
    fail = "deal model: fail"
    done = "deal model: done"
    poll = "deal model: poll"
29
+
30
+
31
class LogTemplate:
    """Format-string templates for polling and download log output.

    NOTE(review): the leading whitespace inside the triple-quoted templates
    was reconstructed from a diff view and may not match the original
    byte-for-byte — confirm against the released package.
    """

    console_item = """
----------------------- start - console pipeline -----------------
种子详情 \n{seed_detail}
解析详情 \n{parse_detail}
----------------------- end - console pipeline ------------------
"""

    launcher_air_polling = """
----------------------- start - 轮训日志: {task} -----------------
内存队列
    种子数: {doing_len}
    待消费: {todo_len}
    已消费: {done_len}
存储队列
    待上传: {upload_len}
----------------------- end - 轮训日志: {task} ------------------
"""

    launcher_pro_polling = """
----------------------- start - 轮训日志: {task} -----------------
内存队列
    种子数: {doing_len}
    待消费: {todo_len}
    已消费: {done_len}
redis队列
    种子数: {redis_seed_count}
    待消费: {redis_todo_len}
    消费中: {redis_doing_len}
存储队列
    待上传: {upload_len}
----------------------- end - 轮训日志: {task} ------------------
"""

    download_exception = """
----------------------- download exception -----------------------
种子详情 \n{detail}
种子参数
    retry : {retry}
    priority : {priority}
    seed_version : {seed_version}
    identifier : {identifier}
exception
    msg : {exception}
------------------------------------------------------------------
"""

    download_info = """
------------------------ download info ---------------------------
种子详情 \n{detail}
种子参数
    retry : {retry}
    priority : {priority}
    seed_version : {seed_version}
    identifier : {identifier}
response
    status : {status} \n{response}
------------------------------------------------------------------
"""

    @staticmethod
    def log_info(item: dict) -> str:
        """Render *item* as indented, column-aligned ``key: value`` lines."""
        return "\n".join([" " * 12 + f"{str(k).ljust(14)}: {str(v)}" for k, v in item.items()])
@@ -0,0 +1 @@
1
+ from .crawler import Crawler
@@ -0,0 +1,184 @@
1
+ import json
2
+ import threading
3
+ import time
4
+ import traceback
5
+ from inspect import isgenerator
6
+ from typing import Union, Callable, Mapping
7
+
8
+ from cobweb.constant import DealModel, LogTemplate
9
+ from cobweb.base import (
10
+ Queue,
11
+ Seed,
12
+ BaseItem,
13
+ Request,
14
+ Response,
15
+ ConsoleItem,
16
+ logger
17
+ )
18
+ from cobweb.utils import LoghubDot
19
+
20
+
21
class Crawler(threading.Thread):
    """Multi-threaded crawl worker.

    ``run`` starts ``thread_num`` threads executing :meth:`spider`, which
    repeatedly pulls a seed via the injected ``get_seed`` callback, runs the
    request -> download -> parse generator pipeline, and routes every yielded
    value through :meth:`distribute`.  All queue interaction is delegated to
    the callables injected by the launcher, so this class holds no queues.
    """

    def __init__(
        self,
        task: str,
        project: str,
        stop: threading.Event,
        pause: threading.Event,
        # launcher_queue: Union[Mapping[str, Queue]],
        get_seed: Callable,
        set_seed: Callable,
        add_seed: Callable,
        delete_seed: Callable,
        upload_data: Callable,
        custom_func: Union[Mapping[str, Callable]],
        thread_num: int,
        max_retries: int,
        time_sleep: int,
    ):
        super().__init__()
        self.task = task
        self.project = project
        self._stop = stop
        self._pause = pause
        self._get_seed = get_seed
        self._set_seed = set_seed
        self._add_seed = add_seed
        self._delete_seed = delete_seed
        self._upload_data = upload_data

        # Launcher-supplied overrides (e.g. a custom request/download/parse)
        # are bound onto the instance under their given names, shadowing the
        # static defaults below.
        for func_name, _callable in custom_func.items():
            if isinstance(_callable, Callable):
                self.__setattr__(func_name, _callable)

        self.thread_num = thread_num
        self.time_sleep = time_sleep
        self.max_retries = max_retries

        self.loghub_dot = LoghubDot()

    @staticmethod
    def request(seed: Seed) -> Union[Request, BaseItem]:
        """Default request stage: yield one Request for the seed's url."""
        yield Request(seed.url, seed, timeout=5)

    @staticmethod
    def download(item: Request) -> Union[Seed, BaseItem, Response, str]:
        """Default download stage: execute the Request, wrap it as a Response."""
        response = item.download()
        yield Response(item.seed, response, **item.to_dict)

    @staticmethod
    def parse(item: Response) -> BaseItem:
        """Default parse stage: dump the raw body to the console pipeline."""
        upload_item = item.to_dict
        upload_item["text"] = item.response.text
        yield ConsoleItem(item.seed, data=json.dumps(upload_item, ensure_ascii=False))

    # def get_seed(self) -> Seed:
    #     return self._todo.pop()

    def distribute(self, item, seed):
        """Route a pipeline-yielded value to the matching launcher callback.

        BaseItem -> upload queue; Seed -> new seed; DealModel strings control
        the fate of the *current* seed (poll = retry later, done/fail = drop).
        """
        if isinstance(item, BaseItem):
            self._upload_data(item)
        elif isinstance(item, Seed):
            self._add_seed(item)
        elif isinstance(item, str) and item == DealModel.poll:
            self._set_seed(seed)
        elif isinstance(item, str) and item == DealModel.done:
            self._delete_seed(seed)
        elif isinstance(item, str) and item == DealModel.fail:
            seed.params.seed_status = DealModel.fail
            self._delete_seed(seed)
        else:
            raise TypeError("yield value type error!")

    def spider(self):
        """Worker loop: consume seeds until the stop event is set.

        ``iterator_status`` tracks whether the innermost active stage yielded
        anything; a pipeline that yields nothing at all is treated as an
        error so silently-empty generators don't lose seeds.
        """
        while not self._stop.is_set():

            seed = self._get_seed()

            if not seed:
                time.sleep(1)
                continue

            elif seed.params.retry > self.max_retries:
                # Retry budget exhausted: mark failed and drop the seed.
                seed.params.seed_status = DealModel.fail
                self._delete_seed(seed)
                continue

            seed_detail_log_info = LogTemplate.log_info(seed.to_dict)

            try:
                request_iterators = self.request(seed)

                if not isgenerator(request_iterators):
                    raise TypeError("request function isn't a generator!")

                iterator_status = False

                for request_item in request_iterators:

                    iterator_status = True

                    if isinstance(request_item, Request):
                        iterator_status = False
                        start_time = time.time()
                        download_iterators = self.download(request_item)
                        if not isgenerator(download_iterators):
                            raise TypeError("download function isn't a generator")

                        for download_item in download_iterators:
                            iterator_status = True
                            if isinstance(download_item, Response):
                                iterator_status = False
                                logger.info(LogTemplate.download_info.format(
                                    detail=seed_detail_log_info,
                                    retry=seed.params.retry,
                                    priority=seed.params.priority,
                                    seed_version=seed.params.seed_version,
                                    identifier=seed.identifier or "",
                                    status=download_item.response,
                                    response=LogTemplate.log_info(download_item.to_dict)
                                ))
                            # NOTE(review): duplicate isinstance check — could
                            # be merged with the logging branch above.
                            if isinstance(download_item, Response):
                                end_time = time.time()
                                self.loghub_dot.build(
                                    topic=f"{self.project}:{self.task}",
                                    cost_time=end_time - start_time,
                                    **download_item.to_dict
                                )
                                parse_iterators = self.parse(download_item)
                                if not isgenerator(parse_iterators):
                                    raise TypeError("parse function isn't a generator")
                                for parse_item in parse_iterators:
                                    iterator_status = True
                                    if isinstance(parse_item, Response):
                                        raise TypeError("upload_item can't be a Response instance")
                                    self.distribute(parse_item, seed)
                            else:
                                # Non-Response values (Seed / BaseItem / deal
                                # string) are routed directly.
                                self.distribute(download_item, seed)
                    else:
                        self.distribute(request_item, seed)

                if not iterator_status:
                    raise ValueError("request/download/parse function yield value error!")
            except Exception as e:
                logger.info(LogTemplate.download_exception.format(
                    detail=seed_detail_log_info,
                    retry=seed.params.retry,
                    priority=seed.params.priority,
                    seed_version=seed.params.seed_version,
                    identifier=seed.identifier or "",
                    exception=''.join(traceback.format_exception(type(e), e, e.__traceback__))
                ))
                # Any stage failure re-queues the seed with a linear backoff.
                seed.params.retry += 1
                # self._todo.push(seed)
                self._set_seed(seed)
                time.sleep(self.time_sleep * seed.params.retry)
            finally:
                time.sleep(0.1)
        logger.info("spider thread close")

    def run(self):
        """Spawn the configured number of spider worker threads."""
        for index in range(self.thread_num):
            threading.Thread(name=f"spider_{index}", target=self.spider).start()
184
+
cobweb_/db/__init__.py ADDED
@@ -0,0 +1,2 @@
1
+ from .redis_db import RedisDB
2
+ from .api_db import ApiDB
cobweb_/db/api_db.py ADDED
@@ -0,0 +1,82 @@
1
+ import os
2
+ import json
3
+ import requests
4
+
5
+
6
class ApiDB:
    """Thin HTTP client for a redis-proxy API.

    Every method maps 1:1 onto a proxy endpoint and returns the ``data``
    field of the JSON response, or ``None`` on any failure (network error,
    bad JSON, missing key) — callers treat ``None`` as "unavailable".
    """

    # Seconds before an HTTP call is abandoned.  The original code passed
    # no timeout, so a dead proxy would hang the caller forever.
    _TIMEOUT = 10

    def __init__(self, host=None, **kwargs):
        self.host = host or os.getenv("REDIS_API_HOST", "http://127.0.0.1:4396")

    def _get_response(self, api, params: dict = None):
        """GET ``host + api``; return the JSON ``data`` field or None."""
        try:
            url = self.host + api
            response = requests.get(url, params=params, timeout=self._TIMEOUT)
            json_data = response.json()
            response.close()
            return json_data["data"]
        # Bug fix: was a bare `except:`, which also swallowed SystemExit
        # and KeyboardInterrupt.
        except Exception:
            return None

    def _post_response(self, api, params: dict = None, data: dict = None):
        """POST JSON to ``host + api``; return the ``data`` field or None."""
        try:
            url = self.host + api
            headers = {"Content-Type": "application/json"}
            response = requests.post(
                url, headers=headers, params=params,
                data=json.dumps(data), timeout=self._TIMEOUT,
            )
            json_data = response.json()
            response.close()
            return json_data["data"]
        except Exception:
            return None

    def get(self, name):
        """GET the value stored at *name*."""
        return self._get_response(api="/get", params=dict(name=name))

    def setnx(self, name, value=""):
        """Set *name* only if it does not exist."""
        return self._get_response(api="/setnx", params=dict(name=name, value=value))

    def setex(self, name, t, value=""):
        """Set *name* with a TTL of *t* seconds."""
        return self._get_response(api="/setex", params=dict(name=name, value=value, t=t))

    def expire(self, name, t, nx: bool = False, xx: bool = False, gt: bool = False, lt: bool = False):
        """Set the TTL of *name*; nx/xx/gt/lt mirror the redis EXPIRE flags."""
        return self._get_response(api="/expire", params=dict(name=name, t=t, nx=nx, xx=xx, gt=gt, lt=lt))

    def ttl(self, name):
        """Remaining TTL of *name* in seconds."""
        return self._get_response(api="/ttl", params=dict(name=name))

    def delete(self, name):
        """Delete *name*."""
        return self._get_response(api="/delete", params=dict(name=name))

    def exists(self, name):
        """Whether *name* exists."""
        return self._get_response(api="/exists", params=dict(name=name))

    def incrby(self, name, value):
        """Increment the integer at *name* by *value*."""
        return self._get_response(api="/incrby", params=dict(name=name, value=value))

    def zcard(self, name) -> int:
        # Annotation fix: the endpoint returns the member count, not a bool.
        return self._get_response(api="/zcard", params=dict(name=name))

    def zadd(self, name, item: dict, **kwargs):
        """Add the member->score mapping *item* to the sorted set *name*."""
        return self._post_response(api="/zadd", data=dict(name=name, mapping=item, **kwargs))

    def zrem(self, name, *values):
        """Remove *values* from the sorted set *name*."""
        return self._post_response(api="/zrem", data=dict(name=name, values=values))

    def zcount(self, name, _min, _max):
        """Count sorted-set members with score in [_min, _max]."""
        return self._get_response(api="/zcount", params=dict(name=name, min=_min, max=_max))

    def lock(self, name, t=15) -> bool:
        """Acquire a best-effort lock on *name* with TTL *t*."""
        return self._get_response(api="/lock", params=dict(name=name, t=t))

    def auto_incr(self, name, t=15, limit=1000) -> bool:
        """Proxy-side rate-limited auto-increment on *name*."""
        return self._get_response(api="/auto_incr", params=dict(name=name, t=t, limit=limit))

    def members(self, name, score, start=0, count=5000, _min="-inf", _max="+inf"):
        """Fetch (and rescore) members of the sorted set *name*."""
        return self._get_response(api="/members", params=dict(name=name, score=score, start=start, count=count, min=_min, max=_max))

    def done(self, name: list, *values):
        """Mark *values* as consumed (zrem from todo, sadd to done)."""
        return self._post_response(api="/done", data=dict(name=name, values=values))
79
+
80
+
81
+
82
+
cobweb_/db/redis_db.py ADDED
@@ -0,0 +1,130 @@
1
+ import redis
2
+ from cobweb import setting
3
+
4
+
5
class RedisDB:
    """Pooled redis client plus the Lua scripts used by the scheduler."""

    def __init__(self, **kwargs):
        # Explicit kwargs win; otherwise fall back to the project setting.
        redis_config = kwargs or setting.REDIS_CONFIG
        pool = redis.ConnectionPool(**redis_config)
        self._client = redis.Redis(connection_pool=pool)

    def setnx(self, name, value=""):
        """SET *name* only if it does not already exist."""
        return self._client.setnx(name, value)

    def setex(self, name, t, value=""):
        """SET *name* with a TTL of *t* seconds."""
        return self._client.setex(name, t, value)

    def expire(self, name, t, nx: bool = False, xx: bool = False, gt: bool = False, lt: bool = False):
        """Set the TTL of *name*; flags mirror redis EXPIRE options."""
        return self._client.expire(name, t, nx, xx, gt, lt)

    def ttl(self, name):
        """Remaining TTL of *name* in seconds."""
        return self._client.ttl(name)

    def delete(self, name):
        """Delete *name*."""
        return self._client.delete(name)

    def exists(self, *name) -> bool:
        """Number of the given keys that exist (truthy if any do)."""
        return self._client.exists(*name)

    def sadd(self, name, value):
        """Add *value* to the set *name*."""
        return self._client.sadd(name, value)

    def zcard(self, name) -> int:
        # Annotation fix: ZCARD returns the member count, not a bool.
        return self._client.zcard(name)

    def zadd(self, name, item: dict, **kwargs):
        """Add the member->score mapping *item* to the sorted set *name*."""
        return self._client.zadd(name, item, **kwargs)

    def zrem(self, name, *value):
        """Remove members from the sorted set *name*."""
        return self._client.zrem(name, *value)

    def zcount(self, name, _min, _max):
        """Count sorted-set members with score in [_min, _max]."""
        return self._client.zcount(name, _min, _max)

    # def zrangebyscore(self, name, _min, _max, start, num, withscores: bool = False, *args):
    #     return self._client.zrangebyscore(name, _min, _max, start, num, withscores, *args)

    def lua(self, script: str, keys: list = None, args: list = None):
        """EVAL *script* with the given KEYS/ARGV."""
        keys = keys or []
        args = args or []
        keys_count = len(keys)
        return self._client.eval(script, keys_count, *keys, *args)

    def lua_sha(self, sha1: str, keys: list = None, args: list = None):
        """EVALSHA a previously loaded script."""
        keys = keys or []
        args = args or []
        keys_count = len(keys)
        return self._client.evalsha(sha1, keys_count, *keys, *args)

    def execute_lua(self, lua_script: str, keys: list, *args):
        """Register *lua_script* and invoke it with KEYS=*keys*, ARGV=*args*."""
        execute = self._client.register_script(lua_script)
        return execute(keys=keys, args=args)

    def lock(self, key, t=15) -> bool:
        """Atomically acquire a lock key with TTL *t*; True if acquired."""
        lua_script = """
            local status = redis.call('setnx', KEYS[1], 1)
            if ( status == 1 ) then
                redis.call('expire', KEYS[1], ARGV[1])
            end
            return status
        """
        status = self.execute_lua(lua_script, [key], t)
        return bool(status)

    def members(self, key, score, start=0, count=5000, _min="-inf", _max="+inf") -> list:
        """Pop up to *count* members by score and re-score them in place.

        Returns ``[(member, original_priority), ...]``.

        NOTE(review): inside the script, ``type(count) == string`` compares
        against the Lua *string library table*, not the literal "string", so
        that branch can never be taken (ARGV values make ``type(count)``
        return the string "string") — the LIMIT branch always runs. Confirm
        whether the unlimited branch was ever intended.
        """
        lua_script = """
            local min = ARGV[1]
            local max = ARGV[2]
            local start = ARGV[3]
            local count = ARGV[4]
            local score = ARGV[5]
            local members = nil

            if ( type(count) == string ) then
                members = redis.call('zrangebyscore', KEYS[1], min, max, 'WITHSCORES')
            else
                members = redis.call('zrangebyscore', KEYS[1], min, max, 'WITHSCORES', 'limit', start, count)
            end

            local result = {}

            for i = 1, #members, 2 do
                local priority = nil
                local member = members[i]
                local originPriority = nil
                if ( members[i+1] + 0 < 0 ) then
                    originPriority = math.ceil(members[i+1]) * 1000 - members[i+1] * 1000
                else
                    originPriority = math.floor(members[i+1])
                end

                if ( score + 0 >= 1000 ) then
                    priority = -score - originPriority / 1000
                elseif ( score + 0 == 0 ) then
                    priority = originPriority
                else
                    originPriority = score
                    priority = score
                end
                redis.call('zadd', KEYS[1], priority, member)
                table.insert(result, member)
                table.insert(result, originPriority)
            end

            return result
        """
        members = self.execute_lua(lua_script, [key], _min, _max, start, count, score)
        return [(members[i].decode(), int(members[i + 1])) for i in range(0, len(members), 2)]

    def done(self, keys: list, *args) -> None:
        # Annotation fix: the script's return value is discarded, so this
        # method returns None (was annotated -> list).
        """Move members out of the todo zset (KEYS[1]) into the done set (KEYS[2])."""
        lua_script = """
            for i, member in ipairs(ARGV) do
                redis.call("zrem", KEYS[1], member)
                redis.call("sadd", KEYS[2], member)
            end
        """
        self.execute_lua(lua_script, keys, *args)
128
+
129
+
130
+
@@ -0,0 +1 @@
1
+ from .oss_db_exception import *
@@ -0,0 +1,28 @@
1
class OssDBException(Exception):
    """Base OSS client exception that all others inherit."""


class OssDBMergeError(OssDBException):
    """Raised when an OSS merge operation fails."""


class OssDBPutPartError(OssDBException):
    """Raised when an OSS multipart upload-part operation fails."""


class OssDBPutObjError(OssDBException):
    """Raised when an OSS put-object upload fails."""


class OssDBAppendObjError(OssDBException):
    """Raised when an OSS append-object upload fails."""


class OssDBInitPartError(OssDBException):
    """Raised when initialising an OSS multipart upload fails."""
@@ -0,0 +1,3 @@
1
+ from .launcher_air import LauncherAir
2
+ from .launcher_pro import LauncherPro
3
+ from .launcher_api import LauncherApi