cobweb-launcher 1.2.49__py3-none-any.whl → 1.3.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (93)
  1. cobweb/base/__init__.py +141 -4
  2. cobweb/base/basic.py +28 -82
  3. cobweb/base/common_queue.py +13 -0
  4. cobweb/base/dotting.py +1 -1
  5. cobweb/base/request.py +14 -2
  6. cobweb/base/seed.py +10 -6
  7. cobweb/constant.py +16 -0
  8. cobweb/crawlers/crawler.py +51 -181
  9. cobweb/db/redis_db.py +28 -0
  10. cobweb/launchers/__init__.py +2 -2
  11. cobweb/launchers/launcher.py +110 -141
  12. cobweb/launchers/launcher_api.py +66 -114
  13. cobweb/launchers/launcher_pro.py +76 -194
  14. cobweb/pipelines/base_pipeline.py +54 -0
  15. cobweb/pipelines/loghub_pipeline.py +34 -0
  16. cobweb/pipelines/pipeline.py +25 -49
  17. cobweb/setting.py +29 -6
  18. cobweb/utils/dotting.py +10 -42
  19. cobweb_/__init__.py +2 -0
  20. cobweb_/base/__init__.py +9 -0
  21. cobweb_/base/common_queue.py +30 -0
  22. cobweb_/base/decorators.py +40 -0
  23. cobweb_/base/item.py +46 -0
  24. cobweb_/base/log.py +94 -0
  25. cobweb_/base/request.py +82 -0
  26. cobweb_/base/response.py +23 -0
  27. cobweb_/base/seed.py +114 -0
  28. cobweb_/constant.py +94 -0
  29. cobweb_/crawlers/__init__.py +1 -0
  30. cobweb_/crawlers/crawler.py +184 -0
  31. cobweb_/db/__init__.py +2 -0
  32. cobweb_/db/api_db.py +82 -0
  33. cobweb_/db/redis_db.py +130 -0
  34. cobweb_/exceptions/__init__.py +1 -0
  35. cobweb_/exceptions/oss_db_exception.py +28 -0
  36. cobweb_/launchers/__init__.py +3 -0
  37. cobweb_/launchers/launcher.py +235 -0
  38. cobweb_/launchers/launcher_air.py +88 -0
  39. cobweb_/launchers/launcher_api.py +221 -0
  40. cobweb_/launchers/launcher_pro.py +222 -0
  41. cobweb_/pipelines/__init__.py +3 -0
  42. cobweb_/pipelines/pipeline.py +69 -0
  43. cobweb_/pipelines/pipeline_console.py +22 -0
  44. cobweb_/pipelines/pipeline_loghub.py +34 -0
  45. cobweb_/setting.py +74 -0
  46. cobweb_/utils/__init__.py +5 -0
  47. cobweb_/utils/bloom.py +58 -0
  48. cobweb_/utils/dotting.py +32 -0
  49. cobweb_/utils/oss.py +94 -0
  50. cobweb_/utils/tools.py +42 -0
  51. {cobweb_launcher-1.2.49.dist-info → cobweb_launcher-1.3.1.dist-info}/METADATA +1 -1
  52. cobweb_launcher-1.3.1.dist-info/RECORD +108 -0
  53. cobweb_launcher-1.3.1.dist-info/top_level.txt +2 -0
  54. cobweb_new/__init__.py +2 -0
  55. cobweb_new/base/__init__.py +72 -0
  56. cobweb_new/base/common_queue.py +53 -0
  57. cobweb_new/base/decorators.py +72 -0
  58. cobweb_new/base/item.py +46 -0
  59. cobweb_new/base/log.py +94 -0
  60. cobweb_new/base/request.py +82 -0
  61. cobweb_new/base/response.py +23 -0
  62. cobweb_new/base/seed.py +118 -0
  63. cobweb_new/constant.py +105 -0
  64. cobweb_new/crawlers/__init__.py +1 -0
  65. cobweb_new/crawlers/crawler-new.py +85 -0
  66. cobweb_new/crawlers/crawler.py +170 -0
  67. cobweb_new/db/__init__.py +2 -0
  68. cobweb_new/db/api_db.py +82 -0
  69. cobweb_new/db/redis_db.py +158 -0
  70. cobweb_new/exceptions/__init__.py +1 -0
  71. cobweb_new/exceptions/oss_db_exception.py +28 -0
  72. cobweb_new/launchers/__init__.py +3 -0
  73. cobweb_new/launchers/launcher.py +237 -0
  74. cobweb_new/launchers/launcher_air.py +88 -0
  75. cobweb_new/launchers/launcher_api.py +161 -0
  76. cobweb_new/launchers/launcher_pro.py +96 -0
  77. cobweb_new/launchers/tesss.py +47 -0
  78. cobweb_new/pipelines/__init__.py +3 -0
  79. cobweb_new/pipelines/pipeline.py +68 -0
  80. cobweb_new/pipelines/pipeline_console.py +22 -0
  81. cobweb_new/pipelines/pipeline_loghub.py +34 -0
  82. cobweb_new/setting.py +95 -0
  83. cobweb_new/utils/__init__.py +5 -0
  84. cobweb_new/utils/bloom.py +58 -0
  85. cobweb_new/utils/oss.py +94 -0
  86. cobweb_new/utils/tools.py +42 -0
  87. cobweb/schedulers/__init__.py +0 -3
  88. cobweb/schedulers/scheduler_api.py +0 -72
  89. cobweb/schedulers/scheduler_redis.py +0 -72
  90. cobweb_launcher-1.2.49.dist-info/RECORD +0 -44
  91. cobweb_launcher-1.2.49.dist-info/top_level.txt +0 -1
  92. {cobweb_launcher-1.2.49.dist-info → cobweb_launcher-1.3.1.dist-info}/LICENSE +0 -0
  93. {cobweb_launcher-1.2.49.dist-info → cobweb_launcher-1.3.1.dist-info}/WHEEL +0 -0
cobweb_new/constant.py ADDED
@@ -0,0 +1,105 @@
+
+class CrawlerModel:
+
+    default = "cobweb.crawlers.Crawler"
+    file_air = "cobweb.crawlers.FileCrawlerAir"
+    file_pro = "cobweb.crawlers.FileCrawlerPro"
+
+
+class LauncherModel:
+    task = "launcher model: task"
+    resident = "launcher model: resident"
+
+
+class DownloadModel:
+    common = "download model: common"
+    file = "download model: file"
+
+
+class LogModel:
+    simple = "log model: simple"
+    common = "log model: common"
+    detailed = "log model: detailed"
+
+
+class DealModel:
+    fail = "deal model: fail"
+    done = "deal model: done"
+    poll = "deal model: poll"
+
+
+class LogTemplate:
+
+    console_item = """
+    ----------------------- start - console pipeline -----------------
+    seed detail \n{seed_detail}
+    parse detail \n{parse_detail}
+    ----------------------- end - console pipeline ------------------
+    """
+
+    launcher_polling = """
+    ----------------------- start - polling log: {task} -----------------
+    memory queue
+        seed count: {doing_len}
+        to consume: {todo_len}
+        consumed: {done_len}
+    storage queue
+        to upload: {upload_len}
+    ----------------------- end - polling log: {task} ------------------
+    """
+
+    launcher_air_polling = """
+    ----------------------- start - polling log: {task} -----------------
+    memory queue
+        seed count: {doing_len}
+        to consume: {todo_len}
+        consumed: {done_len}
+    storage queue
+        to upload: {upload_len}
+    ----------------------- end - polling log: {task} ------------------
+    """
+
+    launcher_pro_polling = """
+    ----------------------- start - polling log: {task} -----------------
+    memory queue
+        seed count: {doing_len}
+        to consume: {todo_len}
+        consumed: {done_len}
+    redis queue
+        seed count: {redis_seed_count}
+        to consume: {redis_todo_len}
+        consuming: {redis_doing_len}
+    storage queue
+        to upload: {upload_len}
+    ----------------------- end - polling log: {task} ------------------
+    """
+
+    download_exception = """
+    ----------------------- download exception -----------------------
+    seed detail \n{detail}
+    seed params
+        retry : {retry}
+        priority : {priority}
+        seed_version : {seed_version}
+        identifier : {identifier}
+    exception
+        msg : {exception}
+    ------------------------------------------------------------------
+    """
+
+    download_info = """
+    ------------------------ download info ---------------------------
+    seed detail \n{detail}
+    seed params
+        retry : {retry}
+        priority : {priority}
+        seed_version : {seed_version}
+        identifier : {identifier}
+    response
+        status : {status} \n{response}
+    ------------------------------------------------------------------
+    """
+
+    @staticmethod
+    def log_info(item: dict) -> str:
+        return "\n".join([" " * 12 + f"{str(k).ljust(14)}: {str(v)}" for k, v in item.items()])
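LogTemplate.log_info renders a dict as the indented key/value block referenced by the download and polling templates above. A minimal standalone sketch of the same formatting (the sample seed fields are illustrative, not taken from the package):

```python
def log_info(item: dict) -> str:
    # mirrors LogTemplate.log_info: 12-space indent, key column padded to 14 chars
    return "\n".join(" " * 12 + f"{str(k).ljust(14)}: {str(v)}" for k, v in item.items())

# hypothetical seed fields, for illustration only
print(log_info({"url": "https://example.com", "retry": 1, "priority": 300}))
```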
cobweb_new/crawlers/__init__.py ADDED
@@ -0,0 +1 @@
+from .crawler import Crawler
cobweb_new/crawlers/crawler-new.py ADDED
@@ -0,0 +1,85 @@
+import json
+import time
+import threading
+from typing import Union, Callable, Mapping
+
+import setting
+from cobweb.base import (
+    Seed,
+    BaseItem,
+    Request,
+    Response,
+    ConsoleItem,
+    decorators,
+    TaskQueue,
+    logger
+)
+from constant import DealModel
+
+
+class Crawler(threading.Thread):
+
+    def __init__(self, custom_func: Union[Mapping[str, Callable]]):
+        super().__init__()
+
+        for func_name, _callable in custom_func.items():
+            if isinstance(_callable, Callable):
+                self.__setattr__(func_name, _callable)
+
+        self.spider_max_retries = setting.SPIDER_MAX_RETRIES
+        self.request_queue_size = setting.REQUEST_QUEUE_SIZE
+        self.download_queue_size = setting.DOWNLOAD_QUEUE_SIZE
+        self.upload_queue_size = setting.UPLOAD_QUEUE_SIZE
+
+    @staticmethod
+    def request(seed: Seed) -> Union[Request, BaseItem]:
+        yield Request(seed.url, seed, timeout=5)
+
+    @staticmethod
+    def download(item: Request) -> Union[Seed, BaseItem, Response, str]:
+        response = item.download()
+        yield Response(item.seed, response, **item.to_dict)
+
+    @staticmethod
+    def parse(item: Response) -> BaseItem:
+        upload_item = item.to_dict
+        upload_item["text"] = item.response.text
+        yield ConsoleItem(item.seed, data=json.dumps(upload_item, ensure_ascii=False))
+
+    # @decorators.add_thread()
+    @decorators.pause
+    def build_request_item(self):
+        thread_sleep = 0.1
+        if TaskQueue.REQUEST.length >= self.request_queue_size:
+            thread_sleep = 5
+        elif seed := TaskQueue.TODO.pop():
+            if seed.params.retry > self.spider_max_retries:
+                seed.params.seed_status = DealModel.fail
+            else:
+                TaskQueue.process_task(seed, self.request)
+            TaskQueue.DELETE.push(seed)
+        time.sleep(thread_sleep)
+
+    # @decorators.add_thread(num=setting.SPIDER_THREAD_NUM)
+    @decorators.pause
+    def build_download_item(self):
+        thread_sleep = 0.1
+        if TaskQueue.DOWNLOAD.length >= self.download_queue_size:
+            logger.info(f"download queue is full, sleep {thread_sleep}s")
+            thread_sleep = 5
+        elif request_item := TaskQueue.REQUEST.pop():
+            TaskQueue.process_task(request_item, self.download)
+        time.sleep(thread_sleep)
+
+    # @decorators.add_thread()
+    @decorators.pause
+    def build_parse_item(self):
+        thread_sleep = 0.1
+        if TaskQueue.UPLOAD.length >= self.upload_queue_size:
+            logger.info(f"upload queue is full, sleep {thread_sleep}s")
+            thread_sleep = 5
+        if response_item := TaskQueue.RESPONSE.pop():
+            TaskQueue.process_task(response_item, self.parse)
+        time.sleep(thread_sleep)
+
+
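As the class above suggests, spiders customize behavior by overriding the request, download, and parse generator methods, while the build_* loops pull from TaskQueue and hand each yielded value to TaskQueue.process_task. A hedged sketch of what a subclass might look like, reusing only names that appear in this diff (the import path follows the CrawlerModel.default string "cobweb.crawlers.Crawler"):

```python
from cobweb.base import Seed, Request, Response, ConsoleItem
from cobweb.crawlers import Crawler

class MyCrawler(Crawler):

    @staticmethod
    def request(seed: Seed):
        # one Request per seed; extra kwargs are forwarded to the download step
        yield Request(seed.url, seed, timeout=10)

    @staticmethod
    def download(item: Request):
        response = item.download()
        yield Response(item.seed, response, **item.to_dict)

    @staticmethod
    def parse(item: Response):
        # a real spider would typically yield a custom BaseItem bound to a pipeline
        yield ConsoleItem(item.seed, data=item.response.text)
```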
cobweb_new/crawlers/crawler.py ADDED
@@ -0,0 +1,170 @@
+import json
+import threading
+import time
+import traceback
+from inspect import isgenerator
+from typing import Union, Callable, Mapping
+
+from cobweb.constant import DealModel, LogTemplate
+from cobweb.base import (
+    Queue,
+    Seed,
+    BaseItem,
+    Request,
+    Response,
+    ConsoleItem,
+    logger
+)
+
+
+class Crawler(threading.Thread):
+
+    def __init__(
+        self,
+        stop: threading.Event,
+        pause: threading.Event,
+        # launcher_queue: Union[Mapping[str, Queue]],
+        get_seed: Callable,
+        set_seed: Callable,
+        add_seed: Callable,
+        delete_seed: Callable,
+        upload_data: Callable,
+        custom_func: Union[Mapping[str, Callable]],
+        thread_num: int,
+        max_retries: int,
+        time_sleep: int,
+    ):
+        super().__init__()
+
+        self._stop = stop
+        self._pause = pause
+        self._get_seed = get_seed
+        self._set_seed = set_seed
+        self._add_seed = add_seed
+        self._delete_seed = delete_seed
+        self._upload_data = upload_data
+
+        for func_name, _callable in custom_func.items():
+            if isinstance(_callable, Callable):
+                self.__setattr__(func_name, _callable)
+
+        self.thread_num = thread_num
+        self.time_sleep = time_sleep
+        self.max_retries = max_retries
+
+    @staticmethod
+    def request(seed: Seed) -> Union[Request, BaseItem]:
+        yield Request(seed.url, seed, timeout=5)
+
+    @staticmethod
+    def download(item: Request) -> Union[Seed, BaseItem, Response, str]:
+        response = item.download()
+        yield Response(item.seed, response, **item.to_dict)
+
+    @staticmethod
+    def parse(item: Response) -> BaseItem:
+        upload_item = item.to_dict
+        upload_item["text"] = item.response.text
+        yield ConsoleItem(item.seed, data=json.dumps(upload_item, ensure_ascii=False))
+
+    # def get_seed(self) -> Seed:
+    #     return self._todo.pop()
+
+    def distribute(self, item, seed):
+        if isinstance(item, BaseItem):
+            self._upload_data(item)
+        elif isinstance(item, Seed):
+            self._add_seed(item)
+        elif isinstance(item, str) and item == DealModel.poll:
+            self._set_seed(seed)
+        elif isinstance(item, str) and item == DealModel.done:
+            self._delete_seed(seed)
+        elif isinstance(item, str) and item == DealModel.fail:
+            seed.params.seed_status = DealModel.fail
+            self._delete_seed(seed)
+        else:
+            raise TypeError("yield value type error!")
+
+    def spider(self):
+        while not self._stop.is_set():
+
+            seed = self._get_seed()
+
+            if not seed:
+                time.sleep(1)
+                continue
+
+            elif seed.params.retry > self.max_retries:
+                seed.params.seed_status = DealModel.fail
+                self._delete_seed(seed)
+                continue
+
+            seed_detail_log_info = LogTemplate.log_info(seed.to_dict)
+
+            try:
+                request_iterators = self.request(seed)
+
+                if not isgenerator(request_iterators):
+                    raise TypeError("request function isn't a generator!")
+
+                iterator_status = False
+
+                for request_item in request_iterators:
+
+                    iterator_status = True
+
+                    if isinstance(request_item, Request):
+                        iterator_status = False
+                        download_iterators = self.download(request_item)
+                        if not isgenerator(download_iterators):
+                            raise TypeError("download function isn't a generator")
+
+                        for download_item in download_iterators:
+                            iterator_status = True
+                            if isinstance(download_item, Response):
+                                iterator_status = False
+                                logger.info(LogTemplate.download_info.format(
+                                    detail=seed_detail_log_info,
+                                    retry=seed.params.retry,
+                                    priority=seed.params.priority,
+                                    seed_version=seed.params.seed_version,
+                                    identifier=seed.identifier or "",
+                                    status=download_item.response,
+                                    response=LogTemplate.log_info(download_item.to_dict)
+                                ))
+                                parse_iterators = self.parse(download_item)
+                                if not isgenerator(parse_iterators):
+                                    raise TypeError("parse function isn't a generator")
+                                for parse_item in parse_iterators:
+                                    iterator_status = True
+                                    if isinstance(parse_item, Response):
+                                        raise TypeError("upload_item can't be a Response instance")
+                                    self.distribute(parse_item, seed)
+                            else:
+                                self.distribute(download_item, seed)
+                    else:
+                        self.distribute(request_item, seed)
+
+                if not iterator_status:
+                    raise ValueError("request/download/parse function yield value error!")
+            except Exception as e:
+                logger.info(LogTemplate.download_exception.format(
+                    detail=seed_detail_log_info,
+                    retry=seed.params.retry,
+                    priority=seed.params.priority,
+                    seed_version=seed.params.seed_version,
+                    identifier=seed.identifier or "",
+                    exception=''.join(traceback.format_exception(type(e), e, e.__traceback__))
+                ))
+                seed.params.retry += 1
+                # self._todo.push(seed)
+                self._set_seed(seed)
+                time.sleep(self.time_sleep * seed.params.retry)
+            finally:
+                time.sleep(0.1)
+        logger.info("spider thread close")
+
+    def run(self):
+        for index in range(self.thread_num):
+            threading.Thread(name=f"spider_{index}", target=self.spider).start()
+
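The distribute method above routes each value a generator yields by type: a BaseItem is uploaded, a Seed is queued as a new task, and the DealModel strings mark the current seed as polled (set_seed), done, or failed (delete_seed). A sketch of a download override exercising those branches, assuming item.download() returns a requests-style response with a status_code attribute:

```python
from cobweb.constant import DealModel
from cobweb.base import Request, Response
from cobweb.crawlers import Crawler

class RetryAwareCrawler(Crawler):

    @staticmethod
    def download(item: Request):
        response = item.download()
        if response.status_code == 404:
            # permanent failure: distribute() marks the seed failed and deletes it
            yield DealModel.fail
        elif response.status_code == 503:
            # transient failure: distribute() re-queues the seed via set_seed
            yield DealModel.poll
        else:
            # normal path: a Response flows on to parse()
            yield Response(item.seed, response, **item.to_dict)
```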
cobweb_new/db/__init__.py ADDED
@@ -0,0 +1,2 @@
+from .redis_db import RedisDB
+from .api_db import ApiDB
cobweb_new/db/api_db.py ADDED
@@ -0,0 +1,82 @@
+import os
+import json
+import requests
+
+
+class ApiDB:
+
+    def __init__(self, host=None, **kwargs):
+        self.host = host or os.getenv("REDIS_API_HOST", "http://127.0.0.1:4396")
+
+    def _get_response(self, api, params: dict = None):
+        try:
+            url = self.host + api
+            response = requests.get(url, params=params)
+            json_data = response.json()
+            response.close()
+            return json_data["data"]
+        except:
+            return None
+
+    def _post_response(self, api, params: dict = None, data: dict = None):
+        try:
+            url = self.host + api
+            headers = {"Content-Type": "application/json"}
+            response = requests.post(url, headers=headers, params=params, data=json.dumps(data))
+            json_data = response.json()
+            response.close()
+            return json_data["data"]
+        except:
+            return None
+
+    def get(self, name):
+        return self._get_response(api="/get", params=dict(name=name))
+
+    def setnx(self, name, value=""):
+        return self._get_response(api="/setnx", params=dict(name=name, value=value))
+
+    def setex(self, name, t, value=""):
+        return self._get_response(api="/setex", params=dict(name=name, value=value, t=t))
+
+    def expire(self, name, t, nx: bool = False, xx: bool = False, gt: bool = False, lt: bool = False):
+        return self._get_response(api="/expire", params=dict(name=name, t=t, nx=nx, xx=xx, gt=gt, lt=lt))
+
+    def ttl(self, name):
+        return self._get_response(api="/ttl", params=dict(name=name))
+
+    def delete(self, name):
+        return self._get_response(api="/delete", params=dict(name=name))
+
+    def exists(self, name):
+        return self._get_response(api="/exists", params=dict(name=name))
+
+    def incrby(self, name, value):
+        return self._get_response(api="/incrby", params=dict(name=name, value=value))
+
+    def zcard(self, name) -> bool:
+        return self._get_response(api="/zcard", params=dict(name=name))
+
+    def zadd(self, name, item: dict, **kwargs):
+        return self._post_response(api="/zadd", data=dict(name=name, mapping=item, **kwargs))
+
+    def zrem(self, name, *values):
+        return self._post_response(api="/zrem", data=dict(name=name, values=values))
+
+    def zcount(self, name, _min, _max):
+        return self._get_response(api="/zcount", params=dict(name=name, min=_min, max=_max))
+
+    def lock(self, name, t=15) -> bool:
+        return self._get_response(api="/lock", params=dict(name=name, t=t))
+
+    def auto_incr(self, name, t=15, limit=1000) -> bool:
+        return self._get_response(api="/auto_incr", params=dict(name=name, t=t, limit=limit))
+
+    def members(self, name, score, start=0, count=5000, _min="-inf", _max="+inf"):
+        return self._get_response(api="/members", params=dict(name=name, score=score, start=start, count=count, min=_min, max=_max))
+
+    def done(self, name: list, *values):
+        return self._post_response(api="/done", data=dict(name=name, values=values))
+
+
+
+
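ApiDB mirrors the RedisDB interface over HTTP, so a launcher can reach Redis through a gateway instead of a direct connection. A usage sketch with illustrative key names, assuming a gateway is reachable at the default host:

```python
from cobweb.db import ApiDB

db = ApiDB()  # host defaults to REDIS_API_HOST or http://127.0.0.1:4396

# non-blocking distributed lock: truthy only for the first caller within t seconds
if db.lock("lock:my-task", t=15):
    db.zadd("my-task:todo", {"https://example.com": 300})
    print(db.zcount("my-task:todo", "-inf", "+inf"))
```

Note that because _get_response and _post_response swallow every exception and return None, a caller cannot distinguish a missing key from an unreachable gateway; results of None should be treated as "unknown" rather than "absent".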
cobweb_new/db/redis_db.py ADDED
@@ -0,0 +1,158 @@
+import redis
+from cobweb import setting
+
+
+class RedisDB:
+
+    def __init__(self, **kwargs):
+        redis_config = kwargs or setting.REDIS_CONFIG
+        pool = redis.ConnectionPool(**redis_config)
+        self._client = redis.Redis(connection_pool=pool)
+
+    def setnx(self, name, value=""):
+        return self._client.setnx(name, value)
+
+    def setex(self, name, t, value=""):
+        return self._client.setex(name, t, value)
+
+    def expire(self, name, t, nx: bool = False, xx: bool = False, gt: bool = False, lt: bool = False):
+        return self._client.expire(name, t, nx, xx, gt, lt)
+
+    def ttl(self, name):
+        return self._client.ttl(name)
+
+    def delete(self, name):
+        return self._client.delete(name)
+
+    def exists(self, *name) -> bool:
+        return self._client.exists(*name)
+
+    def incrby(self, name, value):
+        return self._client.incrby(name, value)
+
+    def sadd(self, name, value):
+        return self._client.sadd(name, value)
+
+    def zcard(self, name) -> bool:
+        return self._client.zcard(name)
+
+    def zadd(self, name, item: dict, **kwargs):
+        return self._client.zadd(name, item, **kwargs)
+
+    def zrem(self, name, *value):
+        return self._client.zrem(name, *value)
+
+    def zcount(self, name, _min, _max):
+        return self._client.zcount(name, _min, _max)
+
+    # def zrangebyscore(self, name, _min, _max, start, num, withscores: bool = False, *args):
+    #     return self._client.zrangebyscore(name, _min, _max, start, num, withscores, *args)
+
+    def lua(self, script: str, keys: list = None, args: list = None):
+        keys = keys or []
+        args = args or []
+        keys_count = len(keys)
+        return self._client.eval(script, keys_count, *keys, *args)
+
+    def lua_sha(self, sha1: str, keys: list = None, args: list = None):
+        keys = keys or []
+        args = args or []
+        keys_count = len(keys)
+        return self._client.evalsha(sha1, keys_count, *keys, *args)
+
+    def execute_lua(self, lua_script: str, keys: list, *args):
+        execute = self._client.register_script(lua_script)
+        return execute(keys=keys, args=args)
+
+    def lock(self, key, t=15) -> bool:
+        lua_script = """
+            local status = redis.call('setnx', KEYS[1], 1)
+            if ( status == 1 ) then
+                redis.call('expire', KEYS[1], ARGV[1])
+            end
+            return status
+        """
+        status = self.execute_lua(lua_script, [key], t)
+        return bool(status)
+
+    def auto_incr(self, name, t=15, limit=1000):
+        lua_script = """
+            local count = 0
+            local status = false
+            local limit = ARGV[2]
+            local expire = redis.call('ttl', KEYS[1])
+
+            if ( expire == -2 ) then
+                redis.call('setnx', KEYS[1], 1)
+            elseif ( expire == -1) then
+                redis.call('expire', KEYS[1], ARGV[1])
+            else
+                count = redis.call('get', KEYS[1])
+            end
+
+            if ( count + 0 < limit + 0 ) then
+                status = true
+                redis.call('incr', KEYS[1])
+            end
+
+            return status
+        """
+        status = self.execute_lua(lua_script, [name], t, limit)
+        return bool(status)
+
+    def members(self, key, score, start=0, count=5000, _min="-inf", _max="+inf") -> list:
+        lua_script = """
+            local min = ARGV[1]
+            local max = ARGV[2]
+            local start = ARGV[3]
+            local count = ARGV[4]
+            local score = ARGV[5]
+            local members = nil
+
+            if ( type(count) == string ) then
+                members = redis.call('zrangebyscore', KEYS[1], min, max, 'WITHSCORES')
+            else
+                members = redis.call('zrangebyscore', KEYS[1], min, max, 'WITHSCORES', 'limit', start, count)
+            end
+
+            local result = {}
+
+            for i = 1, #members, 2 do
+                local priority = nil
+                local member = members[i]
+                local originPriority = nil
+                if ( members[i+1] + 0 < 0 ) then
+                    originPriority = math.ceil(members[i+1]) * 1000 - members[i+1] * 1000
+                else
+                    originPriority = math.floor(members[i+1])
+                end
+
+                if ( score + 0 >= 1000 ) then
+                    priority = -score - originPriority / 1000
+                elseif ( score + 0 == 0 ) then
+                    priority = originPriority
+                else
+                    originPriority = score
+                    priority = score
+                end
+                redis.call('zadd', KEYS[1], priority, member)
+                table.insert(result, member)
+                table.insert(result, originPriority)
+            end
+
+            return result
+        """
+        members = self.execute_lua(lua_script, [key], _min, _max, start, count, score)
+        return [(members[i].decode(), int(members[i + 1])) for i in range(0, len(members), 2)]
+
+    def done(self, keys: list, *args) -> list:
+        lua_script = """
+            for i, member in ipairs(ARGV) do
+                redis.call("zrem", KEYS[1], member)
+                redis.call("sadd", KEYS[2], member)
+            end
+        """
+        self.execute_lua(lua_script, keys, *args)
+
+
+
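The lock, auto_incr, members, and done helpers above wrap small Lua scripts so the check and the write happen atomically on the Redis server; members pages seeds out of a sorted set and re-scores them in the same script so concurrent workers do not pull the same page. A hedged sketch of one way a worker might use lock and members together, with illustrative key names and connection kwargs (any redis.ConnectionPool kwargs are accepted):

```python
import time
from cobweb.db import RedisDB

db = RedisDB(host="127.0.0.1", port=6379, db=0)  # kwargs override setting.REDIS_CONFIG

while True:
    # only one worker holds the lock during any 15-second window
    if db.lock("lock:my-task:scheduler", t=15):
        # fetch up to 100 seeds; score=0 keeps each member's original priority
        for member, priority in db.members("my-task:todo", score=0, start=0, count=100):
            print(member, priority)
    time.sleep(5)
```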
cobweb_new/exceptions/__init__.py ADDED
@@ -0,0 +1 @@
+from .oss_db_exception import *
cobweb_new/exceptions/oss_db_exception.py ADDED
@@ -0,0 +1,28 @@
+class OssDBException(Exception):
+    """Base oss client exception that all others inherit."""
+
+
+class OssDBMergeError(OssDBException):
+    """
+    Exception raised when a merge operation fails.
+    """
+
+
+class OssDBPutPartError(OssDBException):
+    """
+    Exception raised when an upload-part operation fails.
+    """
+
+
+class OssDBPutObjError(OssDBException):
+    """
+    Exception raised when an upload operation fails.
+    """
+
+
+class OssDBAppendObjError(OssDBException):
+    """Exception raised when an append operation fails."""
+
+
+class OssDBInitPartError(OssDBException):
+    """Exception raised when an init-upload operation fails."""
cobweb_new/launchers/__init__.py ADDED
@@ -0,0 +1,3 @@
+from .launcher_air import LauncherAir
+from .launcher_pro import LauncherPro
+from .launcher_api import LauncherApi