cobweb-launcher 1.3.6__py3-none-any.whl → 1.3.7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (77)
  1. {cobweb_launcher-1.3.6.dist-info → cobweb_launcher-1.3.7.dist-info}/METADATA +1 -1
  2. cobweb_launcher-1.3.7.dist-info/RECORD +40 -0
  3. cobweb/base/decorators.py +0 -40
  4. cobweb/crawlers/base_crawler.py +0 -144
  5. cobweb/crawlers/file_crawler.py +0 -98
  6. cobweb/pipelines/base_pipeline.py +0 -54
  7. cobweb/pipelines/loghub_pipeline.py +0 -34
  8. cobweb/utils/dotting.py +0 -32
  9. cobweb_/__init__.py +0 -2
  10. cobweb_/base/__init__.py +0 -9
  11. cobweb_/base/common_queue.py +0 -30
  12. cobweb_/base/decorators.py +0 -40
  13. cobweb_/base/item.py +0 -46
  14. cobweb_/base/log.py +0 -94
  15. cobweb_/base/request.py +0 -82
  16. cobweb_/base/response.py +0 -23
  17. cobweb_/base/seed.py +0 -114
  18. cobweb_/constant.py +0 -94
  19. cobweb_/crawlers/__init__.py +0 -1
  20. cobweb_/crawlers/crawler.py +0 -184
  21. cobweb_/db/__init__.py +0 -2
  22. cobweb_/db/api_db.py +0 -82
  23. cobweb_/db/redis_db.py +0 -130
  24. cobweb_/exceptions/__init__.py +0 -1
  25. cobweb_/exceptions/oss_db_exception.py +0 -28
  26. cobweb_/launchers/__init__.py +0 -3
  27. cobweb_/launchers/launcher.py +0 -235
  28. cobweb_/launchers/launcher_air.py +0 -88
  29. cobweb_/launchers/launcher_api.py +0 -221
  30. cobweb_/launchers/launcher_pro.py +0 -222
  31. cobweb_/pipelines/__init__.py +0 -3
  32. cobweb_/pipelines/pipeline.py +0 -69
  33. cobweb_/pipelines/pipeline_console.py +0 -22
  34. cobweb_/pipelines/pipeline_loghub.py +0 -34
  35. cobweb_/setting.py +0 -74
  36. cobweb_/utils/__init__.py +0 -5
  37. cobweb_/utils/bloom.py +0 -58
  38. cobweb_/utils/dotting.py +0 -32
  39. cobweb_/utils/oss.py +0 -94
  40. cobweb_/utils/tools.py +0 -42
  41. cobweb_launcher-1.3.6.dist-info/RECORD +0 -111
  42. cobweb_new/__init__.py +0 -2
  43. cobweb_new/base/__init__.py +0 -72
  44. cobweb_new/base/common_queue.py +0 -53
  45. cobweb_new/base/decorators.py +0 -72
  46. cobweb_new/base/item.py +0 -46
  47. cobweb_new/base/log.py +0 -94
  48. cobweb_new/base/request.py +0 -82
  49. cobweb_new/base/response.py +0 -23
  50. cobweb_new/base/seed.py +0 -118
  51. cobweb_new/constant.py +0 -105
  52. cobweb_new/crawlers/__init__.py +0 -1
  53. cobweb_new/crawlers/crawler-new.py +0 -85
  54. cobweb_new/crawlers/crawler.py +0 -170
  55. cobweb_new/db/__init__.py +0 -2
  56. cobweb_new/db/api_db.py +0 -82
  57. cobweb_new/db/redis_db.py +0 -158
  58. cobweb_new/exceptions/__init__.py +0 -1
  59. cobweb_new/exceptions/oss_db_exception.py +0 -28
  60. cobweb_new/launchers/__init__.py +0 -3
  61. cobweb_new/launchers/launcher.py +0 -237
  62. cobweb_new/launchers/launcher_air.py +0 -88
  63. cobweb_new/launchers/launcher_api.py +0 -161
  64. cobweb_new/launchers/launcher_pro.py +0 -96
  65. cobweb_new/launchers/tesss.py +0 -47
  66. cobweb_new/pipelines/__init__.py +0 -3
  67. cobweb_new/pipelines/pipeline.py +0 -68
  68. cobweb_new/pipelines/pipeline_console.py +0 -22
  69. cobweb_new/pipelines/pipeline_loghub.py +0 -34
  70. cobweb_new/setting.py +0 -95
  71. cobweb_new/utils/__init__.py +0 -5
  72. cobweb_new/utils/bloom.py +0 -58
  73. cobweb_new/utils/oss.py +0 -94
  74. cobweb_new/utils/tools.py +0 -42
  75. {cobweb_launcher-1.3.6.dist-info → cobweb_launcher-1.3.7.dist-info}/LICENSE +0 -0
  76. {cobweb_launcher-1.3.6.dist-info → cobweb_launcher-1.3.7.dist-info}/WHEEL +0 -0
  77. {cobweb_launcher-1.3.6.dist-info → cobweb_launcher-1.3.7.dist-info}/top_level.txt +0 -0
@@ -1,82 +0,0 @@
1
- import random
2
- import requests
3
-
4
-
5
- class Request:
6
-
7
- __REQUEST_ATTRS__ = {
8
- "params",
9
- "headers",
10
- "cookies",
11
- "data",
12
- "json",
13
- "files",
14
- "auth",
15
- "timeout",
16
- "proxies",
17
- "hooks",
18
- "stream",
19
- "verify",
20
- "cert",
21
- "allow_redirects",
22
- }
23
-
24
- def __init__(
25
- self,
26
- url,
27
- seed,
28
- random_ua=True,
29
- check_status_code=True,
30
- **kwargs
31
- ):
32
- self.url = url
33
- self.seed = seed
34
- self.check_status_code = check_status_code
35
- self.request_setting = {}
36
-
37
- for k, v in kwargs.items():
38
- if k in self.__class__.__REQUEST_ATTRS__:
39
- self.request_setting[k] = v
40
- continue
41
- self.__setattr__(k, v)
42
-
43
- if not getattr(self, "method", None):
44
- self.method = "POST" if self.request_setting.get("data") or self.request_setting.get("json") else "GET"
45
-
46
- if random_ua:
47
- self._build_header()
48
-
49
- @property
50
- def _random_ua(self) -> str:
51
- v1 = random.randint(4, 15)
52
- v2 = random.randint(3, 11)
53
- v3 = random.randint(1, 16)
54
- v4 = random.randint(533, 605)
55
- v5 = random.randint(1000, 6000)
56
- v6 = random.randint(10, 80)
57
- user_agent = (f"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_{v1}_{v2}) AppleWebKit/{v4}.{v3} "
58
- f"(KHTML, like Gecko) Chrome/105.0.0.0 Safari/{v4}.{v3} Edg/105.0.{v5}.{v6}")
59
- return user_agent
60
-
61
- def _build_header(self) -> dict:
62
- if not self.request_setting.get("headers"):
63
- self.request_setting["headers"] = {"accept": "*/*", "user-agent": self._random_ua}
64
- elif "user-agent" not in [key.lower() for key in self.request_setting["headers"].keys()]:
65
- self.request_setting["headers"]["user-agent"] = self._random_ua
66
-
67
- def download(self) -> requests.Response:
68
- response = requests.request(self.method, self.url, **self.request_setting)
69
- if self.check_status_code:
70
- response.raise_for_status()
71
- return response
72
-
73
- @property
74
- def to_dict(self):
75
- _dict = self.__dict__.copy()
76
- _dict.pop('url')
77
- _dict.pop('seed')
78
- _dict.pop('check_status_code')
79
- _dict.pop('request_setting')
80
- return _dict
81
-
82
-
@@ -1,23 +0,0 @@
1
-
2
-
3
- class Response:
4
-
5
- def __init__(
6
- self,
7
- seed,
8
- response,
9
- **kwargs
10
- ):
11
- self.seed = seed
12
- self.response = response
13
-
14
- for k, v in kwargs.items():
15
- self.__setattr__(k, v)
16
-
17
- @property
18
- def to_dict(self):
19
- _dict = self.__dict__.copy()
20
- _dict.pop('seed')
21
- _dict.pop('response')
22
- return _dict
23
-
cobweb_new/base/seed.py DELETED
@@ -1,118 +0,0 @@
1
- import json
2
- import time
3
- import hashlib
4
-
5
-
6
- class SeedParams:
7
-
8
- def __init__(self, retry, priority, seed_version, seed_status=None):
9
- self.retry = retry or 0
10
- self.priority = priority or 300
11
- self.seed_version = seed_version or int(time.time())
12
- self.seed_status = seed_status
13
-
14
-
15
- class Seed:
16
-
17
- __SEED_PARAMS__ = [
18
- "retry",
19
- "priority",
20
- "seed_version",
21
- "seed_status"
22
- ]
23
-
24
- def __init__(
25
- self,
26
- seed,
27
- sid=None,
28
- retry=None,
29
- priority=None,
30
- seed_version=None,
31
- seed_status=None,
32
- **kwargs
33
- ):
34
- if any(isinstance(seed, t) for t in (str, bytes)):
35
- try:
36
- item = json.loads(seed)
37
- self._init_seed(item)
38
- except json.JSONDecodeError:
39
- self.__setattr__("url", seed)
40
- elif isinstance(seed, dict):
41
- self._init_seed(seed)
42
- else:
43
- raise TypeError(Exception(
44
- f"seed type error, "
45
- f"must be str or dict! "
46
- f"seed: {seed}"
47
- ))
48
-
49
- seed_params = {
50
- "retry": retry,
51
- "priority": priority,
52
- "seed_version": seed_version,
53
- "seed_status": seed_status,
54
- }
55
-
56
- if kwargs:
57
- self._init_seed(kwargs)
58
- seed_params.update({
59
- k:v for k, v in kwargs.items()
60
- if k in self.__SEED_PARAMS__
61
- })
62
- if sid or not getattr(self, "sid", None):
63
- self._init_id(sid)
64
- self.params = SeedParams(**seed_params)
65
-
66
- def __getattr__(self, name):
67
- return None
68
-
69
- def __setitem__(self, key, value):
70
- setattr(self, key, value)
71
-
72
- def __getitem__(self, item):
73
- return getattr(self, item)
74
-
75
- def __str__(self):
76
- return json.dumps(self.__dict__, ensure_ascii=False)
77
-
78
- def __repr__(self):
79
- chars = [f"{k}={v}" for k, v in self.__dict__.items()]
80
- return f'{self.__class__.__name__}({", ".join(chars)})'
81
-
82
- def _init_seed(self, seed_info:dict):
83
- for k, v in seed_info.items():
84
- if k not in self.__SEED_PARAMS__:
85
- self.__setattr__(k, v)
86
-
87
- def _init_id(self, sid):
88
- if not sid:
89
- sid = hashlib.md5(self.to_string.encode()).hexdigest()
90
- self.__setattr__("sid", sid)
91
-
92
- @property
93
- def to_dict(self) -> dict:
94
- seed = self.__dict__.copy()
95
- if seed.get("params"):
96
- del seed["params"]
97
- return seed
98
-
99
- @property
100
- def to_string(self) -> str:
101
- return json.dumps(
102
- self.to_dict,
103
- ensure_ascii=False,
104
- separators=(",", ":")
105
- )
106
-
107
- # @property
108
- # def get_all(self):
109
- # return json.dumps(
110
- # self.__dict__,
111
- # ensure_ascii=False,
112
- # separators=(",", ":")
113
- # )
114
-
115
- @property
116
- def seed(self):
117
- return self
118
-
cobweb_new/constant.py DELETED
@@ -1,105 +0,0 @@
1
-
2
- class CrawlerModel:
3
-
4
- default = "cobweb.crawlers.Crawler"
5
- file_air = "cobweb.crawlers.FileCrawlerAir"
6
- file_pro = "cobweb.crawlers.FileCrawlerPro"
7
-
8
-
9
- class LauncherModel:
10
- task = "launcher model: task"
11
- resident = "launcher model: resident"
12
-
13
-
14
- class DownloadModel:
15
- common = "download model: common"
16
- file = "download model: file"
17
-
18
-
19
- class LogModel:
20
- simple = "log model: simple"
21
- common = "log model: common"
22
- detailed = "log model: detailed"
23
-
24
-
25
- class DealModel:
26
- fail = "deal model: fail"
27
- done = "deal model: done"
28
- poll = "deal model: poll"
29
-
30
-
31
- class LogTemplate:
32
-
33
- console_item = """
34
- ----------------------- start - console pipeline -----------------
35
- 种子详情 \n{seed_detail}
36
- 解析详情 \n{parse_detail}
37
- ----------------------- end - console pipeline ------------------
38
- """
39
-
40
- launcher_polling = """
41
- ----------------------- start - 轮训日志: {task} -----------------
42
- 内存队列
43
- 种子数: {doing_len}
44
- 待消费: {todo_len}
45
- 已消费: {done_len}
46
- 存储队列
47
- 待上传: {upload_len}
48
- ----------------------- end - 轮训日志: {task} ------------------
49
- """
50
-
51
- launcher_air_polling = """
52
- ----------------------- start - 轮训日志: {task} -----------------
53
- 内存队列
54
- 种子数: {doing_len}
55
- 待消费: {todo_len}
56
- 已消费: {done_len}
57
- 存储队列
58
- 待上传: {upload_len}
59
- ----------------------- end - 轮训日志: {task} ------------------
60
- """
61
-
62
- launcher_pro_polling = """
63
- ----------------------- start - 轮训日志: {task} -----------------
64
- 内存队列
65
- 种子数: {doing_len}
66
- 待消费: {todo_len}
67
- 已消费: {done_len}
68
- redis队列
69
- 种子数: {redis_seed_count}
70
- 待消费: {redis_todo_len}
71
- 消费中: {redis_doing_len}
72
- 存储队列
73
- 待上传: {upload_len}
74
- ----------------------- end - 轮训日志: {task} ------------------
75
- """
76
-
77
- download_exception = """
78
- ----------------------- download exception -----------------------
79
- 种子详情 \n{detail}
80
- 种子参数
81
- retry : {retry}
82
- priority : {priority}
83
- seed_version : {seed_version}
84
- identifier : {identifier}
85
- exception
86
- msg : {exception}
87
- ------------------------------------------------------------------
88
- """
89
-
90
- download_info = """
91
- ------------------------ download info ---------------------------
92
- 种子详情 \n{detail}
93
- 种子参数
94
- retry : {retry}
95
- priority : {priority}
96
- seed_version : {seed_version}
97
- identifier : {identifier}
98
- response
99
- status : {status} \n{response}
100
- ------------------------------------------------------------------
101
- """
102
-
103
- @staticmethod
104
- def log_info(item: dict) -> str:
105
- return "\n".join([" " * 12 + f"{str(k).ljust(14)}: {str(v)}" for k, v in item.items()])
@@ -1 +0,0 @@
1
- from .crawler import Crawler
@@ -1,85 +0,0 @@
1
- import json
2
- import time
3
- import threading
4
- from typing import Union, Callable, Mapping
5
-
6
- import setting
7
- from cobweb.base import (
8
- Seed,
9
- BaseItem,
10
- Request,
11
- Response,
12
- ConsoleItem,
13
- decorators,
14
- TaskQueue,
15
- logger
16
- )
17
- from constant import DealModel
18
-
19
-
20
- class Crawler(threading.Thread):
21
-
22
- def __init__(self, custom_func: Union[Mapping[str, Callable]]):
23
- super().__init__()
24
-
25
- for func_name, _callable in custom_func.items():
26
- if isinstance(_callable, Callable):
27
- self.__setattr__(func_name, _callable)
28
-
29
- self.spider_max_retries = setting.SPIDER_MAX_RETRIES
30
- self.request_queue_size = setting.REQUEST_QUEUE_SIZE
31
- self.download_queue_size = setting.DOWNLOAD_QUEUE_SIZE
32
- self.upload_queue_size = setting.UPLOAD_QUEUE_SIZE
33
-
34
- @staticmethod
35
- def request(seed: Seed) -> Union[Request, BaseItem]:
36
- yield Request(seed.url, seed, timeout=5)
37
-
38
- @staticmethod
39
- def download(item: Request) -> Union[Seed, BaseItem, Response, str]:
40
- response = item.download()
41
- yield Response(item.seed, response, **item.to_dict)
42
-
43
- @staticmethod
44
- def parse(item: Response) -> BaseItem:
45
- upload_item = item.to_dict
46
- upload_item["text"] = item.response.text
47
- yield ConsoleItem(item.seed, data=json.dumps(upload_item, ensure_ascii=False))
48
-
49
- # @decorators.add_thread()
50
- @decorators.pause
51
- def build_request_item(self):
52
- thread_sleep = 0.1
53
- if TaskQueue.REQUEST.length >= self.request_queue_size:
54
- thread_sleep = 5
55
- elif seed := TaskQueue.TODO.pop():
56
- if seed.params.retry > self.spider_max_retries:
57
- seed.params.seed_status = DealModel.fail
58
- else:
59
- TaskQueue.process_task(seed, self.request)
60
- TaskQueue.DELETE.push(seed)
61
- time.sleep(thread_sleep)
62
-
63
- # @decorators.add_thread(num=setting.SPIDER_THREAD_NUM)
64
- @decorators.pause
65
- def build_download_item(self):
66
- thread_sleep = 0.1
67
- if TaskQueue.DOWNLOAD.length >= self.download_queue_size:
68
- logger.info(f"download queue is full, sleep {thread_sleep}s")
69
- thread_sleep = 5
70
- elif request_item := TaskQueue.REQUEST.pop():
71
- TaskQueue.process_task(request_item, self.download)
72
- time.sleep(thread_sleep)
73
-
74
- # @decorators.add_thread()
75
- @decorators.pause
76
- def build_parse_item(self):
77
- thread_sleep = 0.1
78
- if TaskQueue.UPLOAD.length >= self.upload_queue_size:
79
- logger.info(f"upload queue is full, sleep {thread_sleep}s")
80
- thread_sleep = 5
81
- if response_item := TaskQueue.RESPONSE.pop():
82
- TaskQueue.process_task(response_item, self.parse)
83
- time.sleep(thread_sleep)
84
-
85
-
@@ -1,170 +0,0 @@
1
- import json
2
- import threading
3
- import time
4
- import traceback
5
- from inspect import isgenerator
6
- from typing import Union, Callable, Mapping
7
-
8
- from cobweb.constant import DealModel, LogTemplate
9
- from cobweb.base import (
10
- Queue,
11
- Seed,
12
- BaseItem,
13
- Request,
14
- Response,
15
- ConsoleItem,
16
- logger
17
- )
18
-
19
-
20
- class Crawler(threading.Thread):
21
-
22
- def __init__(
23
- self,
24
- stop: threading.Event,
25
- pause: threading.Event,
26
- # launcher_queue: Union[Mapping[str, Queue]],
27
- get_seed: Callable,
28
- set_seed: Callable,
29
- add_seed: Callable,
30
- delete_seed: Callable,
31
- upload_data: Callable,
32
- custom_func: Union[Mapping[str, Callable]],
33
- thread_num: int,
34
- max_retries: int,
35
- time_sleep: int,
36
- ):
37
- super().__init__()
38
-
39
- self._stop = stop
40
- self._pause = pause
41
- self._get_seed = get_seed
42
- self._set_seed = set_seed
43
- self._add_seed = add_seed
44
- self._delete_seed = delete_seed
45
- self._upload_data = upload_data
46
-
47
- for func_name, _callable in custom_func.items():
48
- if isinstance(_callable, Callable):
49
- self.__setattr__(func_name, _callable)
50
-
51
- self.thread_num = thread_num
52
- self.time_sleep = time_sleep
53
- self.max_retries = max_retries
54
-
55
- @staticmethod
56
- def request(seed: Seed) -> Union[Request, BaseItem]:
57
- yield Request(seed.url, seed, timeout=5)
58
-
59
- @staticmethod
60
- def download(item: Request) -> Union[Seed, BaseItem, Response, str]:
61
- response = item.download()
62
- yield Response(item.seed, response, **item.to_dict)
63
-
64
- @staticmethod
65
- def parse(item: Response) -> BaseItem:
66
- upload_item = item.to_dict
67
- upload_item["text"] = item.response.text
68
- yield ConsoleItem(item.seed, data=json.dumps(upload_item, ensure_ascii=False))
69
-
70
- # def get_seed(self) -> Seed:
71
- # return self._todo.pop()
72
-
73
- def distribute(self, item, seed):
74
- if isinstance(item, BaseItem):
75
- self._upload_data(item)
76
- elif isinstance(item, Seed):
77
- self._add_seed(item)
78
- elif isinstance(item, str) and item == DealModel.poll:
79
- self._set_seed(seed)
80
- elif isinstance(item, str) and item == DealModel.done:
81
- self._delete_seed(seed)
82
- elif isinstance(item, str) and item == DealModel.fail:
83
- seed.params.seed_status = DealModel.fail
84
- self._delete_seed(seed)
85
- else:
86
- raise TypeError("yield value type error!")
87
-
88
- def spider(self):
89
- while not self._stop.is_set():
90
-
91
- seed = self._get_seed()
92
-
93
- if not seed:
94
- time.sleep(1)
95
- continue
96
-
97
- elif seed.params.retry > self.max_retries:
98
- seed.params.seed_status = DealModel.fail
99
- self._delete_seed(seed)
100
- continue
101
-
102
- seed_detail_log_info = LogTemplate.log_info(seed.to_dict)
103
-
104
- try:
105
- request_iterators = self.request(seed)
106
-
107
- if not isgenerator(request_iterators):
108
- raise TypeError("request function isn't a generator!")
109
-
110
- iterator_status = False
111
-
112
- for request_item in request_iterators:
113
-
114
- iterator_status = True
115
-
116
- if isinstance(request_item, Request):
117
- iterator_status = False
118
- download_iterators = self.download(request_item)
119
- if not isgenerator(download_iterators):
120
- raise TypeError("download function isn't a generator")
121
-
122
- for download_item in download_iterators:
123
- iterator_status = True
124
- if isinstance(download_item, Response):
125
- iterator_status = False
126
- logger.info(LogTemplate.download_info.format(
127
- detail=seed_detail_log_info,
128
- retry=seed.params.retry,
129
- priority=seed.params.priority,
130
- seed_version=seed.params.seed_version,
131
- identifier=seed.identifier or "",
132
- status=download_item.response,
133
- response=LogTemplate.log_info(download_item.to_dict)
134
- ))
135
- parse_iterators = self.parse(download_item)
136
- if not isgenerator(parse_iterators):
137
- raise TypeError("parse function isn't a generator")
138
- for parse_item in parse_iterators:
139
- iterator_status = True
140
- if isinstance(parse_item, Response):
141
- raise TypeError("upload_item can't be a Response instance")
142
- self.distribute(parse_item, seed)
143
- else:
144
- self.distribute(download_item, seed)
145
- else:
146
- self.distribute(request_item, seed)
147
-
148
- if not iterator_status:
149
- raise ValueError("request/download/parse function yield value error!")
150
- except Exception as e:
151
- logger.info(LogTemplate.download_exception.format(
152
- detail=seed_detail_log_info,
153
- retry=seed.params.retry,
154
- priority=seed.params.priority,
155
- seed_version=seed.params.seed_version,
156
- identifier=seed.identifier or "",
157
- exception=''.join(traceback.format_exception(type(e), e, e.__traceback__))
158
- ))
159
- seed.params.retry += 1
160
- # self._todo.push(seed)
161
- self._set_seed(seed)
162
- time.sleep(self.time_sleep * seed.params.retry)
163
- finally:
164
- time.sleep(0.1)
165
- logger.info("spider thread close")
166
-
167
- def run(self):
168
- for index in range(self.thread_num):
169
- threading.Thread(name=f"spider_{index}", target=self.spider).start()
170
-
cobweb_new/db/__init__.py DELETED
@@ -1,2 +0,0 @@
1
- from .redis_db import RedisDB
2
- from .api_db import ApiDB
cobweb_new/db/api_db.py DELETED
@@ -1,82 +0,0 @@
1
- import os
2
- import json
3
- import requests
4
-
5
-
6
- class ApiDB:
7
-
8
- def __init__(self, host=None, **kwargs):
9
- self.host = host or os.getenv("REDIS_API_HOST", "http://127.0.0.1:4396")
10
-
11
- def _get_response(self, api, params: dict = None):
12
- try:
13
- url = self.host + api
14
- response = requests.get(url, params=params)
15
- json_data = response.json()
16
- response.close()
17
- return json_data["data"]
18
- except:
19
- return None
20
-
21
- def _post_response(self, api, params: dict = None, data: dict = None):
22
- try:
23
- url = self.host + api
24
- headers = {"Content-Type": "application/json"}
25
- response = requests.post(url, headers=headers, params=params, data=json.dumps(data))
26
- json_data = response.json()
27
- response.close()
28
- return json_data["data"]
29
- except:
30
- return None
31
-
32
- def get(self, name):
33
- return self._get_response(api="/get", params=dict(name=name))
34
-
35
- def setnx(self, name, value=""):
36
- return self._get_response(api="/setnx", params=dict(name=name, value=value))
37
-
38
- def setex(self, name, t, value=""):
39
- return self._get_response(api="/setex", params=dict(name=name, value=value, t=t))
40
-
41
- def expire(self, name, t, nx: bool = False, xx: bool = False, gt: bool = False, lt: bool = False):
42
- return self._get_response(api="/expire", params=dict(name=name, t=t, nx=nx, xx=xx, gt=gt, lt=lt))
43
-
44
- def ttl(self, name):
45
- return self._get_response(api="/ttl", params=dict(name=name))
46
-
47
- def delete(self, name):
48
- return self._get_response(api="/delete", params=dict(name=name))
49
-
50
- def exists(self, name):
51
- return self._get_response(api="/exists", params=dict(name=name))
52
-
53
- def incrby(self, name, value):
54
- return self._get_response(api="/incrby", params=dict(name=name, value=value))
55
-
56
- def zcard(self, name) -> bool:
57
- return self._get_response(api="/zcard", params=dict(name=name))
58
-
59
- def zadd(self, name, item: dict, **kwargs):
60
- return self._post_response(api="/zadd", data=dict(name=name, mapping=item, **kwargs))
61
-
62
- def zrem(self, name, *values):
63
- return self._post_response(api="/zrem", data=dict(name=name, values=values))
64
-
65
- def zcount(self, name, _min, _max):
66
- return self._get_response(api="/zcount", params=dict(name=name, min=_min, max=_max))
67
-
68
- def lock(self, name, t=15) -> bool:
69
- return self._get_response(api="/lock", params=dict(name=name, t=t))
70
-
71
- def auto_incr(self, name, t=15, limit=1000) -> bool:
72
- return self._get_response(api="/auto_incr", params=dict(name=name, t=t, limit=limit))
73
-
74
- def members(self, name, score, start=0, count=5000, _min="-inf", _max="+inf"):
75
- return self._get_response(api="/members", params=dict(name=name, score=score, start=start, count=count, min=_min, max=_max))
76
-
77
- def done(self, name: list, *values):
78
- return self._post_response(api="/done", data=dict(name=name, values=values))
79
-
80
-
81
-
82
-