coocan 0.5.3.1__py3-none-any.whl → 0.5.5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- _test/crawl_csdn.py +53 -0
- _test/demo.py +33 -0
- _test/err_demo.py +27 -0
- _test/test_priority.py +21 -0
- _test/test_req_delay.py +19 -0
- _test/test_req_err.py +32 -0
- coocan/_examples/crawl_csdn_detail.py +62 -0
- coocan/_examples/crawl_csdn_list.py +50 -0
- coocan/_examples/recv_item.py +31 -0
- coocan/_examples/view_local_ip.py +22 -0
- {coocan-0.5.3.1.dist-info → coocan-0.5.5.dist-info}/METADATA +2 -1
- coocan-0.5.5.dist-info/RECORD +26 -0
- {coocan-0.5.3.1.dist-info → coocan-0.5.5.dist-info}/top_level.txt +1 -0
- coocan-0.5.3.1.dist-info/RECORD +0 -16
- {coocan-0.5.3.1.dist-info → coocan-0.5.5.dist-info}/WHEEL +0 -0
- {coocan-0.5.3.1.dist-info → coocan-0.5.5.dist-info}/entry_points.txt +0 -0
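All of the added _test and coocan/_examples files exercise the same MiniSpider pattern. As orientation before the individual files, here is a condensed sketch of that shared pattern, assembled only from the code shown in this diff; ExampleSpider and parse_next are illustrative names, not part of the package.

import coocan
from coocan import MiniSpider, Request, Response


class ExampleSpider(MiniSpider):
    # Class attributes used across the added files: seed URLs and a concurrency cap
    # (some examples also set a `delay` attribute)
    start_urls = ["https://cn.bing.com/"]
    max_requests = 5

    def middleware(self, request: Request):
        # Hook for mutating outgoing requests; the examples set a Referer header here
        request.headers["Referer"] = "https://cn.bing.com/"

    def parse(self, response: Response):
        # Default callback for start_urls; callbacks yield further Requests or item dicts
        yield Request("https://cn.bing.com/search?q=1", callback=self.parse_next)

    def parse_next(self, response: Response):
        yield {"status": response.status_code}  # dict items are handed to process_item

    def process_item(self, item: dict):
        print(item)


if __name__ == '__main__':
    ExampleSpider().go()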
_test/crawl_csdn.py
ADDED
@@ -0,0 +1,53 @@
import json

from loguru import logger

import coocan
from coocan import Request, MiniSpider

api = "https://blog.csdn.net/community/home-api/v1/get-business-list"
params = {
    "page": "1",
    "size": "20",
    "businessType": "lately",
    "noMore": "false",
    "username": "markadc"
}


class CsdnAirAsyncSpider(MiniSpider):
    start_urls = ['http://www.csdn.net']
    max_requests = 10

    def parse(self, response):
        yield coocan.Request(api, self.parse_page, params=params)

    def middleware(self, request: Request):
        request.headers["Referer"] = "http://www.csdn.net/"

    def parse_page(self, response):
        current_page = params["page"]
        data = json.loads(response.text)
        some = data["data"]["list"]
        if not some:
            logger.warning("没有第 {} 页".format(current_page))
            return
        for one in some:
            date = one["formatTime"]
            name = one["title"]
            detail_url = one["url"]
            yield coocan.Request(detail_url, self.parse_detail)
            print(date, detail_url, name)
        logger.info("第 {} 页抓取成功".format(params["page"]))

        next_page = int(current_page) + 1
        params["page"] = str(next_page)
        yield coocan.Request(api, self.parse_page, params=params)

    def parse_detail(self, response):
        logger.success("{} {}".format(response.status_code, response.request.url))


if __name__ == '__main__':
    s = CsdnAirAsyncSpider()
    s.go()
_test/demo.py
ADDED
@@ -0,0 +1,33 @@
from loguru import logger

import coocan


class DemoSpider(coocan.MiniSpider):
    start_urls = ["https://cn.bing.com/"]
    max_requests = 5

    def parse(self, response):
        print(response.request.headers.get("User-Agent"))
        logger.debug('{} {}'.format(response.status_code, len(response.text)))
        for i in range(5):
            yield coocan.Request('https://cn.bing.com/', self.parse2)

    def parse2(self, response):
        logger.info('{} {}'.format(response.status_code, len(response.text)))
        for i in range(3):
            yield coocan.Request('https://cn.bing.com/', self.parse3)

        for i in range(4):
            yield coocan.Request('https://cn.bing.com/', self.parse4)

    def parse3(self, response):
        logger.warning('{} {}'.format(response.status_code, len(response.text)))

    def parse4(self, response):
        logger.error('{} {}'.format(response.status_code, len(response.text)))


if __name__ == '__main__':
    my_spider = DemoSpider()
    my_spider.go()
_test/err_demo.py
ADDED
@@ -0,0 +1,27 @@
from loguru import logger

import coocan
from coocan.spider import MiniSpider


class ErrDemoSpider(MiniSpider):
    start_urls = ["https://cn.bing.com/"]
    max_requests = 5

    def parse(self, response):
        print(response.request.headers.get("User-Agent"))
        logger.debug('{} {}'.format(response.status_code, len(response.text)))
        yield coocan.Request('https://cn.bing.com/', self.parse2, cb_kwargs={"name": "CLOS"})

    def parse2(self, response, name):
        print(name)
        logger.debug('{} {}'.format(response.status_code, len(response.text)))
        yield coocan.Request('https://cn.bing.com/', self.parse3, cb_kwargs={"a1": 1, "a2": 2})

    def parse3(self, response, a1, a22):
        print(a1, a22)


if __name__ == '__main__':
    my_spider = ErrDemoSpider()
    my_spider.go()
_test/test_priority.py
ADDED
@@ -0,0 +1,21 @@
from coocan import MiniSpider, Request, Response


class TestPrioritySpider(MiniSpider):
    headers_extra_field = {"Name": "Coocan"}

    def start_requests(self):
        for i in range(100):
            url = 'https://www.baidu.com/s?w={}'.format(i)
            yield Request(url, callback=self.parse, priority=100 - i)

    def parse(self, response: Response):
        print(response.request.url)
        print(response.request.headers["User-Agent"])
        print(response.request.headers)
        print()


if __name__ == '__main__':
    s = TestPrioritySpider()
    s.go()
_test/test_req_delay.py
ADDED
@@ -0,0 +1,19 @@
from coocan import MiniSpider, Request, Response


class TestReqDelaySpider(MiniSpider):
    max_requests = 5
    delay = 3

    def start_requests(self):
        for i in range(100):
            url = 'https://www.baidu.com/s?w={}'.format(i)
            yield Request(url, callback=self.parse, priority=100 - i)

    def parse(self, response: Response):
        print(response.request.url)


if __name__ == '__main__':
    s = TestReqDelaySpider()
    s.go()
_test/test_req_err.py
ADDED
@@ -0,0 +1,32 @@
import random

from coocan import MiniSpider, Request, Response, IgnoreRequest


class TestReqErrSpider(MiniSpider):
    def start_requests(self):
        for i in range(5):
            url = "https://www.google.com/{}".format(i + 1)
            yield Request(url, callback=self.parse, timeout=1)

    def handle_request_excetpion(self, e: Exception, request: Request):
        v = random.randint(1, 3)
        if v == 1:
            raise IgnoreRequest("出验证码了")
        if v == 2:
            1 / 0
        if v == 3:
            new_url = "https://www.baidu.com/s?wd={}".format(random.randint(1, 100))
            return Request(new_url, callback=self.parse, timeout=1)

    def parse(self, response: Response):
        v = random.randint(1, 2)
        if v == 1:
            print("爬取成功", response.url, len(response.text))
            print(response.get_one("//title/text()"))
        aaa


if __name__ == '__main__':
    my_spider = TestReqErrSpider()
    my_spider.go()
coocan/_examples/crawl_csdn_detail.py
ADDED
@@ -0,0 +1,62 @@
import json

from loguru import logger

import coocan
from coocan import Request, MiniSpider


class CSDNDetailSpider(MiniSpider):
    start_urls = ['http://www.csdn.net']
    max_requests = 10

    def middleware(self, request: Request):
        request.headers["Referer"] = "http://www.csdn.net/"

    def parse(self, response):
        api = "https://blog.csdn.net/community/home-api/v1/get-business-list"
        params = {
            "page": "1",
            "size": "20",
            "businessType": "lately",
            "noMore": "false",
            "username": "markadc"
        }
        yield Request(api, self.parse_page, params=params, cb_kwargs={"api": api, "params": params})

    def parse_page(self, response, api, params):
        current_page = params["page"]
        data = json.loads(response.text)
        some = data["data"]["list"]

        if not some:
            logger.warning("没有第 {} 页".format(current_page))
            return

        for one in some:
            date = one["formatTime"]
            name = one["title"]
            detail_url = one["url"]
            logger.info(
                """
                {}
                {}
                {}
                """.format(date, name, detail_url)
            )
            yield coocan.Request(detail_url, self.parse_detail, cb_kwargs={"title": name})

        logger.info("第 {} 页抓取成功".format(params["page"]))

        # Crawl the next page
        next_page = int(current_page) + 1
        params["page"] = str(next_page)
        yield Request(api, self.parse_page, params=params, cb_kwargs={"api": api, "params": params})

    def parse_detail(self, response, title):
        logger.success("{} 已访问 {}".format(response.status_code, title))


if __name__ == '__main__':
    s = CSDNDetailSpider()
    s.go()
coocan/_examples/crawl_csdn_list.py
ADDED
@@ -0,0 +1,50 @@
import json

from loguru import logger

from coocan import Request, MiniSpider


class CSDNSpider(MiniSpider):
    start_urls = ['http://www.csdn.net']
    max_requests = 10

    def middleware(self, request: Request):
        request.headers["Referer"] = "http://www.csdn.net/"

    def parse(self, response):
        api = "https://blog.csdn.net/community/home-api/v1/get-business-list"
        params = {
            "page": "1",
            "size": "20",
            "businessType": "lately",
            "noMore": "false",
            "username": "markadc"
        }
        yield Request(api, self.parse_page, params=params, cb_kwargs={"api": api, "params": params})

    def parse_page(self, response, api, params):
        current_page = params["page"]
        data = json.loads(response.text)
        some = data["data"]["list"]

        if not some:
            logger.warning("没有第 {} 页".format(current_page))
            return

        for one in some:
            date = one["formatTime"]
            name = one["title"]
            detail_url = one["url"]
            print(date, detail_url, name)
        print("第 {} 页抓取成功".format(params["page"]))

        # Crawl the next page
        next_page = int(current_page) + 1
        params["page"] = str(next_page)
        yield Request(api, self.parse_page, params=params, cb_kwargs={"api": api, "params": params})


if __name__ == '__main__':
    s = CSDNSpider()
    s.go()
coocan/_examples/recv_item.py
ADDED
@@ -0,0 +1,31 @@
import random
import time

from loguru import logger

from coocan import MiniSpider, Request, Response


class RecvItemSpider(MiniSpider):
    start_urls = ["https://cn.bing.com/search?q=1"]
    max_requests = 10

    def parse(self, response: Response):
        logger.warning("{} {}".format(response.status_code, response.request.url, response.get_one("//title/text()")))
        for _ in range(10):
            item = {"timestamp": int(time.time() * 1000), "mark": random.randint(1, 10000)}  # pretend this is the spider's scraped data
            yield item
        head, tail = str(response.request.url).split("=")
        next_url = "{}={}".format(head, int(tail) + 1)
        if next_url.endswith("11"):
            yield "coocan"  # a non-dict item produces a warning log
            return
        yield Request(next_url, callback=self.parse)

    def process_item(self, item: dict):
        logger.success("Get => {}".format(item))


if __name__ == '__main__':
    s = RecvItemSpider()
    s.go()
coocan/_examples/view_local_ip.py
ADDED
@@ -0,0 +1,22 @@
from coocan import Request, Response, MiniSpider


class ViewLocalIPSpider(MiniSpider):
    start_urls = ["https://httpbin.org/ip"]
    max_requests = 5
    delay = 5

    def start_requests(self):
        for _ in range(10):
            yield Request(self.start_urls[0], callback=self.parse)

    def middleware(self, request: Request):
        request.headers["Referer"] = "https://httpbin.org"

    def parse(self, response: Response):
        print(response.status_code, response.json())


if __name__ == '__main__':
    s = ViewLocalIPSpider()
    s.go()
{coocan-0.5.3.1.dist-info → coocan-0.5.5.dist-info}/METADATA
@@ -1,10 +1,11 @@
Metadata-Version: 2.4
Name: coocan
-Version: 0.5.3.1
+Version: 0.5.5
Summary: Air Async Spider Framework
Home-page: https://github.com/markadc/coocan
Author: wauo
Author-email: wauo <markadc@126.com>
+License-Expression: MIT
Project-URL: Homepage, https://github.com/markadc/coocan
Requires-Python: >=3.10
Description-Content-Type: text/markdown
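The only metadata changes are the version bump and the new License-Expression: MIT field. To confirm which of the two releases is installed locally, a quick check with the Python standard library (illustrative, not part of coocan) could look like this:

from importlib.metadata import metadata, version

# Both calls read the installed dist-info, so they reflect whichever wheel is actually installed
print(version("coocan"))             # e.g. "0.5.5"
md = metadata("coocan")
print(md.get("License-Expression"))  # "MIT" in 0.5.5; absent in 0.5.3.1
print(md.get("Requires-Python"))     # ">=3.10"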
coocan-0.5.5.dist-info/RECORD
ADDED
@@ -0,0 +1,26 @@
_test/crawl_csdn.py,sha256=ap2mOq3ps7KEbqqKWH5uJqIK_IQ8YFSRRAMzpreQvww,1555
_test/demo.py,sha256=ZxfJzWuNVGhDjhUruyVyZ-BoULHMbzgpnxefLSkheZI,1051
_test/err_demo.py,sha256=EWGqb00KyB192qv3uxMr6YgOr2zKJQb6gkeFtknMLv8,845
_test/test_priority.py,sha256=K8JLC-PaVM4ztLZdYFCumDQP5m2hB8qWAIWXTOMMUyM,601
_test/test_req_delay.py,sha256=35afyHcZk3Gmja9xXXjJSHXnU8WVJGph2ZcTQRxRMNk,479
_test/test_req_err.py,sha256=magK1BUConCBj8TEC29rzmDCbI2u2XXVcPowL6ttP9g,1025
coocan/__init__.py,sha256=UqFmE7ucuR_xR3OyyBU8pxqLfCJ5AdH_HsDdTsYPf6g,55
coocan/gen.py,sha256=J6QWXkBVbiCQqey8i0BDqleRNpBswI8AyvrYmkDVQPw,1028
coocan/push_project.py,sha256=X2fjtYk1oI0ElcibA9wChLx0lCc8hwSelhUNfkJal5o,220
coocan/_examples/crawl_csdn_detail.py,sha256=J2hiKHCS7RskQ9UmNMjE8i6braFwGchH6BxtdulV9RM,1892
coocan/_examples/crawl_csdn_list.py,sha256=ZvhFvBbVXQe-qtXf1T_waXuM4tBleBqbpvzP-5z0RCg,1504
coocan/_examples/recv_item.py,sha256=iJqPuHZ2FykeleFl0Xr0yPwq4UhCnNw84lCPlYyGFzM,1007
coocan/_examples/view_local_ip.py,sha256=Sl086xNNuZqFoRM31_gMvcISSa2QoL3OGghECkQktxg,582
coocan/cmd/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
coocan/cmd/cli.py,sha256=FRggXqDeLsGs-7u3zhaokfk0GCpItwqudf14W9zUfYE,2480
coocan/spider/__init__.py,sha256=kMDCGeqtN50raCzwfCn18s_W8xV6KO_Ny9Xol4I48Ag,58
coocan/spider/base.py,sha256=9Dgn2920Lb9TZGV0cAZSBMvIWuTMqs9M8ZYspx9W0Io,6342
coocan/templates/spider.txt,sha256=5UEXUzb0ses_4ctn0b3vgbpUJ7tCde91ul6rp-g7Hxw,480
coocan/url/__init__.py,sha256=rEMx66XDy5AIJ9mF_2UVzHW5mRLBAWZEyQ3txrZzuZA,102
coocan/url/request.py,sha256=seZaQXQRvRMIf9WnCp3mAgNA-kxsj9P2JzAvuIt2Dx8,1116
coocan/url/response.py,sha256=AnC0xsF34q68r62EVlcHYmDH6skm9RBwRHITTb4iBbU,1785
coocan-0.5.5.dist-info/METADATA,sha256=aYnL3IygL_Dw7iwvDtK81oVQRWIygXmiiSO94C01Bv4,2579
coocan-0.5.5.dist-info/WHEEL,sha256=ck4Vq1_RXyvS4Jt6SI0Vz6fyVs4GWg7AINwpsaGEgPE,91
coocan-0.5.5.dist-info/entry_points.txt,sha256=hNdk42NPboC1o7s7GzMbpII5t2U2jWrtT5bpvliXRcw,47
coocan-0.5.5.dist-info/top_level.txt,sha256=WiN3Gh529qzUs0jVvEReeZsKxFguIQKrFlMOjtxGblM,13
coocan-0.5.5.dist-info/RECORD,,
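Each RECORD row has the form path,sha256=<urlsafe-base64 digest without padding>,<size in bytes>, per the wheel RECORD format. For anyone auditing the hashes above, a small sketch of recomputing one row from an unpacked wheel follows; the helper name and the example path are illustrative.

import base64
import hashlib
from pathlib import Path


def record_row(path: str) -> str:
    # RECORD uses urlsafe base64 of the raw sha256 digest, with '=' padding stripped
    data = Path(path).read_bytes()
    digest = base64.urlsafe_b64encode(hashlib.sha256(data).digest()).rstrip(b"=").decode()
    return "{},sha256={},{}".format(path, digest, len(data))


# Example: compare against the entry shipped in coocan-0.5.5.dist-info/RECORD
print(record_row("coocan/__init__.py"))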
coocan-0.5.3.1.dist-info/RECORD
DELETED
@@ -1,16 +0,0 @@
coocan/__init__.py,sha256=UqFmE7ucuR_xR3OyyBU8pxqLfCJ5AdH_HsDdTsYPf6g,55
coocan/gen.py,sha256=J6QWXkBVbiCQqey8i0BDqleRNpBswI8AyvrYmkDVQPw,1028
coocan/push_project.py,sha256=X2fjtYk1oI0ElcibA9wChLx0lCc8hwSelhUNfkJal5o,220
coocan/cmd/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
coocan/cmd/cli.py,sha256=FRggXqDeLsGs-7u3zhaokfk0GCpItwqudf14W9zUfYE,2480
coocan/spider/__init__.py,sha256=kMDCGeqtN50raCzwfCn18s_W8xV6KO_Ny9Xol4I48Ag,58
coocan/spider/base.py,sha256=9Dgn2920Lb9TZGV0cAZSBMvIWuTMqs9M8ZYspx9W0Io,6342
coocan/templates/spider.txt,sha256=5UEXUzb0ses_4ctn0b3vgbpUJ7tCde91ul6rp-g7Hxw,480
coocan/url/__init__.py,sha256=rEMx66XDy5AIJ9mF_2UVzHW5mRLBAWZEyQ3txrZzuZA,102
coocan/url/request.py,sha256=seZaQXQRvRMIf9WnCp3mAgNA-kxsj9P2JzAvuIt2Dx8,1116
coocan/url/response.py,sha256=AnC0xsF34q68r62EVlcHYmDH6skm9RBwRHITTb4iBbU,1785
coocan-0.5.3.1.dist-info/METADATA,sha256=DrjX_0-M_NEvC8AJam9hh1mY1sXKhgkCoglgqjUk8B8,2556
coocan-0.5.3.1.dist-info/WHEEL,sha256=ck4Vq1_RXyvS4Jt6SI0Vz6fyVs4GWg7AINwpsaGEgPE,91
coocan-0.5.3.1.dist-info/entry_points.txt,sha256=hNdk42NPboC1o7s7GzMbpII5t2U2jWrtT5bpvliXRcw,47
coocan-0.5.3.1.dist-info/top_level.txt,sha256=VwB-Q4zEljgb9v1Ms1E59B-1pBYORXuhKjgZb-LHOhk,7
coocan-0.5.3.1.dist-info/RECORD,,
File without changes
|
File without changes
|