intellif-aihub 0.1.1__py3-none-any.whl → 0.1.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of intellif-aihub might be problematic. Click here for more details.
- aihub/__init__.py +1 -4
- aihub/client.py +87 -0
- aihub/exceptions.py +18 -0
- aihub/models/__init__.py +0 -0
- aihub/models/artifact.py +137 -0
- aihub/models/common.py +13 -0
- aihub/models/dataset_management.py +99 -0
- aihub/models/document_center.py +28 -0
- aihub/models/labelfree.py +31 -0
- aihub/models/quota_schedule_management.py +0 -0
- aihub/models/tag_management.py +21 -0
- aihub/models/task_center.py +117 -0
- aihub/models/user.py +46 -0
- aihub/services/__init__.py +0 -0
- aihub/services/artifact.py +332 -0
- aihub/services/dataset_management.py +240 -0
- aihub/services/document_center.py +43 -0
- aihub/services/labelfree.py +44 -0
- aihub/services/quota_schedule_management.py +18 -0
- aihub/services/reporter.py +20 -0
- aihub/services/tag_management.py +35 -0
- aihub/services/task_center.py +190 -0
- aihub/services/user.py +47 -0
- aihub/utils/__init__.py +0 -0
- aihub/utils/download.py +69 -0
- aihub/utils/http.py +13 -0
- aihub/utils/s3.py +77 -0
- intellif_aihub-0.1.2.dist-info/METADATA +110 -0
- intellif_aihub-0.1.2.dist-info/RECORD +32 -0
- {intellif_aihub-0.1.1.dist-info → intellif_aihub-0.1.2.dist-info}/licenses/LICENSE +200 -200
- aihub/_version.py +0 -1
- aihub/stop.py +0 -50
- intellif_aihub-0.1.1.dist-info/METADATA +0 -24
- intellif_aihub-0.1.1.dist-info/RECORD +0 -8
- {intellif_aihub-0.1.1.dist-info → intellif_aihub-0.1.2.dist-info}/WHEEL +0 -0
- {intellif_aihub-0.1.1.dist-info → intellif_aihub-0.1.2.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,35 @@
|
|
|
1
|
+
# !/usr/bin/env python
|
|
2
|
+
# -*-coding:utf-8 -*-
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import httpx
|
|
6
|
+
|
|
7
|
+
from ..exceptions import APIError
|
|
8
|
+
from ..models.common import APIWrapper
|
|
9
|
+
from ..models.tag_management import *
|
|
10
|
+
|
|
11
|
+
_BASE = "/tag-resource-management/api/v1"
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
class TagManagementService:
    """Facade over the tag/resource-management HTTP API."""

    def __init__(self, http: httpx.Client):
        self._project = _Project(http)

    def select_projects(self) -> List[Project]:
        """Return every project visible to the caller (delegates to the project sub-service)."""
        return self.project.select_projects()

    @property
    def project(self) -> _Project:
        """Expose the low-level project sub-service for direct access."""
        return self._project
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
class _Project:
    """Low-level client for the project endpoints of the tag-management API."""

    def __init__(self, http: httpx.Client):
        self._http = http

    def select_projects(self) -> List[Project]:
        """Fetch all projects.

        Raises:
            APIError: when the backend envelope carries a non-zero code.
        """
        response = self._http.get(f"{_BASE}/select-projects")
        wrapper = APIWrapper[ProjectListData].model_validate(response.json())
        if wrapper.code != 0:
            raise APIError(f"backend code {wrapper.code}: {wrapper.msg}")
        return wrapper.data.data
|
|
@@ -0,0 +1,190 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import datetime
|
|
4
|
+
|
|
5
|
+
import httpx
|
|
6
|
+
from loguru import logger
|
|
7
|
+
|
|
8
|
+
from .tag_management import TagManagementService
|
|
9
|
+
from .user import UserService
|
|
10
|
+
from ..exceptions import APIError
|
|
11
|
+
from ..models.common import APIWrapper
|
|
12
|
+
from ..models.task_center import (
|
|
13
|
+
CreateTaskReq,
|
|
14
|
+
CreateTaskResp,
|
|
15
|
+
CreateTaskOtherInfo,
|
|
16
|
+
LabelProjectTypeEnum,
|
|
17
|
+
TaskCenterPriorityEnum,
|
|
18
|
+
LabelTaskDetail,
|
|
19
|
+
)
|
|
20
|
+
from ..models.user import UserSearchReq
|
|
21
|
+
|
|
22
|
+
_BASE = "/task-center/api/v1"
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
def date_str_to_timestamp(date_str: str) -> int:
    """Convert a ``YYYY-MM-DD`` date string to a Unix timestamp.

    The timestamp corresponds to midnight of that date in the *local*
    timezone (``datetime.timestamp()`` on a naive datetime uses local time).
    """
    parsed = datetime.datetime.strptime(date_str, "%Y-%m-%d")
    return int(parsed.timestamp())
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
class TaskCenterService:
    """High-level facade over the task-center HTTP API."""

    def __init__(self, http: httpx.Client):
        self._http = http
        self._TaskCenter = _TaskCenter(http)

    def create(self, payload: CreateTaskReq) -> int:
        """Create a task from a fully-populated request payload; returns the new task id."""
        return self._TaskCenter.create(payload)

    def get(self, task_id: int) -> LabelTaskDetail:
        """Fetch one task's detail.

        Args:
            task_id (int): id of the task.

        Returns:
            LabelTaskDetail: the task detail.
        """
        return self._TaskCenter.get(task_id)

    # Keep the low-level sub-service reachable for callers that need it.
    @property
    def TaskCenter(self) -> _TaskCenter:
        return self._TaskCenter

    def create_label_task(
        self,
        name: str,
        dataset_version_name: str,
        feishu_doc_name: str,
        task_receiver_name: str,
        estimated_delivery_at: str,
        project_name: str,
        label_type: LabelProjectTypeEnum = LabelProjectTypeEnum.IMAGE_CLASSIFICATION,
        description: str = "",
        task_priority: TaskCenterPriorityEnum = TaskCenterPriorityEnum.low,
    ) -> int:
        """Create a labeling task, resolving human-readable names to backend ids.

        Examples:
            >>> from aihub.client import Client
            >>> client = Client(base_url="xxx", token="xxxx")
            >>> task_id = client.task_center.create_label_task( \
                name="test_task", dataset_version_name="re/V1", \
                feishu_doc_name="人脸质量人脸照片分类", task_receiver_name="hyc", \
                project_name="hycpro", estimated_delivery_at="2025-08-01")

        Args:
            name (str): task name.
            dataset_version_name (str): dataset version identifier (e.g. ``"re/V1"``).
            feishu_doc_name (str): name of the Feishu document to attach.
            task_receiver_name (str): nickname of the user receiving the task.
            estimated_delivery_at (str): expected delivery date, ``"YYYY-MM-DD"``.
            project_name (str): project name to attach the task to.
            label_type (LabelProjectTypeEnum): labeling project type; defaults to image classification.
            description (str): task description; defaults to empty.
            task_priority (TaskCenterPriorityEnum): priority; defaults to low.

        Returns:
            int: the id of the created task.

        Raises:
            APIError: when the project or document cannot be resolved,
                or any underlying service call fails.
        """
        # Resolve the receiver nickname to a user id.
        receiver_id = UserService(self._http).search_one(
            payload=UserSearchReq(nickname=task_receiver_name)
        )

        # Resolve the project name to a project id.
        projects = TagManagementService(self._http).select_projects()
        project_id = next(
            (proj.id for proj in projects if proj.name == project_name), None
        )
        if project_id is None:
            raise APIError(f"未找到项目: {project_name}")

        # Imported here, presumably to avoid a circular import between
        # service modules — TODO confirm.
        from .dataset_management import DatasetManagementService

        version = DatasetManagementService(self._http).get_dataset_version_by_name(
            version_name=dataset_version_name
        )

        # Same lazy-import pattern as above.
        from .document_center import DocumentCenterService

        docs = DocumentCenterService(self._http).get_documents(name=feishu_doc_name)
        if not docs:
            raise APIError(f"未找到文档: {feishu_doc_name}")

        task_req = CreateTaskReq(
            name=name,
            description=description,
            task_priority=task_priority,
            type="label",
            receiver_id=receiver_id,
            other_info=CreateTaskOtherInfo(
                label_project_type=label_type,
                dataset_id=version.dataset_id,
                dataset_version_id=version.id,
                doc_id=docs[0].id,
                doc_type="doc_center",
            ),
            project_id=project_id,
            estimated_delivery_at=date_str_to_timestamp(estimated_delivery_at),
        )
        return self.create(task_req)
|
|
156
|
+
|
|
157
|
+
|
|
158
|
+
class _TaskCenter:
    """Low-level client for the task-center REST endpoints."""

    def __init__(self, http: httpx.Client):
        self._http = http

    @staticmethod
    def _raise_for_status(resp) -> None:
        # Any non-200 response is surfaced as an APIError carrying the raw body.
        if resp.status_code != 200:
            raise APIError(
                message="API Error", status=resp.status_code, detail=resp.json()
            )

    def create(self, payload: CreateTaskReq) -> int:
        """POST a new task and return its id."""
        logger.debug(f"create task: {payload}")
        resp = self._http.post(f"{_BASE}/tasks", json=payload.model_dump())
        logger.debug(f"create task response: {resp.text}")
        self._raise_for_status(resp)
        wrapper = APIWrapper[CreateTaskResp].model_validate(resp.json())
        return wrapper.data.id

    def get(self, task_id: int) -> LabelTaskDetail:
        """GET a single task's detail by id."""
        logger.debug(f"get task: {task_id}")
        resp = self._http.get(f"{_BASE}/tasks/{task_id}")
        logger.debug(f"get task response: {resp.text}")
        self._raise_for_status(resp)
        wrapper = APIWrapper[LabelTaskDetail].model_validate(resp.json())
        return wrapper.data
|
aihub/services/user.py
ADDED
|
@@ -0,0 +1,47 @@
|
|
|
1
|
+
# !/usr/bin/env python
|
|
2
|
+
# -*-coding:utf-8 -*-
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import httpx
|
|
6
|
+
|
|
7
|
+
from ..exceptions import APIError
|
|
8
|
+
from ..models.common import APIWrapper
|
|
9
|
+
from ..models.user import *
|
|
10
|
+
|
|
11
|
+
_BASE = "/api/v1/search-users"
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
class UserService:
    """Facade over the user-search HTTP API."""

    def __init__(self, http: httpx.Client):
        self._user = _User(http)

    def search_one(self, payload: UserSearchReq) -> int:
        """Return the id of the user whose nickname exactly matches *payload*."""
        return self._user.search(payload)

    @property
    def user(self) -> _User:
        """Expose the low-level user sub-service for direct access."""
        return self._user
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
class _User:
    """Low-level client for the search-users endpoint."""

    def __init__(self, http: httpx.Client):
        self._http = http

    def search(self, payload: UserSearchReq) -> int:
        """Search users and return the id of the exact nickname match.

        Args:
            payload (UserSearchReq): search request; ``nickname`` is matched
                exactly against the returned users.

        Returns:
            int: the matching user's id.

        Raises:
            APIError: on a non-zero backend code, an empty result set, or
                when no returned user's nickname equals ``payload.nickname``.
        """
        resp = self._http.post(
            f"{_BASE}",
            json=payload.model_dump(by_alias=True, exclude_none=True),
        )
        wrapper = APIWrapper[UserSearchListData].model_validate(resp.json())
        if wrapper.code != 0:
            raise APIError(f"backend code {wrapper.code}: {wrapper.msg}")
        if wrapper.data.total == 0:
            # Bug fix: this previously raised "no dataset found" — a
            # copy-paste from the dataset service — although this is a
            # user search.
            raise APIError("no user found")
        for item in wrapper.data.data:
            if item.nickname == payload.nickname:
                return item.id
        # Loop finished without an exact nickname match (replaces the
        # original for/else construct with an explicit fall-through raise).
        raise APIError("no user found")
|
aihub/utils/__init__.py
ADDED
|
File without changes
|
aihub/utils/download.py
ADDED
|
@@ -0,0 +1,69 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import concurrent.futures
|
|
4
|
+
import os
|
|
5
|
+
import tempfile
|
|
6
|
+
from typing import List, TypedDict
|
|
7
|
+
|
|
8
|
+
import pyarrow.parquet as pq
|
|
9
|
+
from tqdm import tqdm
|
|
10
|
+
|
|
11
|
+
from .http import http_download_file
|
|
12
|
+
from .s3 import s3_to_url
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
class DatasetParquetMeta(TypedDict):
    """One row of the dataset index parquet file."""

    parent_dir: str  # directory path relative to the dataset root
    name: str  # file or directory basename
    s3path: str  # s3://bucket/key location of the object
    type: int  # 0 = regular file, 1 = directory


# Value of the ``type`` column that marks a regular file.
_ENUM_FILE = 0
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
def _read_parquet_index(file_path: str) -> List[DatasetParquetMeta]:
    """Load the parquet index file and return its rows as plain dicts."""
    return pq.read_table(file_path).to_pylist()
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
def _safe_rel(part: str) -> str:
    """Normalize a path fragment into a relative component.

    Strips any drive prefix and leading slashes/backslashes so the fragment
    can be joined under a destination directory.

    NOTE(review): ``..`` components are not neutralized here — confirm the
    index source is trusted before relying on this for path safety.
    """
    if not part:
        return ""
    _drive, tail = os.path.splitdrive(part)
    return tail.lstrip("\\/")
|
|
35
|
+
|
|
36
|
+
|
|
37
|
+
def dataset_download(index_url: str, local_dir: str, worker: int = 4) -> None:
    """Download every regular file listed in a parquet dataset index.

    Args:
        index_url: HTTP URL of the parquet index describing the dataset.
        local_dir: destination directory; relative paths from the index
            rows are recreated beneath it.
        worker: number of concurrent download threads (minimum 1).
    """
    with tempfile.TemporaryDirectory() as tmpdir:
        index_path = os.path.join(tmpdir, "index.parquet")
        http_download_file(index_url, index_path)
        rows = _read_parquet_index(index_path)

        # The data files are fetched from the same host the index came from.
        host = (index_url.split("//", 1)[-1]).split("/", 1)[0]

        jobs = []
        for row in rows:
            if row["type"] != _ENUM_FILE:
                continue  # skip directory entries
            target = os.path.join(
                local_dir,
                _safe_rel(row["parent_dir"]),
                _safe_rel(row["name"]),
            )
            jobs.append((target, s3_to_url(row["s3path"], host)))

        workers = max(1, worker)

        with tqdm(total=len(jobs), desc="Downloading dataset") as bar, \
                concurrent.futures.ThreadPoolExecutor(max_workers=workers) as pool:

            def _fetch(target: str, url: str):
                http_download_file(url, target)
                # NOTE(review): assumes tqdm.update is safe to call from
                # worker threads — confirm.
                bar.update()

            pending = [pool.submit(_fetch, t, u) for t, u in jobs]
            for fut in concurrent.futures.as_completed(pending):
                fut.result()  # re-raise any download error
|
aihub/utils/http.py
ADDED
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import httpx
|
|
4
|
+
import os
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
def http_download_file(url: str, dst_path: str, chunk: int = 1 << 16) -> None:
    """Stream *url* to *dst_path*, creating parent directories as needed.

    Args:
        url: source URL (redirects are followed; no timeout is applied).
        dst_path: destination file path; overwritten if it exists.
        chunk: read-buffer size in bytes (default 64 KiB).

    Raises:
        httpx.HTTPStatusError: if the server responds with an error status.
    """
    parent = os.path.dirname(dst_path)
    # Bug fix: a bare filename has an empty dirname, and os.makedirs("")
    # raises FileNotFoundError — only create directories when one exists.
    if parent:
        os.makedirs(parent, exist_ok=True)
    with httpx.stream("GET", url, follow_redirects=True, timeout=None) as r:
        r.raise_for_status()
        with open(dst_path, "wb") as f:
            for block in r.iter_bytes(chunk):
                f.write(block)
|
aihub/utils/s3.py
ADDED
|
@@ -0,0 +1,77 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import os
|
|
4
|
+
import re
|
|
5
|
+
from pathlib import Path
|
|
6
|
+
from typing import Any
|
|
7
|
+
|
|
8
|
+
from loguru import logger
|
|
9
|
+
from minio import Minio
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
def s3_to_url(s3_path: str, host: str) -> str:
    """Map an ``s3://bucket/key`` URI to a plain HTTP URL served by *host*."""
    bucket_and_key = s3_path.replace("s3://", "").lstrip("/")
    base = host.rstrip("/")
    return f"http://{base}/{bucket_and_key}"
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
def S3_path_to_info(s3_path) -> tuple[str | Any, str | Any] | None:
    """Split an ``s3://bucket/objectname`` URI into ``(bucket, objectname)``.

    Args:
        s3_path: candidate S3 URI string.

    Returns:
        ``(bucket, objectname)`` on success, or ``None`` when the string is
        not an ``s3://`` URI or has no object name after the bucket.
    """
    if not s3_path.startswith("s3://"):
        return None

    # Bug fix: the bucket pattern was \w+, which rejects valid bucket names
    # containing "-" or "." (both common in S3); match anything up to the
    # first "/" instead. Every previously-accepted path still matches.
    pattern = r"s3://(?P<bucket>[^/]+)/(?P<objectname>.+)"

    match = re.match(pattern, s3_path)
    if match is None:
        return None
    return match.group("bucket"), match.group("objectname")
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
def local_path_to_s3_key(work_dir: str, local_path: str) -> str:
    """Return *local_path* relative to *work_dir*, for use as an S3 object key.

    Raises:
        ValueError: if *local_path* is not located under *work_dir*.
    """
    return str(Path(local_path).relative_to(Path(work_dir)))
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
def upload_dir_to_s3(
    s3_client: Minio, local_dir: str, bucket: str, object_prefix: str
) -> None:
    """Recursively upload every file under *local_dir* to S3.

    Object keys are the file paths relative to *local_dir*, joined under
    *object_prefix*.
    """
    logger.info(
        f"Uploading directory {local_dir} to S3 bucket {bucket} with prefix {object_prefix}"
    )

    for root, _dirs, filenames in os.walk(local_dir):
        for filename in filenames:
            src = Path(root) / filename
            key = local_path_to_s3_key(local_dir, str(src))
            # NOTE(review): os.path.join uses "\" on Windows, which would
            # leak into S3 keys — confirm this only runs on POSIX.
            s3_client.fput_object(bucket, os.path.join(object_prefix, key), str(src))

    logger.info(
        f"Uploaded directory {local_dir} to S3 bucket {bucket} with prefix {object_prefix}"
    )
    return
|
|
58
|
+
|
|
59
|
+
|
|
60
|
+
def download_dir_from_s3(
    s3_client: Minio, bucket: str, object_prefix: str, local_dir: str
) -> None:
    """Download every object under *object_prefix* from S3 into *local_dir*.

    Each object's path relative to the prefix is recreated under *local_dir*.
    """
    logger.info(
        f"Downloading directory from S3 bucket {bucket} with prefix {object_prefix} to {local_dir}"
    )

    for obj in s3_client.list_objects(bucket, object_prefix, recursive=True):
        # NOTE(review): relative_to raises if an object name is not under
        # object_prefix (e.g. prefix "pre" also matching "prefix2/...") —
        # confirm prefixes always end with "/".
        rel_name = Path(obj.object_name).relative_to(object_prefix)
        s3_client.fget_object(
            bucket, obj.object_name, os.path.join(local_dir, rel_name)
        )

    logger.info(
        f"Downloaded directory from S3 bucket {bucket} with prefix {object_prefix} to {local_dir}"
    )
    return
|
|
@@ -0,0 +1,110 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: intellif-aihub
|
|
3
|
+
Version: 0.1.2
|
|
4
|
+
Summary: Intellif AI-hub SDK.
|
|
5
|
+
Author-email: Platform Team <aihub@example.com>
|
|
6
|
+
License-Expression: Apache-2.0
|
|
7
|
+
Keywords: AI-hub,sdk,intellif
|
|
8
|
+
Classifier: Programming Language :: Python :: 3
|
|
9
|
+
Classifier: Operating System :: OS Independent
|
|
10
|
+
Requires-Python: >=3.8
|
|
11
|
+
Description-Content-Type: text/markdown
|
|
12
|
+
License-File: LICENSE
|
|
13
|
+
Requires-Dist: httpx<0.28,>=0.27
|
|
14
|
+
Requires-Dist: pydantic<3.0,>=2.5.3
|
|
15
|
+
Requires-Dist: typing-extensions<5.0,>=4.13.2
|
|
16
|
+
Requires-Dist: pyarrow<16.0,>=15.0
|
|
17
|
+
Requires-Dist: tqdm<5.0,>=4.66
|
|
18
|
+
Requires-Dist: loguru>=0.7.3
|
|
19
|
+
Requires-Dist: minio>=7.2.7
|
|
20
|
+
Dynamic: license-file
|
|
21
|
+
|
|
22
|
+
# Intellif AI-Hub SDK
|
|
23
|
+
|
|
24
|
+
**Intellif AI-Hub** 官方 Python 开发包。
|
|
25
|
+
一个 `Client` 对象即可完成数据集管理、标注统计、任务中心等常见操作,无需手写 HTTP 请求。
|
|
26
|
+
|
|
27
|
+
```
|
|
28
|
+
aihub_sdk/
|
|
29
|
+
├─ pyproject.toml
|
|
30
|
+
├─ requirements.txt
|
|
31
|
+
├─ src/aihub/
|
|
32
|
+
│ ├─ client.py
|
|
33
|
+
│ ├─ exceptions.py
|
|
34
|
+
│ ├─ models/…
|
|
35
|
+
│ ├─ services/…
|
|
36
|
+
│ └─ utils/…
|
|
37
|
+
└─ tests/
|
|
38
|
+
```
|
|
39
|
+
|
|
40
|
+
---
|
|
41
|
+
|
|
42
|
+
## 💻 安装
|
|
43
|
+
|
|
44
|
+
```bash
|
|
45
|
+
# PyPI 安装
|
|
46
|
+
pip install intellif-aihub
|
|
47
|
+
# 运行环境:Python ≥ 3.8
|
|
48
|
+
```
|
|
49
|
+
|
|
50
|
+
---
|
|
51
|
+
|
|
52
|
+
## 🚀 快速上手
|
|
53
|
+
|
|
54
|
+
```python
|
|
55
|
+
from aihub import Client
|
|
56
|
+
|
|
57
|
+
BASE = "http://192.168.13.160:30021"
|
|
58
|
+
TOKEN = "eyJhb..." # 或设置环境变量:export AI_HUB_TOKEN=...
|
|
59
|
+
|
|
60
|
+
with Client(base_url=BASE, token=TOKEN) as cli:
|
|
61
|
+
# 1. 同时创建数据集 + 版本(上传本地 ZIP)
|
|
62
|
+
ds_id, ver_id, tag = cli.dataset_management.create_dataset_and_version(
|
|
63
|
+
dataset_name="cats",
|
|
64
|
+
is_local_upload=True,
|
|
65
|
+
local_file_path="/data/cats.zip",
|
|
66
|
+
version_description="first release",
|
|
67
|
+
)
|
|
68
|
+
print("数据集标识:", tag) # 输出:cats/V1
|
|
69
|
+
|
|
70
|
+
# 2. 下载数据集
|
|
71
|
+
cli.dataset_management.run_download(
|
|
72
|
+
dataset_version_name=tag,
|
|
73
|
+
local_dir="/tmp/cats",
|
|
74
|
+
worker=8,
|
|
75
|
+
)
|
|
76
|
+
|
|
77
|
+
# 3. 获取标注平台全局统计
|
|
78
|
+
stats = cli.labelfree.get_project_global_stats("cat-project")
|
|
79
|
+
print("总标注数:", stats.global_stats.total_annotations)
|
|
80
|
+
```
|
|
81
|
+
|
|
82
|
+
---
|
|
83
|
+
|
|
84
|
+
## 🌍 环境变量
|
|
85
|
+
|
|
86
|
+
| 变量 | 作用 | 默认值 |
|
|
87
|
+
|----------------------------|-------------------------------------------|----------------------------------|
|
|
88
|
+
| `AI_HUB_TOKEN` | API 鉴权 Token(可不在 `Client` 中显式传入) | – |
|
|
89
|
+
|
|
90
|
+
---
|
|
91
|
+
|
|
92
|
+
## 📦 打包 & 发布
|
|
93
|
+
|
|
94
|
+
项目采用 PEP 517 / `pyproject.toml` 构建规范。
|
|
95
|
+
|
|
96
|
+
```bash
|
|
97
|
+
# 1️⃣ 构建 wheel / sdist
|
|
98
|
+
python -m pip install --upgrade build
|
|
99
|
+
python -m build # 生成 dist/*.whl dist/*.tar.gz
|
|
100
|
+
|
|
101
|
+
# 2️⃣ 本地验证
|
|
102
|
+
pip install --force-reinstall dist/*.whl
|
|
103
|
+
python -c "import aihub, sys; print('SDK 版本:', aihub.__version__)"
|
|
104
|
+
|
|
105
|
+
# 3️⃣ 发布到 PyPI 或私有仓库
|
|
106
|
+
python -m pip install --upgrade twine
|
|
107
|
+
twine upload dist/*
|
|
108
|
+
```
|
|
109
|
+
|
|
110
|
+
---
|
|
@@ -0,0 +1,32 @@
|
|
|
1
|
+
aihub/__init__.py,sha256=YvuYzWnKtqBb-IqG8HAu-nhIYAsgj9Vmc_b9o7vO-js,22
|
|
2
|
+
aihub/client.py,sha256=DJEG7fGAiWFuh-2-KucZdMxy2v3caDvCn-_1CuxZ90o,2953
|
|
3
|
+
aihub/exceptions.py,sha256=l2cMAvipTqQOio3o11fXsCCSCevbuK4PTsxofkobFjk,500
|
|
4
|
+
aihub/models/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
5
|
+
aihub/models/artifact.py,sha256=I07sXB3iIRlVnHm1HkYCXKIIPt5RGncxXuxANat8Vzs,3530
|
|
6
|
+
aihub/models/common.py,sha256=qmabc2LkAdQJXIcpT1P35zxd0Lc8yDYdD4ame1iF4Bs,241
|
|
7
|
+
aihub/models/dataset_management.py,sha256=lP92aOsZJihg4SEhf1jeITcXp-N8l_YzHYf1l9Zq7-g,3381
|
|
8
|
+
aihub/models/document_center.py,sha256=xmAk_JIY3GjuVDZurMUonmSz3Siy3TAxhj3ewIJ6dUQ,489
|
|
9
|
+
aihub/models/labelfree.py,sha256=hH9jOJ6_I3fdn1jlAtEuMwP3Vv9EzYlda_GKb8IJN2I,1179
|
|
10
|
+
aihub/models/quota_schedule_management.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
11
|
+
aihub/models/tag_management.py,sha256=5_G9R6OZWYnm0ix_mLzMIujZhO_NNx6MeFA_Gl_zYBI,319
|
|
12
|
+
aihub/models/task_center.py,sha256=N4Tdg_G3TgVc1Qse8sKbYihnbGkIVwHfGAEq-4v-zX4,3870
|
|
13
|
+
aihub/models/user.py,sha256=Zv95o8ZAdAgmAsN5VsBhHRCCrw-RiAbHPg_Ljc-L3Mo,1004
|
|
14
|
+
aihub/services/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
15
|
+
aihub/services/artifact.py,sha256=FaimaoHdZBr5hg6_jwj9md8EmxdWNFxIAFz1ML_xKu8,10370
|
|
16
|
+
aihub/services/dataset_management.py,sha256=vNIc_JocgotFCFyFeCade0PWZspRjIkGhzt0qj1xQ0s,8672
|
|
17
|
+
aihub/services/document_center.py,sha256=DClItxWXMMFPnKMR7kyNGohhXP3wowImj-Lm8vzBgNo,1339
|
|
18
|
+
aihub/services/labelfree.py,sha256=Pc0kW-x8VLawqQWdY9ZDpZMPRB2KgXNk9-JE8Dz15Bs,1194
|
|
19
|
+
aihub/services/quota_schedule_management.py,sha256=THSAJEi7QzilV9uT9Fy14p2ToIfn8nXQNgtIeEispvg,395
|
|
20
|
+
aihub/services/reporter.py,sha256=ot93SmhxgwDJOzlHSCwlxDOuSydTWUEUQ-Ctp97wJBQ,669
|
|
21
|
+
aihub/services/tag_management.py,sha256=Hci74medV5-p5VAZOYDEGrjSO1ioqET5IAnx7nqOiBo,943
|
|
22
|
+
aihub/services/task_center.py,sha256=EA9j13IjQJYQMTNKDR-SbY-GUCfWoJmHc37ckVuPbUE,6110
|
|
23
|
+
aihub/services/user.py,sha256=5qpvIKQhD0UWPQalbieLjyQY1cKp8pMlx_LTUKyIAKU,1327
|
|
24
|
+
aihub/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
25
|
+
aihub/utils/download.py,sha256=Rh1m3VpMlw8-Kl36sowJ7M0dpB68u-9V4Vo3GQChq1I,1758
|
|
26
|
+
aihub/utils/http.py,sha256=rSNh4uNP7E3YGm3H1indRHctxC5Wu5xNBPvDrb9UHt4,421
|
|
27
|
+
aihub/utils/s3.py,sha256=ISIBP-XdBPkURpXnN56ZnIWokOOg2SRUh_qvxJk-G1Q,2187
|
|
28
|
+
intellif_aihub-0.1.2.dist-info/licenses/LICENSE,sha256=QwcOLU5TJoTeUhuIXzhdCEEDDvorGiC6-3YTOl4TecE,11356
|
|
29
|
+
intellif_aihub-0.1.2.dist-info/METADATA,sha256=hS0Y6s7g8IlicFlk7myET52gYJyFVRUGMo6FisCGqRY,2920
|
|
30
|
+
intellif_aihub-0.1.2.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
|
|
31
|
+
intellif_aihub-0.1.2.dist-info/top_level.txt,sha256=vIvTtSIN73xv46BpYM-ctVGnyOiUQ9EWP_6ngvdIlvw,6
|
|
32
|
+
intellif_aihub-0.1.2.dist-info/RECORD,,
|