veadk-python 0.2.8__py3-none-any.whl → 0.2.9__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of veadk-python might be problematic.
- veadk/agent.py +3 -0
- veadk/cli/cli_prompt.py +9 -2
- veadk/cli/cli_web.py +3 -0
- veadk/configs/database_configs.py +9 -0
- veadk/consts.py +7 -0
- veadk/evaluation/adk_evaluator/adk_evaluator.py +5 -2
- veadk/evaluation/base_evaluator.py +36 -25
- veadk/evaluation/deepeval_evaluator/deepeval_evaluator.py +5 -3
- veadk/integrations/ve_prompt_pilot/ve_prompt_pilot.py +9 -3
- veadk/integrations/ve_tls/utils.py +1 -2
- veadk/integrations/ve_tls/ve_tls.py +9 -5
- veadk/integrations/ve_tos/ve_tos.py +538 -67
- veadk/knowledgebase/backends/base_backend.py +4 -4
- veadk/knowledgebase/backends/vikingdb_knowledge_backend.py +162 -50
- veadk/knowledgebase/entry.py +25 -0
- veadk/knowledgebase/knowledgebase.py +19 -4
- veadk/memory/long_term_memory.py +20 -7
- veadk/memory/long_term_memory_backends/mem0_backend.py +129 -0
- veadk/runner.py +12 -19
- veadk/tools/builtin_tools/generate_image.py +355 -0
- veadk/tools/builtin_tools/image_edit.py +56 -16
- veadk/tools/builtin_tools/image_generate.py +51 -15
- veadk/tools/builtin_tools/video_generate.py +41 -41
- veadk/tools/load_knowledgebase_tool.py +2 -8
- veadk/tracing/telemetry/attributes/extractors/llm_attributes_extractors.py +1 -1
- veadk/tracing/telemetry/opentelemetry_tracer.py +8 -2
- veadk/version.py +1 -1
- {veadk_python-0.2.8.dist-info → veadk_python-0.2.9.dist-info}/METADATA +13 -2
- {veadk_python-0.2.8.dist-info → veadk_python-0.2.9.dist-info}/RECORD +33 -30
- {veadk_python-0.2.8.dist-info → veadk_python-0.2.9.dist-info}/WHEEL +0 -0
- {veadk_python-0.2.8.dist-info → veadk_python-0.2.9.dist-info}/entry_points.txt +0 -0
- {veadk_python-0.2.8.dist-info → veadk_python-0.2.9.dist-info}/licenses/LICENSE +0 -0
- {veadk_python-0.2.8.dist-info → veadk_python-0.2.9.dist-info}/top_level.txt +0 -0
veadk/integrations/ve_tos/ve_tos.py

```diff
@@ -15,7 +15,8 @@
 import asyncio
 import os
 from datetime import datetime
-from
+from io import StringIO
+from typing import TYPE_CHECKING, List, Optional, Union
 from urllib.parse import urlparse
 
 from veadk.consts import DEFAULT_TOS_BUCKET_NAME
```
```diff
@@ -40,6 +41,13 @@ class VeTOS:
     ) -> None:
         self.ak = ak if ak else os.getenv("VOLCENGINE_ACCESS_KEY", "")
         self.sk = sk if sk else os.getenv("VOLCENGINE_SECRET_KEY", "")
+        # Add empty value validation
+        if not self.ak or not self.sk:
+            raise ValueError(
+                "VOLCENGINE_ACCESS_KEY and VOLCENGINE_SECRET_KEY must be provided "
+                "either via parameters or environment variables."
+            )
+
         self.region = region
         self.bucket_name = (
             bucket_name if bucket_name else getenv("", DEFAULT_TOS_BUCKET_NAME)
```
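The new guard in `__init__` turns missing credentials into a hard error instead of a silently empty client. Below is a minimal sketch of the resulting behavior; it assumes the remaining constructor arguments all have defaults (this diff does not show the full signature), and the key values are placeholders.

```python
# Sketch under assumptions: VeTOS may require additional constructor arguments;
# the credential values here are placeholders, not real keys.
import os

from veadk.integrations.ve_tos.ve_tos import VeTOS

os.environ.setdefault("VOLCENGINE_ACCESS_KEY", "AK_PLACEHOLDER")
os.environ.setdefault("VOLCENGINE_SECRET_KEY", "SK_PLACEHOLDER")

# In 0.2.9 this raises ValueError if neither the arguments nor the env vars are set.
tos = VeTOS()
```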
```diff
@@ -68,7 +76,7 @@ class VeTOS:
             )
             logger.info("Init TOS client.")
         except Exception as e:
-            logger.error(f"Client initialization failed:{e}")
+            logger.error(f"Client initialization failed: {e}")
 
     def _refresh_client(self):
         try:
```
```diff
@@ -85,38 +93,74 @@ class VeTOS:
             logger.error(f"Failed to refresh client: {str(e)}")
             self._client = None
 
-    def
-
+    def _check_bucket_name(self, bucket_name: str = "") -> str:
+        return bucket_name or self.bucket_name
+
+    def bucket_exists(self, bucket_name: str) -> bool:
+        """Check if bucket exists
+
+        Args:
+            bucket_name: Bucket name
+
+        Returns:
+            bool: True if bucket exists, False otherwise
+        """
+        bucket_name = self._check_bucket_name(bucket_name)
         if not self._client:
             logger.error("TOS client is not initialized")
             return False
+
         try:
-            self._client.head_bucket(
-            logger.
-
-            if e.status_code == 404:
-                try:
-                    self._client.create_bucket(
-                        bucket=self.bucket_name,
-                        storage_class=self._tos_module.StorageClassType.Storage_Class_Standard,
-                        acl=self._tos_module.ACLType.ACL_Public_Read,
-                    )
-                    logger.info(f"Bucket {self.bucket_name} created successfully")
-                    self._refresh_client()
-                except Exception as create_error:
-                    logger.error(f"Bucket creation failed: {str(create_error)}")
-                    return False
-            else:
-                logger.error(f"Bucket check failed: {str(e)}")
-                return False
+            self._client.head_bucket(bucket_name)
+            logger.debug(f"Bucket {bucket_name} exists")
+            return True
         except Exception as e:
-            logger.error(
+            logger.error(
+                f"Unexpected error when checking bucket {bucket_name}: {str(e)}"
+            )
+            return False
+
+    def create_bucket(self, bucket_name: str = "") -> bool:
+        """Create bucket (if not exists)
+
+        Args:
+            bucket_name: Bucket name
+
+        Returns:
+            bool: True if bucket exists or created successfully, False otherwise
+        """
+        bucket_name = self._check_bucket_name(bucket_name)
+        if not self._client:
+            logger.error("TOS client is not initialized")
+            return False
+
+        # Check if bucket already exists
+        if self.bucket_exists(bucket_name):
+            logger.info(f"Bucket {bucket_name} already exists, no need to create")
+            return True
+
+        # Try to create bucket
+        try:
+            logger.info(f"Attempting to create bucket: {bucket_name}")
+            self._client.create_bucket(
+                bucket=bucket_name,
+                storage_class=self._tos_module.StorageClassType.Storage_Class_Standard,
+                acl=self._tos_module.ACLType.ACL_Public_Read,
+            )
+            logger.info(f"Bucket {bucket_name} created successfully")
+            self._refresh_client()
+        except self._tos_module.exceptions.TosServerError as e:
+            logger.error(
+                f"Failed to create bucket {bucket_name}: status_code={e.status_code}, {str(e)}"
+            )
             return False
 
-        #
-        return self._set_cors_rules()
+        # Set CORS rules
+        return self._set_cors_rules(bucket_name)
+
+    def _set_cors_rules(self, bucket_name: str) -> bool:
+        bucket_name = self._check_bucket_name(bucket_name)
 
-    def _set_cors_rules(self) -> bool:
         if not self._client:
             logger.error("TOS client is not initialized")
             return False
```
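The old single check-and-create method is split into `bucket_exists`, `create_bucket`, and `_set_cors_rules`, all of which accept an explicit bucket name and fall back to the instance default through `_check_bucket_name`. A rough usage sketch, reusing the `tos` instance from the previous snippet (the bucket name is illustrative):

```python
# Illustrative bucket name; not something the package defines.
bucket = "my-demo-bucket"

if not tos.bucket_exists(bucket):
    # create_bucket() re-checks existence, creates the bucket with the standard
    # storage class and public-read ACL, then applies the CORS rules.
    created = tos.create_bucket(bucket)
    print(f"bucket created: {created}")

# Passing an empty string falls back to the instance's default bucket name.
print(tos.bucket_exists(""))
```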
```diff
@@ -127,91 +171,519 @@ class VeTOS:
                 allowed_headers=["*"],
                 max_age_seconds=1000,
             )
-            self._client.put_bucket_cors(
-            logger.info(f"CORS rules for bucket {
+            self._client.put_bucket_cors(bucket_name, [rule])
+            logger.info(f"CORS rules for bucket {bucket_name} set successfully")
             return True
         except Exception as e:
-            logger.error(
-                f"Failed to set CORS rules for bucket {self.bucket_name}: {str(e)}"
-            )
+            logger.error(f"Failed to set CORS rules for bucket {bucket_name}: {str(e)}")
             return False
 
-    def
-
-
-
+    def _build_object_key_for_file(self, data_path: str) -> str:
+        """Builds the TOS object key and URL for the given parameters.
+
+        Args:
+            user_id (str): User ID
+            app_name (str): App name
+            session_id (str): Session ID
+            data_path (str): Data path
+
+        Returns:
+            tuple[str, str]: Object key and TOS URL.
+        """
+
         parsed_url = urlparse(data_path)
 
-
-
+        # Generate object key
+        if parsed_url.scheme in ("http", "https", "ftp", "ftps"):
+            # For URL, remove protocol part, keep domain and path
+            object_key = f"{parsed_url.netloc}{parsed_url.path}"
         else:
-
+            # For local files, use path relative to current working directory
+            abs_path = os.path.abspath(data_path)
+            cwd = os.getcwd()
+            # If file is in current working directory or its subdirectories, use relative path
+            try:
+                rel_path = os.path.relpath(abs_path, cwd)
+                # Check if path contains relative path symbols (../, ./ etc.)
+                if (
+                    not rel_path.startswith("../")
+                    and not rel_path.startswith("..\\")
+                    and not rel_path.startswith("./")
+                    and not rel_path.startswith(".\\")
+                ):
+                    object_key = rel_path
+                else:
+                    # If path contains relative path symbols, use only filename
+                    object_key = os.path.basename(data_path)
+            except ValueError:
+                # If unable to calculate relative path (cross-volume), use filename
+                object_key = os.path.basename(data_path)
+
+        # Remove leading slash to avoid signature errors
+        if object_key.startswith("/"):
+            object_key = object_key[1:]
+
+        # If object key is empty or contains unsafe path symbols, use filename
+        if (
+            not object_key
+            or "../" in object_key
+            or "..\\" in object_key
+            or "./" in object_key
+            or ".\\" in object_key
+        ):
+            object_key = os.path.basename(data_path)
+
+        return object_key
+
+    def _build_object_key_for_text(self) -> str:
+        """generate TOS object key"""
 
-
-
+        object_key: str = f"{datetime.now().strftime('%Y%m%d%H%M%S')}.txt"
+
+        return object_key
+
+    def _build_object_key_for_bytes(self) -> str:
+        object_key: str = f"{datetime.now().strftime('%Y%m%d%H%M%S')}"
+
+        return object_key
+
+    def build_tos_url(self, object_key: str, bucket_name: str = "") -> str:
+        bucket_name = self._check_bucket_name(bucket_name)
         tos_url: str = (
-            f"https://{
+            f"https://{bucket_name}.tos-{self.region}.volces.com/{object_key}"
         )
+        return tos_url
+
+    def build_tos_signed_url(self, object_key: str, bucket_name: str = "") -> str:
+        bucket_name = self._check_bucket_name(bucket_name)
 
-
+        out = self._client.pre_signed_url(
+            self._tos_module.HttpMethodType.Http_Method_Get,
+            bucket=bucket_name,
+            key=object_key,
+            expires=604800,
+        )
+        tos_url = out.signed_url
+        return tos_url
 
+    # deprecated
     def upload(
         self,
-        object_key: str,
         data: Union[str, bytes],
+        bucket_name: str = "",
+        object_key: str = "",
+        metadata: dict | None = None,
     ):
+        """Uploads data to TOS.
+
+        Args:
+            data (Union[str, bytes]): The data to upload, either as a file path or raw bytes.
+            bucket_name (str): The name of the TOS bucket to upload to.
+            object_key (str): The object key for the uploaded data.
+            metadata (dict | None, optional): Metadata to associate with the object. Defaults to None.
+
+        Raises:
+            ValueError: If the data type is unsupported.
+        """
         if isinstance(data, str):
             # data is a file path
-            return asyncio.to_thread(
+            return asyncio.to_thread(
+                self.upload_file, data, bucket_name, object_key, metadata
+            )
         elif isinstance(data, bytes):
             # data is bytes content
-            return asyncio.to_thread(
+            return asyncio.to_thread(
+                self.upload_bytes, data, bucket_name, object_key, metadata
+            )
         else:
             error_msg = f"Upload failed: data type error. Only str (file path) and bytes are supported, got {type(data)}"
             logger.error(error_msg)
             raise ValueError(error_msg)
 
-    def
+    def _ensure_client_and_bucket(self, bucket_name: str) -> bool:
+        """Ensure TOS client is initialized and bucket exists
+
+        Args:
+            bucket_name: Bucket name
+
+        Returns:
+            bool: True if client is initialized and bucket exists, False otherwise
+        """
+        if not self._client:
+            logger.error("TOS client is not initialized")
+            return False
+        if not self.create_bucket(bucket_name):
+            logger.error(f"Failed to create or access bucket: {bucket_name}")
+            return False
+        return True
+
+    def upload_text(
+        self,
+        text: str,
+        bucket_name: str = "",
+        object_key: str = "",
+        metadata: dict | None = None,
+    ) -> None:
+        """Upload text content to TOS bucket
+
+        Args:
+            text: Text content to upload
+            bucket_name: TOS bucket name
+            object_key: Object key, auto-generated if None
+            metadata: Metadata to associate with the object
+        """
+        bucket_name = self._check_bucket_name(bucket_name)
+        if not object_key:
+            object_key = self._build_object_key_for_text()
+
+        if not self._ensure_client_and_bucket(bucket_name):
+            return
+        data = StringIO(text)
         try:
-            if not self._client:
-                return
-            if not self.create_bucket():
-                return
             self._client.put_object(
-                bucket=
+                bucket=bucket_name, key=object_key, content=data, meta=metadata
             )
-            logger.debug(f"Upload success,
-            self._close()
+            logger.debug(f"Upload success, object_key: {object_key}")
             return
         except Exception as e:
             logger.error(f"Upload failed: {e}")
-            self._close()
             return
+        finally:
+            data.close()
+
+    async def async_upload_text(
+        self,
+        text: str,
+        bucket_name: str = "",
+        object_key: str = "",
+        metadata: dict | None = None,
+    ) -> None:
+        """Asynchronously upload text content to TOS bucket
+
+        Args:
+            text: Text content to upload
+            bucket_name: TOS bucket name
+            object_key: Object key, auto-generated if None
+            metadata: Metadata to associate with the object
+        """
+        bucket_name = self._check_bucket_name(bucket_name)
+        if not object_key:
+            object_key = self._build_object_key_for_text()
+        # Use common function to check client and bucket
+        if not self._ensure_client_and_bucket(bucket_name):
+            return
+        data = StringIO(text)
+        try:
+            # Use asyncio.to_thread to execute blocking TOS operations in thread
+            await asyncio.to_thread(
+                self._client.put_object,
+                bucket=bucket_name,
+                key=object_key,
+                content=data,
+                meta=metadata,
+            )
+            logger.debug(f"Async upload success, object_key: {object_key}")
+            return
+        except Exception as e:
+            logger.error(f"Async upload failed: {e}")
+            return
+        finally:
+            data.close()
 
-    def
+    def upload_bytes(
+        self,
+        data: bytes,
+        bucket_name: str = "",
+        object_key: str = "",
+        metadata: dict | None = None,
+    ) -> None:
+        """Upload byte data to TOS bucket
+
+        Args:
+            data: Byte data to upload
+            bucket_name: TOS bucket name
+            object_key: Object key, auto-generated if None
+            metadata: Metadata to associate with the object
+        """
+        bucket_name = self._check_bucket_name(bucket_name)
+        if not object_key:
+            object_key = self._build_object_key_for_bytes()
+        # Use common function to check client and bucket
+        if not self._ensure_client_and_bucket(bucket_name):
+            return
+        try:
+            self._client.put_object(
+                bucket=bucket_name, key=object_key, content=data, meta=metadata
+            )
+            logger.debug(f"Upload success, object_key: {object_key}")
+            return
+        except Exception as e:
+            logger.error(f"Upload failed: {e}")
+            return
+
+    async def async_upload_bytes(
+        self,
+        data: bytes,
+        bucket_name: str = "",
+        object_key: str = "",
+        metadata: dict | None = None,
+    ) -> None:
+        """Asynchronously upload byte data to TOS bucket
+
+        Args:
+            data: Byte data to upload
+            bucket_name: TOS bucket name
+            object_key: Object key, auto-generated if None
+            metadata: Metadata to associate with the object
+        """
+        bucket_name = self._check_bucket_name(bucket_name)
+        if not object_key:
+            object_key = self._build_object_key_for_bytes()
+        # Use common function to check client and bucket
+        if not self._ensure_client_and_bucket(bucket_name):
+            return
+        try:
+            # Use asyncio.to_thread to execute blocking TOS operations in thread
+            await asyncio.to_thread(
+                self._client.put_object,
+                bucket=bucket_name,
+                key=object_key,
+                content=data,
+                meta=metadata,
+            )
+            logger.debug(f"Async upload success, object_key: {object_key}")
+            return
+        except Exception as e:
+            logger.error(f"Async upload failed: {e}")
+            return
+
+    def upload_file(
+        self,
+        file_path: str,
+        bucket_name: str = "",
+        object_key: str = "",
+        metadata: dict | None = None,
+    ) -> None:
+        """Upload file to TOS bucket
+
+        Args:
+            file_path: Local file path
+            bucket_name: TOS bucket name
+            object_key: Object key, auto-generated if None
+            metadata: Metadata to associate with the object
+        """
+        bucket_name = self._check_bucket_name(bucket_name)
+        if not object_key:
+            object_key = self._build_object_key_for_file(file_path)
+        # Use common function to check client and bucket
+        if not self._ensure_client_and_bucket(bucket_name):
+            return
         try:
-            if not self._client:
-                return
-            if not self.create_bucket():
-                return
             self._client.put_object_from_file(
-                bucket=
+                bucket=bucket_name, key=object_key, file_path=file_path, meta=metadata
             )
-            self._close()
             logger.debug(f"Upload success, object_key: {object_key}")
             return
         except Exception as e:
             logger.error(f"Upload failed: {e}")
-            self._close()
             return
 
-    def
-
+    async def async_upload_file(
+        self,
+        file_path: str,
+        bucket_name: str = "",
+        object_key: str = "",
+        metadata: dict | None = None,
+    ) -> None:
+        """Asynchronously upload file to TOS bucket
+
+        Args:
+            file_path: Local file path
+            bucket_name: TOS bucket name
+            object_key: Object key, auto-generated if None
+            metadata: Metadata to associate with the object
+        """
+        bucket_name = self._check_bucket_name(bucket_name)
+        if not object_key:
+            object_key = self._build_object_key_for_file(file_path)
+        # Use common function to check client and bucket
+        if not self._ensure_client_and_bucket(bucket_name):
+            return
+        try:
+            # Use asyncio.to_thread to execute blocking TOS operations in thread
+            await asyncio.to_thread(
+                self._client.put_object_from_file,
+                bucket=bucket_name,
+                key=object_key,
+                file_path=file_path,
+                meta=metadata,
+            )
+            logger.debug(f"Async upload success, object_key: {object_key}")
+            return
+        except Exception as e:
+            logger.error(f"Async upload failed: {e}")
+            return
+
+    def upload_files(
+        self,
+        file_paths: List[str],
+        bucket_name: str = "",
+        object_keys: Optional[List[str]] = None,
+        metadata: dict | None = None,
+    ) -> None:
+        """Upload multiple files to TOS bucket
+
+        Args:
+            file_paths: List of local file paths
+            bucket_name: TOS bucket name
+            object_keys: List of object keys, auto-generated if empty or length mismatch
+            metadata: Metadata to associate with the object
+        """
+        bucket_name = self._check_bucket_name(bucket_name)
+
+        # If object_keys is None, create empty list
+        if object_keys is None:
+            object_keys = []
+
+        # If object_keys length doesn't match file_paths, generate object key for each file
+        if len(object_keys) != len(file_paths):
+            object_keys = []
+            for file_path in file_paths:
+                object_key = self._build_object_key_for_file(file_path)
+                object_keys.append(object_key)
+            logger.debug(f"Generated object keys: {object_keys}")
+
+        # Upload each file
+        try:
+            for file_path, object_key in zip(file_paths, object_keys):
+                # Note: upload_file method doesn't return value, we use exceptions to determine success
+                self.upload_file(file_path, bucket_name, object_key, metadata=metadata)
+            return
+        except Exception as e:
+            logger.error(f"Upload files failed: {str(e)}")
+            return
+
+    async def async_upload_files(
+        self,
+        file_paths: List[str],
+        bucket_name: str = "",
+        object_keys: Optional[List[str]] = None,
+        metadata: dict | None = None,
+    ) -> None:
+        """Asynchronously upload multiple files to TOS bucket
+
+        Args:
+            file_paths: List of local file paths
+            bucket_name: TOS bucket name
+            object_keys: List of object keys, auto-generated if empty or length mismatch
+            metadata: Metadata to associate with the object
+        """
+        bucket_name = self._check_bucket_name(bucket_name)
+
+        # If object_keys is None, create empty list
+        if object_keys is None:
+            object_keys = []
+
+        # If object_keys length doesn't match file_paths, generate object key for each file
+        if len(object_keys) != len(file_paths):
+            object_keys = []
+            for file_path in file_paths:
+                object_key = self._build_object_key_for_file(file_path)
+                object_keys.append(object_key)
+            logger.debug(f"Generated object keys: {object_keys}")
+
+        # Upload each file
+        try:
+            for file_path, object_key in zip(file_paths, object_keys):
+                # Use asyncio.to_thread to execute blocking TOS operations in thread
+                await asyncio.to_thread(
+                    self._client.put_object_from_file,
+                    bucket=bucket_name,
+                    key=object_key,
+                    file_path=file_path,
+                    metadata=metadata,
+                )
+                logger.debug(f"Async upload success, object_key: {object_key}")
+            return
+        except Exception as e:
+            logger.error(f"Async upload files failed: {str(e)}")
+            return
+
+    def upload_directory(
+        self, directory_path: str, bucket_name: str = "", metadata: dict | None = None
+    ) -> None:
+        """Upload entire directory to TOS bucket
+
+        Args:
+            directory_path: Local directory path
+            bucket_name: TOS bucket name
+            metadata: Metadata to associate with the objects
+        """
+        bucket_name = self._check_bucket_name(bucket_name)
+
+        def _upload_dir(root_dir):
+            items = os.listdir(root_dir)
+            for item in items:
+                path = os.path.join(root_dir, item)
+                if os.path.isdir(path):
+                    _upload_dir(path)
+                if os.path.isfile(path):
+                    # Use relative path of file as object key
+                    object_key = os.path.relpath(path, directory_path)
+                    # upload_file method doesn't return value, use exceptions to determine success
+                    self.upload_file(path, bucket_name, object_key, metadata=metadata)
+
+        try:
+            _upload_dir(directory_path)
+            logger.debug(f"Upload directory success: {directory_path}")
+            return
+        except Exception as e:
+            logger.error(f"Upload directory failed: {str(e)}")
+            raise
+
+    async def async_upload_directory(
+        self, directory_path: str, bucket_name: str = "", metadata: dict | None = None
+    ) -> None:
+        """Asynchronously upload entire directory to TOS bucket
+
+        Args:
+            directory_path: Local directory path
+            bucket_name: TOS bucket name
+            metadata: Metadata to associate with the objects
+        """
+        bucket_name = self._check_bucket_name(bucket_name)
+
+        async def _aupload_dir(root_dir):
+            items = os.listdir(root_dir)
+            for item in items:
+                path = os.path.join(root_dir, item)
+                if os.path.isdir(path):
+                    await _aupload_dir(path)
+                if os.path.isfile(path):
+                    # Use relative path of file as object key
+                    object_key = os.path.relpath(path, directory_path)
+                    # Asynchronously upload single file
+                    await self.async_upload_file(
+                        path, bucket_name, object_key, metadata=metadata
+                    )
+
+        try:
+            await _aupload_dir(directory_path)
+            logger.debug(f"Async upload directory success: {directory_path}")
+            return
+        except Exception as e:
+            logger.error(f"Async upload directory failed: {str(e)}")
+            raise
+
+    def download(self, bucket_name: str, object_key: str, save_path: str) -> bool:
+        """download object from TOS"""
+        bucket_name = self._check_bucket_name(bucket_name)
+
         if not self._client:
             logger.error("TOS client is not initialized")
             return False
         try:
-            object_stream = self._client.get_object(
+            object_stream = self._client.get_object(bucket_name, object_key)
 
             save_dir = os.path.dirname(save_path)
             if save_dir and not os.path.exists(save_dir):
```
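The single `upload()` entry point is now marked deprecated in favor of typed methods (`upload_text`, `upload_bytes`, `upload_file`, `upload_files`, `upload_directory`, and their `async_*` counterparts). Each auto-generates an object key when none is supplied and validates the client and bucket through `_ensure_client_and_bucket`. A hedged sketch of the synchronous API, continuing from the earlier snippets (paths, keys, and bucket name are made up):

```python
# Sketch only: the paths, keys, and bucket name below are illustrative.
tos.upload_text("hello from veadk", bucket_name="my-demo-bucket")  # key defaults to <timestamp>.txt
tos.upload_file("reports/summary.pdf", object_key="reports/summary.pdf")
tos.upload_directory("./assets", bucket_name="my-demo-bucket")

# The upload methods return None and log failures; upload_directory re-raises after logging.
signed = tos.build_tos_signed_url("reports/summary.pdf")  # pre-signed GET URL, 7-day expiry
```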
```diff
@@ -226,9 +698,8 @@ class VeTOS:
 
         except Exception as e:
             logger.error(f"Image download failed: {str(e)}")
-
             return False
 
-    def
+    def close(self):
         if self._client:
             self._client.close()
```