mixpeek 0.11.0__py3-none-any.whl → 0.11.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- mixpeek/__init__.py +1 -93
- mixpeek/client.py +27 -0
- mixpeek/endpoints/collections.py +86 -0
- mixpeek/endpoints/embed.py +66 -0
- mixpeek/endpoints/index.py +51 -0
- mixpeek/endpoints/register.py +34 -0
- mixpeek/endpoints/search.py +67 -0
- mixpeek/endpoints/tasks.py +26 -0
- mixpeek/endpoints/tools.py +138 -0
- mixpeek/exceptions.py +13 -0
- mixpeek-0.11.1.dist-info/METADATA +375 -0
- mixpeek-0.11.1.dist-info/RECORD +15 -0
- {mixpeek-0.11.0.dist-info → mixpeek-0.11.1.dist-info}/WHEEL +2 -1
- mixpeek-0.11.1.dist-info/top_level.txt +1 -0
- mixpeek/_base_client.py +0 -2041
- mixpeek/_client.py +0 -444
- mixpeek/_compat.py +0 -219
- mixpeek/_constants.py +0 -14
- mixpeek/_exceptions.py +0 -108
- mixpeek/_files.py +0 -123
- mixpeek/_models.py +0 -785
- mixpeek/_qs.py +0 -150
- mixpeek/_resource.py +0 -43
- mixpeek/_response.py +0 -824
- mixpeek/_streaming.py +0 -333
- mixpeek/_types.py +0 -217
- mixpeek/_utils/__init__.py +0 -55
- mixpeek/_utils/_logs.py +0 -25
- mixpeek/_utils/_proxy.py +0 -62
- mixpeek/_utils/_reflection.py +0 -42
- mixpeek/_utils/_streams.py +0 -12
- mixpeek/_utils/_sync.py +0 -81
- mixpeek/_utils/_transform.py +0 -382
- mixpeek/_utils/_typing.py +0 -120
- mixpeek/_utils/_utils.py +0 -397
- mixpeek/_version.py +0 -4
- mixpeek/lib/.keep +0 -4
- mixpeek/resources/__init__.py +0 -159
- mixpeek/resources/accounts/__init__.py +0 -33
- mixpeek/resources/accounts/accounts.py +0 -102
- mixpeek/resources/accounts/private.py +0 -232
- mixpeek/resources/agent/__init__.py +0 -33
- mixpeek/resources/agent/agent.py +0 -225
- mixpeek/resources/agent/task.py +0 -189
- mixpeek/resources/collections/__init__.py +0 -33
- mixpeek/resources/collections/collections.py +0 -459
- mixpeek/resources/collections/files.py +0 -679
- mixpeek/resources/describe.py +0 -338
- mixpeek/resources/embed.py +0 -234
- mixpeek/resources/indexes.py +0 -506
- mixpeek/resources/read.py +0 -183
- mixpeek/resources/recognize.py +0 -183
- mixpeek/resources/search.py +0 -542
- mixpeek/resources/tasks.py +0 -297
- mixpeek/resources/transcribe.py +0 -192
- mixpeek/types/__init__.py +0 -19
- mixpeek/types/accounts/__init__.py +0 -6
- mixpeek/types/accounts/private_update_params.py +0 -25
- mixpeek/types/accounts/user.py +0 -32
- mixpeek/types/agent/__init__.py +0 -3
- mixpeek/types/agent_create_params.py +0 -18
- mixpeek/types/agentresponse.py +0 -11
- mixpeek/types/collection_search_params.py +0 -29
- mixpeek/types/collections/__init__.py +0 -9
- mixpeek/types/collections/file_create_params.py +0 -31
- mixpeek/types/collections/file_full_params.py +0 -22
- mixpeek/types/collections/file_update_params.py +0 -18
- mixpeek/types/collections/fileresponse.py +0 -23
- mixpeek/types/collections/groupedfiledata.py +0 -38
- mixpeek/types/describe_upload_params.py +0 -21
- mixpeek/types/describe_url_params.py +0 -20
- mixpeek/types/embed_create_params.py +0 -29
- mixpeek/types/embeddingresponse.py +0 -15
- mixpeek/types/index_face_params.py +0 -23
- mixpeek/types/index_upload_params.py +0 -27
- mixpeek/types/index_url_params.py +0 -159
- mixpeek/types/search_text_params.py +0 -45
- mixpeek/types/search_upload_params.py +0 -25
- mixpeek/types/search_url_params.py +0 -45
- mixpeek/types/taskresponse.py +0 -15
- mixpeek/types/transcribe_url_params.py +0 -18
- mixpeek-0.11.0.dist-info/METADATA +0 -356
- mixpeek-0.11.0.dist-info/RECORD +0 -73
- mixpeek-0.11.0.dist-info/licenses/LICENSE +0 -201
- /mixpeek/{py.typed → endpoints/__init__.py} +0 -0
mixpeek/__init__.py
CHANGED
@@ -1,93 +1 @@
|
|
1
|
-
|
2
|
-
|
3
|
-
from . import types
|
4
|
-
from ._types import NOT_GIVEN, NoneType, NotGiven, Transport, ProxiesTypes
|
5
|
-
from ._utils import file_from_path
|
6
|
-
from ._client import (
|
7
|
-
Client,
|
8
|
-
Stream,
|
9
|
-
Timeout,
|
10
|
-
Transport,
|
11
|
-
MixpeekSDK,
|
12
|
-
AsyncClient,
|
13
|
-
AsyncStream,
|
14
|
-
RequestOptions,
|
15
|
-
AsyncMixpeekSDK,
|
16
|
-
)
|
17
|
-
from ._models import BaseModel
|
18
|
-
from ._version import __title__, __version__
|
19
|
-
from ._response import APIResponse as APIResponse, AsyncAPIResponse as AsyncAPIResponse
|
20
|
-
from ._constants import DEFAULT_TIMEOUT, DEFAULT_MAX_RETRIES, DEFAULT_CONNECTION_LIMITS
|
21
|
-
from ._exceptions import (
|
22
|
-
APIError,
|
23
|
-
ConflictError,
|
24
|
-
NotFoundError,
|
25
|
-
APIStatusError,
|
26
|
-
RateLimitError,
|
27
|
-
APITimeoutError,
|
28
|
-
BadRequestError,
|
29
|
-
MixpeekSDKError,
|
30
|
-
APIConnectionError,
|
31
|
-
AuthenticationError,
|
32
|
-
InternalServerError,
|
33
|
-
PermissionDeniedError,
|
34
|
-
UnprocessableEntityError,
|
35
|
-
APIResponseValidationError,
|
36
|
-
)
|
37
|
-
from ._base_client import DefaultHttpxClient, DefaultAsyncHttpxClient
|
38
|
-
from ._utils._logs import setup_logging as _setup_logging
|
39
|
-
|
40
|
-
__all__ = [
|
41
|
-
"types",
|
42
|
-
"__version__",
|
43
|
-
"__title__",
|
44
|
-
"NoneType",
|
45
|
-
"Transport",
|
46
|
-
"ProxiesTypes",
|
47
|
-
"NotGiven",
|
48
|
-
"NOT_GIVEN",
|
49
|
-
"MixpeekSDKError",
|
50
|
-
"APIError",
|
51
|
-
"APIStatusError",
|
52
|
-
"APITimeoutError",
|
53
|
-
"APIConnectionError",
|
54
|
-
"APIResponseValidationError",
|
55
|
-
"BadRequestError",
|
56
|
-
"AuthenticationError",
|
57
|
-
"PermissionDeniedError",
|
58
|
-
"NotFoundError",
|
59
|
-
"ConflictError",
|
60
|
-
"UnprocessableEntityError",
|
61
|
-
"RateLimitError",
|
62
|
-
"InternalServerError",
|
63
|
-
"Timeout",
|
64
|
-
"RequestOptions",
|
65
|
-
"Client",
|
66
|
-
"AsyncClient",
|
67
|
-
"Stream",
|
68
|
-
"AsyncStream",
|
69
|
-
"MixpeekSDK",
|
70
|
-
"AsyncMixpeekSDK",
|
71
|
-
"file_from_path",
|
72
|
-
"BaseModel",
|
73
|
-
"DEFAULT_TIMEOUT",
|
74
|
-
"DEFAULT_MAX_RETRIES",
|
75
|
-
"DEFAULT_CONNECTION_LIMITS",
|
76
|
-
"DefaultHttpxClient",
|
77
|
-
"DefaultAsyncHttpxClient",
|
78
|
-
]
|
79
|
-
|
80
|
-
_setup_logging()
|
81
|
-
|
82
|
-
# Update the __module__ attribute for exported symbols so that
|
83
|
-
# error messages point to this module instead of the module
|
84
|
-
# it was originally defined in, e.g.
|
85
|
-
# mixpeek._exceptions.NotFoundError -> mixpeek.NotFoundError
|
86
|
-
__locals = locals()
|
87
|
-
for __name in __all__:
|
88
|
-
if not __name.startswith("__"):
|
89
|
-
try:
|
90
|
-
__locals[__name].__module__ = "mixpeek"
|
91
|
-
except (TypeError, AttributeError):
|
92
|
-
# Some of our exported symbols are builtins which we can't set attributes for.
|
93
|
-
pass
|
1
|
+
from .client import Mixpeek
|
mixpeek/client.py
ADDED
@@ -0,0 +1,27 @@
|
|
1
|
+
import requests
|
2
|
+
|
3
|
+
from .endpoints.embed import Embed
|
4
|
+
from .endpoints.collections import Collections
|
5
|
+
from .endpoints.index import Index
|
6
|
+
from .endpoints.search import Search
|
7
|
+
from .endpoints.tools import Tools
|
8
|
+
from .endpoints.register import Register
|
9
|
+
|
10
|
+
|
11
|
+
class Mixpeek:
    """Entry point for the Mixpeek SDK.

    Holds the API key and shared request headers, and exposes one
    attribute per remote API area (embed, collections, index, search,
    register) plus local-only media tools.
    """

    def __init__(self, api_key: str, base_url: str = "https://api.mixpeek.com/"):
        """Create a client.

        Args:
            api_key: Bearer token sent with every request.
            base_url: API root. Must end with a trailing slash because the
                endpoint classes append paths like ``"embed/"`` directly.
                Parameterized (previously hard-coded) so staging/self-hosted
                deployments can be targeted; default preserves old behavior.
        """
        self.api_key = api_key
        self.base_url = base_url
        self.headers = {
            "Authorization": f"Bearer {self.api_key}",
            "Content-Type": "application/json"
        }
        # These endpoint groups all make remote HTTP calls.
        self.embed = Embed(self.base_url, self.headers)
        self.collections = Collections(self.base_url, self.headers)
        self.index = Index(self.base_url, self.headers)
        self.search = Search(self.base_url, self.headers)
        self.register = Register(self.base_url, self.headers)

        # Tools run entirely locally (ffmpeg/PIL helpers), no network.
        self.tools = Tools()
|
@@ -0,0 +1,86 @@
|
|
1
|
+
import requests
|
2
|
+
|
3
|
+
class Collections:
    """Remote operations on Mixpeek collections and the files they contain.

    Every method returns the decoded JSON body on success, or an
    ``{"error": <message>}`` dict when the HTTP request fails — network
    and HTTP errors are never raised to the caller.
    """

    def __init__(self, base_url, headers):
        # base_url ends with "/" (set by the Mixpeek client), so relative
        # paths below are appended without a leading slash.
        self.base_url = base_url
        self.headers = headers

    def _request(self, method, path, payload=None):
        """Send one HTTP request and normalize failures into an error dict.

        Shared by every endpoint method below; previously each method
        duplicated the same try/raise_for_status/except block.
        """
        try:
            url = f"{self.base_url}{path}"
            response = requests.request(method, url, json=payload, headers=self.headers)
            response.raise_for_status()
            return response.json()
        except requests.RequestException as e:
            return {"error": str(e)}

    def list_files(self, collection_id, randomize=False, page=1, page_size=10, filters=None, sort_by=None, sort_order="asc"):
        """Page through the files of one collection, optionally filtered and sorted."""
        return self._request("POST", "collections/", {
            "collection_id": collection_id,
            "randomize": randomize,
            "page": page,
            "page_size": page_size,
            "filters": filters,
            "sort_by": sort_by,
            "sort_order": sort_order
        })

    def list_collections(self):
        """List every collection visible to this API key."""
        return self._request("GET", "collections/")

    def search_files(self, query, collection_id, page=1, page_size=10, sort_by=None, sort_order="asc"):
        """Search for files inside one collection."""
        return self._request("POST", "collections/search", {
            "query": query,
            "collection_id": collection_id,
            "page": page,
            "page_size": page_size,
            "sort_by": sort_by,
            "sort_order": sort_order
        })

    def get_file_by_id(self, file_id):
        """Fetch a file's summary record."""
        return self._request("GET", f"collections/file/{file_id}")

    def get_full_file(self, file_id):
        """Fetch a file's full record."""
        return self._request("GET", f"collections/file/{file_id}/full")

    def delete_file_by_id(self, file_id):
        """Delete a single file from its collection."""
        return self._request("DELETE", f"collections/file/{file_id}")

    def delete_collection(self, collection_id):
        """Delete an entire collection."""
        return self._request("DELETE", f"collections/{collection_id}")
|
@@ -0,0 +1,66 @@
|
|
1
|
+
import requests
|
2
|
+
#test
|
3
|
+
class Embed:
    """Client for the ``/embed`` endpoint: one method per supported modality.

    Each method returns the decoded JSON body on success, or an
    ``{"error": <message>}`` dict when the request fails.
    """

    def __init__(self, base_url, headers):
        self.base_url = base_url
        self.headers = headers

    def _embed(self, modality: str, model_id: str, input: str, input_type: str):
        """POST one embedding request.

        All four public methods shared an identical body differing only in
        the "modality" field, so the request logic lives here once.
        """
        try:
            url = f"{self.base_url}embed/"
            data = {
                "modality": modality,
                "model_id": model_id,
                "input": input,
                "input_type": input_type
            }
            response = requests.post(url, json=data, headers=self.headers)
            response.raise_for_status()
            return response.json()
        except requests.RequestException as e:
            return {"error": str(e)}

    def video(self, model_id: str, input: str, input_type: str):
        """Embed a video input."""
        return self._embed("video", model_id, input, input_type)

    def text(self, model_id: str, input: str, input_type: str):
        """Embed a text input."""
        return self._embed("text", model_id, input, input_type)

    def image(self, model_id: str, input: str, input_type: str):
        """Embed an image input."""
        return self._embed("image", model_id, input, input_type)

    def audio(self, model_id: str, input: str, input_type: str):
        """Embed an audio input."""
        return self._embed("audio", model_id, input, input_type)
|
@@ -0,0 +1,51 @@
|
|
1
|
+
import requests
|
2
|
+
from .tasks import Task
|
3
|
+
|
4
|
+
class Index:
    """Client for the ``/index`` endpoints: submit media for asynchronous indexing.

    Successful submissions return a :class:`Task` handle that can be polled;
    failures return an ``{"error": <message>}`` dict.
    """

    def __init__(self, base_url, headers):
        self.base_url = base_url
        self.headers = headers

    def _prepare_data(self, base_data, metadata=None, video_settings=None, image_settings=None):
        """Merge the optional sections into the request payload (mutates and returns it)."""
        if metadata is not None:
            base_data["metadata"] = metadata
        if video_settings is not None:
            base_data["video_settings"] = video_settings
        if image_settings is not None:
            base_data["image_settings"] = image_settings
        return base_data

    def _task_or_body(self, response):
        """Wrap a submission response in a Task when it carries a task_id."""
        task_id = response.json().get("task_id")
        if task_id:
            return Task(self.base_url, self.headers, task_id)
        return response.json()

    def url(self, target_url, collection_id, metadata=None, video_settings=None, image_settings=None):
        """Index the media found at ``target_url`` into ``collection_id``."""
        try:
            endpoint = f"{self.base_url}index/url"
            data = self._prepare_data({"url": target_url, "collection_id": collection_id}, metadata, video_settings, image_settings)

            response = requests.post(endpoint, json=data, headers=self.headers)
            response.raise_for_status()
            return self._task_or_body(response)
        except requests.RequestException as e:
            return {"error": str(e)}

    def upload(self, file_path, collection_id, metadata=None, settings=None):
        """Upload and index a local file.

        Never raises for expected failures: request errors and file errors
        are both returned as ``{"error": ...}`` dicts.
        """
        try:
            endpoint = f"{self.base_url}index/upload"
            data = self._prepare_data({"collection_id": collection_id}, metadata)
            if settings is not None:
                # BUG FIX: ``settings`` was previously passed positionally
                # into _prepare_data and therefore serialized under the key
                # "video_settings"; send it as "settings", consistent with
                # Register.faces.
                data["settings"] = settings

            with open(file_path, 'rb') as file:
                # NOTE(review): file.name is the full local path — the server
                # presumably only needs the basename; confirm before changing.
                files = [('file', (file.name, file, 'application/octet-stream'))]
                # NOTE(review): self.headers carries
                # "Content-Type: application/json", which overrides the
                # multipart boundary requests would otherwise set for
                # file uploads — verify the server accepts this.
                response = requests.post(endpoint, headers=self.headers, data=data, files=files)

            response.raise_for_status()
            return self._task_or_body(response)
        except requests.RequestException as e:
            return {"error": str(e)}
        except IOError as e:
            return {"error": f"File error: {str(e)}"}
|
@@ -0,0 +1,34 @@
|
|
1
|
+
import requests
|
2
|
+
from .tasks import Task
|
3
|
+
|
4
|
+
class Register:
    """Client for the ``/register`` endpoints (face registration)."""

    def __init__(self, base_url, headers):
        self.base_url = base_url
        self.headers = headers

    def _prepare_data(self, base_data, metadata=None, settings=None):
        """Fold the optional metadata/settings sections into the payload.

        Mutates ``base_data`` in place and returns it.
        """
        for key, value in (("metadata", metadata), ("settings", settings)):
            if value is not None:
                base_data[key] = value
        return base_data

    def faces(self, file_path, collection_id, metadata=None, settings=None):
        """Upload a local file to register the faces it contains.

        Returns a Task handle when the server hands back a task_id,
        otherwise the raw JSON body; failures come back as
        ``{"error": <message>}`` dicts rather than exceptions.
        """
        try:
            endpoint = f"{self.base_url}register/faces"
            payload = self._prepare_data({"collection_id": collection_id}, metadata, settings)

            with open(file_path, 'rb') as fh:
                upload = [('file', (fh.name, fh, 'application/octet-stream'))]
                response = requests.post(endpoint, headers=self.headers, data=payload, files=upload)

            response.raise_for_status()
            body = response.json()
            task_id = body.get("task_id")
            return Task(self.base_url, self.headers, task_id) if task_id else body
        except requests.RequestException as exc:
            return {"error": str(exc)}
        except IOError as exc:
            return {"error": f"File error: {str(exc)}"}
|
@@ -0,0 +1,67 @@
|
|
1
|
+
import requests
|
2
|
+
import os
|
3
|
+
import json
|
4
|
+
|
5
|
+
class Search:
    """Client for the ``/search`` endpoints (text queries and URL queries).

    Methods return the decoded JSON body on success, or an
    ``{"error": <message>}`` dict when the request fails.
    """

    def __init__(self, base_url, headers):
        self.base_url = base_url
        self.headers = headers

    def _post(self, path, data):
        """Shared POST + error normalization for the search endpoints."""
        try:
            response = requests.post(f"{self.base_url}{path}", json=data, headers=self.headers)
            response.raise_for_status()
            return response.json()
        except requests.RequestException as e:
            return {"error": str(e)}

    def text(self, input, modality, input_type="text", filters=None, group_by_file=True, page=1, page_size=10):
        """Search by a text query.

        Args:
            input: The query text.
            modality: Which modality to search over.
            input_type: How the server should interpret ``input``.
            filters: Optional filter dict; ``None`` is sent as ``{}``.
            group_by_file: Collapse results belonging to the same file.
            page, page_size: Pagination controls.
        """
        return self._post("search/text", {
            "input": input,
            "modality": modality,
            "input_type": input_type,
            "filters": filters or {},
            "group_by_file": group_by_file,
            "pagination": {
                "page": page,
                "page_size": page_size
            }
        })

    def url(self, target_url, filters=None, modality="text", page=1, page_size=10):
        """Search using the content located at ``target_url`` as the query."""
        return self._post("search/url", {
            "url": target_url,
            "filters": filters or {},
            "modality": modality,
            "pagination": {
                "page": page,
                "page_size": page_size
            }
        })
|
@@ -0,0 +1,26 @@
|
|
1
|
+
import requests
|
2
|
+
import time
|
3
|
+
|
4
|
+
class Task:
    """Handle to an asynchronous server-side task.

    Returned by indexing/registration calls; poll with
    :meth:`get_task_status` or block with :meth:`wait_for_done`.
    """

    def __init__(self, base_url, headers, task_id):
        self.base_url = base_url
        self.headers = headers
        self.task_id = task_id

    def get_task_status(self, task_id=None):
        """Fetch the current status dict for a task.

        Args:
            task_id: Task to query; defaults to this handle's own task_id
                (backward compatible — callers that pass it explicitly are
                unaffected).
        """
        try:
            target = task_id if task_id is not None else self.task_id
            endpoint = f"{self.base_url}tasks/{target}"
            response = requests.get(endpoint, headers=self.headers)
            response.raise_for_status()
            return response.json()
        except requests.RequestException as e:
            return {"error": str(e)}

    def wait_for_done(self, sleep_interval=2, callback=None):
        """Poll every ``sleep_interval`` seconds until the task is done.

        The optional ``callback`` receives each polled status dict.

        BUG FIX: a failed request yields an ``{"error": ...}`` dict whose
        "status" key never equals "DONE", so this previously looped
        forever; an error status is now returned to the caller instead.
        """
        while True:
            status = self.get_task_status(self.task_id)
            if callback:
                callback(status)
            if "error" in status or status.get("status") == "DONE":
                return status
            time.sleep(sleep_interval)
|
@@ -0,0 +1,138 @@
|
|
1
|
+
import subprocess
|
2
|
+
import tempfile
|
3
|
+
import os
|
4
|
+
import base64
|
5
|
+
from urllib.parse import urlparse
|
6
|
+
from urllib.request import urlretrieve
|
7
|
+
from tqdm import tqdm
|
8
|
+
from PIL import Image
|
9
|
+
import io
|
10
|
+
|
11
|
+
class Tools:
    """Local (no-network) helpers for preparing media before indexing."""

    def __init__(self):
        self.video = self.Video(self)
        self.image = self.Image(self)

    class Image:
        def __init__(self, parent):
            pass

        def process(self, image_source: str):
            """Return the image at ``image_source`` (local path or URL) as a
            base64-encoded JPEG string.

            Remote sources are downloaded to a temporary file which is
            removed afterwards.
            """
            # BUG FIX: remember up front whether we downloaded, because
            # image_source is reassigned to the local temp path below and
            # re-checking its URL scheme afterwards never matched — the
            # temp file was leaked on every URL input.
            downloaded = urlparse(image_source).scheme in ('http', 'https')
            if downloaded:
                with tempfile.NamedTemporaryFile(delete=False) as temp_file:
                    urlretrieve(image_source, temp_file.name)
                    image_source = temp_file.name

            try:
                # NOTE: ``Image`` here resolves to the module-level PIL
                # import, not this nested class — method bodies do not see
                # the enclosing class scope.
                with Image.open(image_source) as img:
                    # Normalize to RGB so the JPEG save cannot fail on
                    # palette/alpha modes.
                    if img.mode != 'RGB':
                        img = img.convert('RGB')

                    buffered = io.BytesIO()
                    img.save(buffered, format="JPEG")
                    base64_string = base64.b64encode(buffered.getvalue()).decode('utf-8')
            finally:
                # Clean up the download even if PIL raised.
                if downloaded:
                    os.unlink(image_source)

            return base64_string

    class Video:
        def __init__(self, parent):
            pass

        def process(self, video_source: str, chunk_interval: float, resolution: list):
            """Yield the video as base64-encoded chunks.

            Args:
                video_source: Local path or http(s) URL.
                chunk_interval: Chunk length in seconds.
                resolution: ``[width, height]`` to scale each chunk to.

            Yields:
                Dicts with "base64_chunk", "start_time" and "end_time".
            """
            chunker = VideoChunker(video_source, chunk_interval, resolution)

            for chunk in chunker:
                yield {
                    "base64_chunk": chunk["base64"],
                    "start_time": chunk["start_time"],
                    "end_time": chunk["end_time"]
                }
|
58
|
+
|
59
|
+
|
60
|
+
class VideoChunker:
    """Iterator that splits a video into fixed-length, re-encoded chunks.

    Each iteration yields a dict with keys "base64" (the chunk as a
    base64-encoded MP4), "start_time" and "end_time" (seconds).
    Requires the external ``ffmpeg`` and ``ffprobe`` binaries on PATH.
    """

    def __init__(self, video_source, chunk_interval, target_resolution):
        # video_source: local path or http(s) URL (downloaded lazily on
        # first iteration).
        self.video_source = video_source
        self.chunk_interval = chunk_interval
        # ffmpeg's scale filter expects a "WIDTHxHEIGHT" string.
        self.target_resolution = f"{target_resolution[0]}x{target_resolution[1]}"
        self.temp_dir = tempfile.mkdtemp()
        self.total_duration = None  # populated by _initialize_video on first __next__
        self.current_time = 0

    def __del__(self):
        # Best-effort cleanup of the working directory on garbage collection.
        self.cleanup()

    def cleanup(self):
        """Remove every generated file, then the temp directory itself."""
        for file in os.listdir(self.temp_dir):
            os.remove(os.path.join(self.temp_dir, file))
        os.rmdir(self.temp_dir)

    def __iter__(self):
        return self

    def __next__(self):
        # Lazy setup: download (if remote) and probe duration on first call.
        if self.total_duration is None:
            self._initialize_video()

        chunk = self._process_chunk()
        if chunk is None:
            raise StopIteration
        return chunk

    def _initialize_video(self):
        """Download remote sources and read the total duration via ffprobe."""
        if urlparse(self.video_source).scheme in ('http', 'https'):
            print("Downloading video...")
            temp_file = os.path.join(self.temp_dir, 'temp_video')
            urlretrieve(self.video_source, temp_file)
            self.video_source = temp_file

        # Get video duration
        result = subprocess.run(['ffprobe', '-v', 'error', '-show_entries', 'format=duration', '-of', 'default=noprint_wrappers=1:nokey=1', self.video_source], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        self.total_duration = float(result.stdout)

        print(f"Total video duration: {self.total_duration:.2f} seconds")
        self.progress_bar = tqdm(total=100, desc="Processing video", unit="%")

    def _process_chunk(self):
        """Cut, scale and base64-encode the next chunk; return None when done."""
        if self.current_time >= self.total_duration:
            return None

        start_time = self.current_time
        # Last chunk may be shorter than chunk_interval.
        end_time = min(start_time + self.chunk_interval, self.total_duration)

        # Generate chunk using FFmpeg
        temp_output = os.path.join(self.temp_dir, f"chunk_{self.current_time}.mp4")
        subprocess.run([
            'ffmpeg', '-y', '-i', self.video_source,
            '-ss', str(start_time), '-to', str(end_time),
            '-vf', f'scale={self.target_resolution}',
            '-c:v', 'libx264', '-preset', 'ultrafast',
            temp_output
        ], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)

        # Convert to base64
        with open(temp_output, 'rb') as f:
            base64_string = base64.b64encode(f.read()).decode('utf-8')

        # Remove temporary file
        os.remove(temp_output)

        # Update progress
        progress = (end_time / self.total_duration) * 100
        self.progress_bar.n = int(progress)
        self.progress_bar.refresh()

        self.current_time = end_time

        return {
            "base64": base64_string,
            "start_time": start_time,
            "end_time": end_time
        }
|
mixpeek/exceptions.py
ADDED