synapse-sdk 1.0.0a23__py3-none-any.whl → 2025.12.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- synapse_sdk/__init__.py +24 -0
- synapse_sdk/cli/__init__.py +310 -5
- synapse_sdk/cli/alias/__init__.py +22 -0
- synapse_sdk/cli/alias/create.py +36 -0
- synapse_sdk/cli/alias/dataclass.py +31 -0
- synapse_sdk/cli/alias/default.py +16 -0
- synapse_sdk/cli/alias/delete.py +15 -0
- synapse_sdk/cli/alias/list.py +19 -0
- synapse_sdk/cli/alias/read.py +15 -0
- synapse_sdk/cli/alias/update.py +17 -0
- synapse_sdk/cli/alias/utils.py +61 -0
- synapse_sdk/cli/code_server.py +687 -0
- synapse_sdk/cli/config.py +440 -0
- synapse_sdk/cli/devtools.py +90 -0
- synapse_sdk/cli/plugin/__init__.py +33 -0
- synapse_sdk/cli/{create_plugin.py → plugin/create.py} +2 -2
- synapse_sdk/{plugins/cli → cli/plugin}/publish.py +23 -15
- synapse_sdk/clients/agent/__init__.py +9 -3
- synapse_sdk/clients/agent/container.py +143 -0
- synapse_sdk/clients/agent/core.py +19 -0
- synapse_sdk/clients/agent/ray.py +298 -9
- synapse_sdk/clients/backend/__init__.py +30 -12
- synapse_sdk/clients/backend/annotation.py +13 -5
- synapse_sdk/clients/backend/core.py +31 -4
- synapse_sdk/clients/backend/data_collection.py +186 -0
- synapse_sdk/clients/backend/hitl.py +17 -0
- synapse_sdk/clients/backend/integration.py +16 -1
- synapse_sdk/clients/backend/ml.py +5 -1
- synapse_sdk/clients/backend/models.py +78 -0
- synapse_sdk/clients/base.py +384 -41
- synapse_sdk/clients/ray/serve.py +2 -0
- synapse_sdk/clients/validators/collections.py +31 -0
- synapse_sdk/devtools/config.py +94 -0
- synapse_sdk/devtools/server.py +41 -0
- synapse_sdk/devtools/streamlit_app/__init__.py +5 -0
- synapse_sdk/devtools/streamlit_app/app.py +128 -0
- synapse_sdk/devtools/streamlit_app/services/__init__.py +11 -0
- synapse_sdk/devtools/streamlit_app/services/job_service.py +233 -0
- synapse_sdk/devtools/streamlit_app/services/plugin_service.py +236 -0
- synapse_sdk/devtools/streamlit_app/services/serve_service.py +95 -0
- synapse_sdk/devtools/streamlit_app/ui/__init__.py +15 -0
- synapse_sdk/devtools/streamlit_app/ui/config_tab.py +76 -0
- synapse_sdk/devtools/streamlit_app/ui/deployment_tab.py +66 -0
- synapse_sdk/devtools/streamlit_app/ui/http_tab.py +125 -0
- synapse_sdk/devtools/streamlit_app/ui/jobs_tab.py +573 -0
- synapse_sdk/devtools/streamlit_app/ui/serve_tab.py +346 -0
- synapse_sdk/devtools/streamlit_app/ui/status_bar.py +118 -0
- synapse_sdk/devtools/streamlit_app/utils/__init__.py +40 -0
- synapse_sdk/devtools/streamlit_app/utils/json_viewer.py +197 -0
- synapse_sdk/devtools/streamlit_app/utils/log_formatter.py +38 -0
- synapse_sdk/devtools/streamlit_app/utils/styles.py +241 -0
- synapse_sdk/devtools/streamlit_app/utils/ui_components.py +289 -0
- synapse_sdk/devtools/streamlit_app.py +10 -0
- synapse_sdk/loggers.py +120 -9
- synapse_sdk/plugins/README.md +1340 -0
- synapse_sdk/plugins/__init__.py +0 -13
- synapse_sdk/plugins/categories/base.py +117 -11
- synapse_sdk/plugins/categories/data_validation/actions/validation.py +72 -0
- synapse_sdk/plugins/categories/data_validation/templates/plugin/validation.py +33 -5
- synapse_sdk/plugins/categories/export/actions/__init__.py +3 -0
- synapse_sdk/plugins/categories/export/actions/export/__init__.py +28 -0
- synapse_sdk/plugins/categories/export/actions/export/action.py +165 -0
- synapse_sdk/plugins/categories/export/actions/export/enums.py +113 -0
- synapse_sdk/plugins/categories/export/actions/export/exceptions.py +53 -0
- synapse_sdk/plugins/categories/export/actions/export/models.py +74 -0
- synapse_sdk/plugins/categories/export/actions/export/run.py +195 -0
- synapse_sdk/plugins/categories/export/actions/export/utils.py +187 -0
- synapse_sdk/plugins/categories/export/templates/config.yaml +21 -0
- synapse_sdk/plugins/categories/export/templates/plugin/__init__.py +390 -0
- synapse_sdk/plugins/categories/export/templates/plugin/export.py +160 -0
- synapse_sdk/plugins/categories/neural_net/actions/deployment.py +13 -12
- synapse_sdk/plugins/categories/neural_net/actions/train.py +1134 -31
- synapse_sdk/plugins/categories/neural_net/actions/tune.py +534 -0
- synapse_sdk/plugins/categories/neural_net/base/inference.py +1 -1
- synapse_sdk/plugins/categories/neural_net/templates/config.yaml +32 -4
- synapse_sdk/plugins/categories/neural_net/templates/plugin/inference.py +26 -10
- synapse_sdk/plugins/categories/pre_annotation/actions/__init__.py +4 -0
- synapse_sdk/plugins/categories/pre_annotation/actions/pre_annotation/__init__.py +3 -0
- synapse_sdk/plugins/categories/{export/actions/export.py → pre_annotation/actions/pre_annotation/action.py} +4 -4
- synapse_sdk/plugins/categories/pre_annotation/actions/to_task/__init__.py +28 -0
- synapse_sdk/plugins/categories/pre_annotation/actions/to_task/action.py +148 -0
- synapse_sdk/plugins/categories/pre_annotation/actions/to_task/enums.py +269 -0
- synapse_sdk/plugins/categories/pre_annotation/actions/to_task/exceptions.py +14 -0
- synapse_sdk/plugins/categories/pre_annotation/actions/to_task/factory.py +76 -0
- synapse_sdk/plugins/categories/pre_annotation/actions/to_task/models.py +100 -0
- synapse_sdk/plugins/categories/pre_annotation/actions/to_task/orchestrator.py +248 -0
- synapse_sdk/plugins/categories/pre_annotation/actions/to_task/run.py +64 -0
- synapse_sdk/plugins/categories/pre_annotation/actions/to_task/strategies/__init__.py +17 -0
- synapse_sdk/plugins/categories/pre_annotation/actions/to_task/strategies/annotation.py +265 -0
- synapse_sdk/plugins/categories/pre_annotation/actions/to_task/strategies/base.py +170 -0
- synapse_sdk/plugins/categories/pre_annotation/actions/to_task/strategies/extraction.py +83 -0
- synapse_sdk/plugins/categories/pre_annotation/actions/to_task/strategies/metrics.py +92 -0
- synapse_sdk/plugins/categories/pre_annotation/actions/to_task/strategies/preprocessor.py +243 -0
- synapse_sdk/plugins/categories/pre_annotation/actions/to_task/strategies/validation.py +143 -0
- synapse_sdk/plugins/categories/pre_annotation/templates/config.yaml +19 -0
- synapse_sdk/plugins/categories/pre_annotation/templates/plugin/to_task.py +40 -0
- synapse_sdk/plugins/categories/smart_tool/templates/config.yaml +2 -0
- synapse_sdk/plugins/categories/upload/__init__.py +0 -0
- synapse_sdk/plugins/categories/upload/actions/__init__.py +0 -0
- synapse_sdk/plugins/categories/upload/actions/upload/__init__.py +19 -0
- synapse_sdk/plugins/categories/upload/actions/upload/action.py +236 -0
- synapse_sdk/plugins/categories/upload/actions/upload/context.py +185 -0
- synapse_sdk/plugins/categories/upload/actions/upload/enums.py +493 -0
- synapse_sdk/plugins/categories/upload/actions/upload/exceptions.py +36 -0
- synapse_sdk/plugins/categories/upload/actions/upload/factory.py +138 -0
- synapse_sdk/plugins/categories/upload/actions/upload/models.py +214 -0
- synapse_sdk/plugins/categories/upload/actions/upload/orchestrator.py +183 -0
- synapse_sdk/plugins/categories/upload/actions/upload/registry.py +113 -0
- synapse_sdk/plugins/categories/upload/actions/upload/run.py +179 -0
- synapse_sdk/plugins/categories/upload/actions/upload/steps/__init__.py +1 -0
- synapse_sdk/plugins/categories/upload/actions/upload/steps/base.py +107 -0
- synapse_sdk/plugins/categories/upload/actions/upload/steps/cleanup.py +62 -0
- synapse_sdk/plugins/categories/upload/actions/upload/steps/collection.py +63 -0
- synapse_sdk/plugins/categories/upload/actions/upload/steps/generate.py +91 -0
- synapse_sdk/plugins/categories/upload/actions/upload/steps/initialize.py +82 -0
- synapse_sdk/plugins/categories/upload/actions/upload/steps/metadata.py +235 -0
- synapse_sdk/plugins/categories/upload/actions/upload/steps/organize.py +201 -0
- synapse_sdk/plugins/categories/upload/actions/upload/steps/upload.py +104 -0
- synapse_sdk/plugins/categories/upload/actions/upload/steps/validate.py +71 -0
- synapse_sdk/plugins/categories/upload/actions/upload/strategies/__init__.py +1 -0
- synapse_sdk/plugins/categories/upload/actions/upload/strategies/base.py +82 -0
- synapse_sdk/plugins/categories/upload/actions/upload/strategies/data_unit/__init__.py +1 -0
- synapse_sdk/plugins/categories/upload/actions/upload/strategies/data_unit/batch.py +39 -0
- synapse_sdk/plugins/categories/upload/actions/upload/strategies/data_unit/single.py +29 -0
- synapse_sdk/plugins/categories/upload/actions/upload/strategies/file_discovery/__init__.py +1 -0
- synapse_sdk/plugins/categories/upload/actions/upload/strategies/file_discovery/flat.py +300 -0
- synapse_sdk/plugins/categories/upload/actions/upload/strategies/file_discovery/recursive.py +287 -0
- synapse_sdk/plugins/categories/upload/actions/upload/strategies/metadata/__init__.py +1 -0
- synapse_sdk/plugins/categories/upload/actions/upload/strategies/metadata/excel.py +174 -0
- synapse_sdk/plugins/categories/upload/actions/upload/strategies/metadata/none.py +16 -0
- synapse_sdk/plugins/categories/upload/actions/upload/strategies/upload/__init__.py +1 -0
- synapse_sdk/plugins/categories/upload/actions/upload/strategies/upload/sync.py +84 -0
- synapse_sdk/plugins/categories/upload/actions/upload/strategies/validation/__init__.py +1 -0
- synapse_sdk/plugins/categories/upload/actions/upload/strategies/validation/default.py +60 -0
- synapse_sdk/plugins/categories/upload/actions/upload/utils.py +250 -0
- synapse_sdk/plugins/categories/upload/templates/README.md +470 -0
- synapse_sdk/plugins/categories/upload/templates/config.yaml +33 -0
- synapse_sdk/plugins/categories/upload/templates/plugin/__init__.py +310 -0
- synapse_sdk/plugins/categories/upload/templates/plugin/upload.py +102 -0
- synapse_sdk/plugins/enums.py +3 -1
- synapse_sdk/plugins/models.py +148 -11
- synapse_sdk/plugins/templates/plugin-config-schema.json +406 -0
- synapse_sdk/plugins/templates/schema.json +491 -0
- synapse_sdk/plugins/templates/synapse-{{cookiecutter.plugin_code}}-plugin/config.yaml +1 -0
- synapse_sdk/plugins/templates/synapse-{{cookiecutter.plugin_code}}-plugin/requirements.txt +1 -1
- synapse_sdk/plugins/utils/__init__.py +46 -0
- synapse_sdk/plugins/utils/actions.py +119 -0
- synapse_sdk/plugins/utils/config.py +203 -0
- synapse_sdk/plugins/{utils.py → utils/legacy.py} +26 -46
- synapse_sdk/plugins/utils/ray_gcs.py +66 -0
- synapse_sdk/plugins/utils/registry.py +58 -0
- synapse_sdk/shared/__init__.py +25 -0
- synapse_sdk/shared/enums.py +93 -0
- synapse_sdk/types.py +19 -0
- synapse_sdk/utils/converters/__init__.py +240 -0
- synapse_sdk/utils/converters/coco/__init__.py +0 -0
- synapse_sdk/utils/converters/coco/from_dm.py +322 -0
- synapse_sdk/utils/converters/coco/to_dm.py +215 -0
- synapse_sdk/utils/converters/dm/__init__.py +57 -0
- synapse_sdk/utils/converters/dm/base.py +137 -0
- synapse_sdk/utils/converters/dm/from_v1.py +273 -0
- synapse_sdk/utils/converters/dm/to_v1.py +321 -0
- synapse_sdk/utils/converters/dm/tools/__init__.py +214 -0
- synapse_sdk/utils/converters/dm/tools/answer.py +95 -0
- synapse_sdk/utils/converters/dm/tools/bounding_box.py +132 -0
- synapse_sdk/utils/converters/dm/tools/bounding_box_3d.py +121 -0
- synapse_sdk/utils/converters/dm/tools/classification.py +75 -0
- synapse_sdk/utils/converters/dm/tools/keypoint.py +117 -0
- synapse_sdk/utils/converters/dm/tools/named_entity.py +111 -0
- synapse_sdk/utils/converters/dm/tools/polygon.py +122 -0
- synapse_sdk/utils/converters/dm/tools/polyline.py +124 -0
- synapse_sdk/utils/converters/dm/tools/prompt.py +94 -0
- synapse_sdk/utils/converters/dm/tools/relation.py +86 -0
- synapse_sdk/utils/converters/dm/tools/segmentation.py +141 -0
- synapse_sdk/utils/converters/dm/tools/segmentation_3d.py +83 -0
- synapse_sdk/utils/converters/dm/types.py +168 -0
- synapse_sdk/utils/converters/dm/utils.py +162 -0
- synapse_sdk/utils/converters/dm_legacy/__init__.py +56 -0
- synapse_sdk/utils/converters/dm_legacy/from_v1.py +627 -0
- synapse_sdk/utils/converters/dm_legacy/to_v1.py +367 -0
- synapse_sdk/utils/converters/pascal/__init__.py +0 -0
- synapse_sdk/utils/converters/pascal/from_dm.py +244 -0
- synapse_sdk/utils/converters/pascal/to_dm.py +214 -0
- synapse_sdk/utils/converters/yolo/__init__.py +0 -0
- synapse_sdk/utils/converters/yolo/from_dm.py +384 -0
- synapse_sdk/utils/converters/yolo/to_dm.py +267 -0
- synapse_sdk/utils/dataset.py +46 -0
- synapse_sdk/utils/encryption.py +158 -0
- synapse_sdk/utils/file/__init__.py +58 -0
- synapse_sdk/utils/file/archive.py +32 -0
- synapse_sdk/utils/file/checksum.py +56 -0
- synapse_sdk/utils/file/chunking.py +31 -0
- synapse_sdk/utils/file/download.py +385 -0
- synapse_sdk/utils/file/encoding.py +40 -0
- synapse_sdk/utils/file/io.py +22 -0
- synapse_sdk/utils/file/upload.py +165 -0
- synapse_sdk/utils/file/video/__init__.py +29 -0
- synapse_sdk/utils/file/video/transcode.py +307 -0
- synapse_sdk/utils/file.py.backup +301 -0
- synapse_sdk/utils/http.py +138 -0
- synapse_sdk/utils/network.py +309 -0
- synapse_sdk/utils/storage/__init__.py +72 -0
- synapse_sdk/utils/storage/providers/__init__.py +183 -0
- synapse_sdk/utils/storage/providers/file_system.py +134 -0
- synapse_sdk/utils/storage/providers/gcp.py +13 -0
- synapse_sdk/utils/storage/providers/http.py +190 -0
- synapse_sdk/utils/storage/providers/s3.py +91 -0
- synapse_sdk/utils/storage/providers/sftp.py +47 -0
- synapse_sdk/utils/storage/registry.py +17 -0
- synapse_sdk-2025.12.3.dist-info/METADATA +123 -0
- synapse_sdk-2025.12.3.dist-info/RECORD +279 -0
- {synapse_sdk-1.0.0a23.dist-info → synapse_sdk-2025.12.3.dist-info}/WHEEL +1 -1
- synapse_sdk/clients/backend/dataset.py +0 -51
- synapse_sdk/plugins/categories/import/actions/import.py +0 -10
- synapse_sdk/plugins/cli/__init__.py +0 -21
- synapse_sdk/plugins/templates/synapse-{{cookiecutter.plugin_code}}-plugin/.env +0 -24
- synapse_sdk/plugins/templates/synapse-{{cookiecutter.plugin_code}}-plugin/.env.dist +0 -24
- synapse_sdk/plugins/templates/synapse-{{cookiecutter.plugin_code}}-plugin/main.py +0 -4
- synapse_sdk/utils/file.py +0 -168
- synapse_sdk/utils/storage.py +0 -91
- synapse_sdk-1.0.0a23.dist-info/METADATA +0 -44
- synapse_sdk-1.0.0a23.dist-info/RECORD +0 -114
- /synapse_sdk/{plugins/cli → cli/plugin}/run.py +0 -0
- /synapse_sdk/{plugins/categories/import → clients/validators}/__init__.py +0 -0
- /synapse_sdk/{plugins/categories/import/actions → devtools}/__init__.py +0 -0
- {synapse_sdk-1.0.0a23.dist-info → synapse_sdk-2025.12.3.dist-info}/entry_points.txt +0 -0
- {synapse_sdk-1.0.0a23.dist-info → synapse_sdk-2025.12.3.dist-info/licenses}/LICENSE +0 -0
- {synapse_sdk-1.0.0a23.dist-info → synapse_sdk-2025.12.3.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,143 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
from pathlib import Path
|
|
4
|
+
from typing import Any, Dict, Iterable, Optional, Union
|
|
5
|
+
|
|
6
|
+
from synapse_sdk.clients.base import BaseClient
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
class ContainerClientMixin(BaseClient):
    """Client mixin exposing the agent container management API."""

    def health_check(self):
        """Perform a health check on the agent's Docker sock endpoint."""
        return self._get('health/')

    def list_containers(self, params: Optional[Dict[str, Any]] = None, *, list_all: bool = False):
        """List containers managed by the agent.

        Args:
            params: Optional query parameters (e.g. {'status': 'running'}).
            list_all: When True, returns ``(generator, count)`` covering every page.

        Returns:
            dict | tuple: Standard paginated response or a tuple for ``list_all``.
        """
        return self._list('containers/', params=params, list_all=list_all)

    def get_container(self, container_id: Union[int, str]):
        """Retrieve details for a specific container."""
        return self._get(f'containers/{container_id}/')

    def delete_container(self, container_id: Union[int, str]):
        """Stop and remove a container."""
        return self._delete(f'containers/{container_id}/')

    def create_container(
        self,
        plugin_release: Optional[Union[str, Any]] = None,
        *,
        model: Optional[int] = None,
        params: Optional[Dict[str, Any]] = None,
        envs: Optional[Dict[str, str]] = None,
        metadata: Optional[Dict[str, Any]] = None,
        labels: Optional[Iterable[str]] = None,
        plugin_file: Optional[Union[str, Path]] = None,
    ):
        """Create a Docker container running a plugin Gradio interface.

        If a container with the same ``plugin_release`` and ``model`` already exists,
        it will be restarted instead of creating a new one.

        Args:
            plugin_release: Plugin identifier. Accepts either ``synapse_sdk.plugins.models.PluginRelease``
                instances or the ``"<plugin_code>@<version>"`` shorthand string.
            model: Optional model ID to associate with the container. Used together with
                ``plugin_release`` to uniquely identify a container for restart behavior.
            params: Arbitrary parameters forwarded to ``plugin/gradio_interface.py``.
            envs: Extra environment variables injected into the container.
            metadata: Additional metadata stored with the container record.
            labels: Optional container labels/tags for display or filtering.
            plugin_file: Optional path to a packaged plugin release to upload directly.
                The archive must contain ``plugin/gradio_interface.py``.

        Returns:
            dict: Container creation response that includes the exposed Gradio endpoint.
                If an existing container was restarted, the response includes ``restarted: True``.

        Raises:
            FileNotFoundError: If ``plugin_file`` is provided but does not exist.
            ValueError: If neither ``plugin_release`` nor ``plugin_file`` are provided.
        """
        if not plugin_release and not plugin_file:
            raise ValueError('Either "plugin_release" or "plugin_file" must be provided to create a container.')

        data: Dict[str, Any] = {}

        if plugin_release:
            data.update(self._serialize_plugin_release(plugin_release))

        if model is not None:
            data['model'] = model

        # Only forward optional fields that actually carry a value; empty
        # dicts/iterables are treated the same as "not provided".
        # (Previously 'params' used the no-op `params if params is not None
        # else None`, which is just `params`.)
        optional_payload = {
            'params': params,
            'envs': envs or None,
            'metadata': metadata or None,
            'labels': list(labels) if labels else None,
        }
        data.update({key: value for key, value in optional_payload.items() if value is not None})

        post_kwargs: Dict[str, Any] = {'data': data}
        if plugin_file:
            file_path = Path(plugin_file)
            if not file_path.exists():
                raise FileNotFoundError(f'Plugin release file not found: {file_path}')
            # NOTE(review): a Path object is handed to the 'files' payload;
            # presumably BaseClient._post opens it before the multipart upload
            # — confirm, since requests itself expects a file object or bytes.
            post_kwargs['files'] = {'file': file_path}

        return self._post('containers/', **post_kwargs)

    @staticmethod
    def _serialize_plugin_release(plugin_release: Union[str, Any]) -> Dict[str, Any]:
        """Normalize plugin release data for API payloads.

        Accepts either a ``PluginRelease``-like object (anything with ``code``
        and ``version`` attributes) or a ``"code@version"`` shorthand string.

        Raises:
            TypeError: If ``plugin_release`` is neither form.
        """
        if hasattr(plugin_release, 'code') and hasattr(plugin_release, 'version'):
            payload = {
                'plugin_release': plugin_release.code,
                'plugin': getattr(plugin_release, 'plugin', None),
                'version': plugin_release.version,
            }

            # Extract action and entrypoint from the first action in the config
            if hasattr(plugin_release, 'config') and 'actions' in plugin_release.config:
                actions = plugin_release.config['actions']
                if actions:
                    # Get the first action (typically 'gradio')
                    action_name = next(iter(actions.keys()))
                    action_config = actions[action_name]
                    payload['action'] = action_name

                    # Convert entrypoint from dotted path to file path, e.g.
                    # 'plugin.gradio_interface.app' -> 'plugin/gradio_interface.py'
                    if 'entrypoint' in action_config:
                        entrypoint = action_config['entrypoint']
                        payload['entrypoint'] = entrypoint.rsplit('.', 1)[0].replace('.', '/') + '.py'

            return payload

        if isinstance(plugin_release, str):
            payload = {'plugin_release': plugin_release}
            if '@' in plugin_release:
                plugin, version = plugin_release.rsplit('@', 1)
                payload.setdefault('plugin', plugin)
                payload.setdefault('version', version)
            return payload

        raise TypeError('plugin_release must be a PluginRelease instance or a formatted string "code@version"')
|
|
@@ -5,3 +5,22 @@ class CoreClientMixin(BaseClient):
|
|
|
5
5
|
def health_check(self):
|
|
6
6
|
path = 'health/'
|
|
7
7
|
return self._get(path)
|
|
8
|
+
|
|
9
|
+
def get_metrics(self, panel):
|
|
10
|
+
path = f'metrics/{panel}/'
|
|
11
|
+
return self._get(path)
|
|
12
|
+
|
|
13
|
+
def get_code_server_info(self, workspace_path=None):
|
|
14
|
+
"""Get code-server connection information from the agent.
|
|
15
|
+
|
|
16
|
+
Args:
|
|
17
|
+
workspace_path: Optional path to set as the workspace directory
|
|
18
|
+
|
|
19
|
+
Returns:
|
|
20
|
+
dict: Code-server connection information
|
|
21
|
+
"""
|
|
22
|
+
path = 'code-server/info/'
|
|
23
|
+
params = {}
|
|
24
|
+
if workspace_path:
|
|
25
|
+
params['workspace'] = workspace_path
|
|
26
|
+
return self._get(path, params=params)
|
synapse_sdk/clients/agent/ray.py
CHANGED
|
@@ -1,8 +1,86 @@
|
|
|
1
|
+
import weakref
|
|
2
|
+
from concurrent.futures import ThreadPoolExecutor
|
|
3
|
+
|
|
1
4
|
from synapse_sdk.clients.base import BaseClient
|
|
2
5
|
from synapse_sdk.clients.exceptions import ClientError
|
|
6
|
+
from synapse_sdk.utils.network import (
|
|
7
|
+
HTTPStreamManager,
|
|
8
|
+
StreamLimits,
|
|
9
|
+
WebSocketStreamManager,
|
|
10
|
+
http_to_websocket_url,
|
|
11
|
+
sanitize_error_message,
|
|
12
|
+
validate_resource_id,
|
|
13
|
+
validate_timeout,
|
|
14
|
+
)
|
|
3
15
|
|
|
4
16
|
|
|
5
17
|
class RayClientMixin(BaseClient):
|
|
18
|
+
"""Mixin class providing Ray cluster management and monitoring functionality.
|
|
19
|
+
|
|
20
|
+
This mixin extends BaseClient with Ray-specific operations for interacting with
|
|
21
|
+
Apache Ray distributed computing clusters. It provides comprehensive job management,
|
|
22
|
+
node monitoring, task tracking, and Ray Serve application control capabilities.
|
|
23
|
+
|
|
24
|
+
Key Features:
|
|
25
|
+
- Job lifecycle management (list, get, monitor)
|
|
26
|
+
- Real-time log streaming via WebSocket and HTTP protocols
|
|
27
|
+
- Node and task monitoring
|
|
28
|
+
- Ray Serve application deployment and management
|
|
29
|
+
- Robust error handling with input validation
|
|
30
|
+
- Resource management with automatic cleanup
|
|
31
|
+
|
|
32
|
+
Streaming Capabilities:
|
|
33
|
+
- WebSocket streaming for real-time log tailing
|
|
34
|
+
- HTTP streaming as fallback protocol
|
|
35
|
+
- Configurable timeouts and stream limits
|
|
36
|
+
- Automatic protocol validation and error recovery
|
|
37
|
+
|
|
38
|
+
Resource Management:
|
|
39
|
+
- Thread pool for concurrent operations (5 workers)
|
|
40
|
+
- WeakSet for tracking active connections
|
|
41
|
+
- Automatic cleanup on object destruction
|
|
42
|
+
- Stream limits to prevent resource exhaustion
|
|
43
|
+
|
|
44
|
+
Usage Examples:
|
|
45
|
+
Basic job operations:
|
|
46
|
+
>>> client = RayClient(base_url="http://ray-head:8265")
|
|
47
|
+
>>> jobs = client.list_jobs()
|
|
48
|
+
>>> job = client.get_job('job-12345')
|
|
49
|
+
|
|
50
|
+
Real-time log streaming:
|
|
51
|
+
>>> # WebSocket streaming (preferred)
|
|
52
|
+
>>> for log_line in client.tail_job_logs('job-12345', protocol='websocket'):
|
|
53
|
+
... print(log_line)
|
|
54
|
+
|
|
55
|
+
>>> # HTTP streaming (fallback)
|
|
56
|
+
>>> for log_line in client.tail_job_logs('job-12345', protocol='stream'):
|
|
57
|
+
... print(log_line)
|
|
58
|
+
|
|
59
|
+
Node and task monitoring:
|
|
60
|
+
>>> nodes = client.list_nodes()
|
|
61
|
+
>>> tasks = client.list_tasks()
|
|
62
|
+
>>> node_details = client.get_node('node-id')
|
|
63
|
+
|
|
64
|
+
Ray Serve management:
|
|
65
|
+
>>> apps = client.list_serve_applications()
|
|
66
|
+
>>> client.delete_serve_application('app-id')
|
|
67
|
+
|
|
68
|
+
Note:
|
|
69
|
+
This class is designed as a mixin and should be combined with other
|
|
70
|
+
client classes that provide authentication and base functionality.
|
|
71
|
+
It requires the BaseClient foundation for HTTP operations.
|
|
72
|
+
"""
|
|
73
|
+
|
|
74
|
+
def __init__(self, *args, **kwargs):
|
|
75
|
+
super().__init__(*args, **kwargs)
|
|
76
|
+
self._thread_pool = ThreadPoolExecutor(max_workers=5, thread_name_prefix='ray_client_')
|
|
77
|
+
self._active_connections = weakref.WeakSet()
|
|
78
|
+
|
|
79
|
+
# Initialize stream managers
|
|
80
|
+
stream_limits = StreamLimits()
|
|
81
|
+
self._websocket_manager = WebSocketStreamManager(self._thread_pool, stream_limits)
|
|
82
|
+
self._http_manager = HTTPStreamManager(self.requests_session, stream_limits)
|
|
83
|
+
|
|
6
84
|
def get_job(self, pk):
|
|
7
85
|
path = f'jobs/{pk}/'
|
|
8
86
|
return self._get(path)
|
|
@@ -15,19 +93,180 @@ class RayClientMixin(BaseClient):
|
|
|
15
93
|
path = f'jobs/{pk}/logs/'
|
|
16
94
|
return self._get(path)
|
|
17
95
|
|
|
18
|
-
def
|
|
19
|
-
|
|
20
|
-
|
|
96
|
+
def websocket_tail_job_logs(self, pk, stream_timeout=10):
|
|
97
|
+
"""Stream job logs in real-time using WebSocket protocol.
|
|
98
|
+
|
|
99
|
+
Establishes a WebSocket connection to stream job logs as they are generated.
|
|
100
|
+
This method provides the lowest latency for real-time log monitoring and is
|
|
101
|
+
the preferred protocol when available.
|
|
102
|
+
|
|
103
|
+
Args:
|
|
104
|
+
pk (str): Job primary key or identifier. Must be alphanumeric with
|
|
105
|
+
optional hyphens/underscores, max 100 characters.
|
|
106
|
+
stream_timeout (float, optional): Maximum time in seconds to wait for
|
|
107
|
+
log data. Defaults to 10. Must be positive
|
|
108
|
+
and cannot exceed 300 seconds.
|
|
109
|
+
|
|
110
|
+
Returns:
|
|
111
|
+
Generator[str, None, None]: A generator yielding log lines as strings.
|
|
112
|
+
Each line includes a newline character.
|
|
113
|
+
|
|
114
|
+
Raises:
|
|
115
|
+
ClientError:
|
|
116
|
+
- 400: If long polling is enabled (incompatible)
|
|
117
|
+
- 400: If pk is empty, contains invalid characters, or too long
|
|
118
|
+
- 400: If stream_timeout is not positive or exceeds maximum
|
|
119
|
+
- 500: If WebSocket library is unavailable
|
|
120
|
+
- 503: If connection to Ray cluster fails
|
|
121
|
+
- 408: If connection timeout occurs
|
|
122
|
+
- 429: If stream limits are exceeded (lines, size, messages)
|
|
123
|
+
|
|
124
|
+
Usage:
|
|
125
|
+
>>> # Basic log streaming
|
|
126
|
+
>>> for log_line in client.websocket_tail_job_logs('job-12345'):
|
|
127
|
+
... print(log_line.strip())
|
|
128
|
+
|
|
129
|
+
>>> # With custom timeout
|
|
130
|
+
>>> for log_line in client.websocket_tail_job_logs('job-12345', stream_timeout=30):
|
|
131
|
+
... if 'ERROR' in log_line:
|
|
132
|
+
... break
|
|
133
|
+
|
|
134
|
+
Technical Notes:
|
|
135
|
+
- Uses WebSocketStreamManager for connection management
|
|
136
|
+
- Automatic input validation and sanitization
|
|
137
|
+
- Resource cleanup handled by WeakSet tracking
|
|
138
|
+
- Stream limits prevent memory exhaustion
|
|
139
|
+
- Thread pool manages WebSocket operations
|
|
140
|
+
|
|
141
|
+
See Also:
|
|
142
|
+
stream_tail_job_logs: HTTP-based alternative
|
|
143
|
+
tail_job_logs: Protocol-agnostic wrapper method
|
|
144
|
+
"""
|
|
145
|
+
if hasattr(self, 'long_poll_handler') and self.long_poll_handler:
|
|
146
|
+
raise ClientError(400, '"websocket_tail_job_logs" does not support long polling')
|
|
147
|
+
|
|
148
|
+
# Validate inputs using network utilities
|
|
149
|
+
validated_pk = validate_resource_id(pk, 'job')
|
|
150
|
+
validated_timeout = validate_timeout(stream_timeout)
|
|
151
|
+
|
|
152
|
+
# Build WebSocket URL
|
|
153
|
+
path = f'ray/jobs/{validated_pk}/logs/ws/'
|
|
154
|
+
url = self._get_url(path, trailing_slash=True)
|
|
155
|
+
ws_url = http_to_websocket_url(url)
|
|
156
|
+
|
|
157
|
+
# Get headers and use WebSocket manager
|
|
158
|
+
headers = self._get_headers()
|
|
159
|
+
headers['Agent-Token'] = f'Token {self.agent_token}'
|
|
160
|
+
context = f'job {validated_pk}'
|
|
161
|
+
|
|
162
|
+
return self._websocket_manager.stream_logs(ws_url, headers, validated_timeout, context)
|
|
163
|
+
|
|
164
|
+
def stream_tail_job_logs(self, pk, stream_timeout=10):
|
|
165
|
+
"""Stream job logs in real-time using HTTP chunked transfer encoding.
|
|
166
|
+
|
|
167
|
+
Establishes an HTTP connection with chunked transfer encoding to stream
|
|
168
|
+
job logs as they are generated. This method serves as a reliable fallback
|
|
169
|
+
when WebSocket connections are not available or suitable.
|
|
170
|
+
|
|
171
|
+
Args:
|
|
172
|
+
pk (str): Job primary key or identifier. Must be alphanumeric with
|
|
173
|
+
optional hyphens/underscores, max 100 characters.
|
|
174
|
+
stream_timeout (float, optional): Maximum time in seconds to wait for
|
|
175
|
+
log data. Defaults to 10. Must be positive
|
|
176
|
+
and cannot exceed 300 seconds.
|
|
177
|
+
|
|
178
|
+
Returns:
|
|
179
|
+
Generator[str, None, None]: A generator yielding log lines as strings.
|
|
180
|
+
Each line includes a newline character.
|
|
181
|
+
|
|
182
|
+
Raises:
|
|
183
|
+
ClientError:
|
|
184
|
+
- 400: If long polling is enabled (incompatible)
|
|
185
|
+
- 400: If pk is empty, contains invalid characters, or too long
|
|
186
|
+
- 400: If stream_timeout is not positive or exceeds maximum
|
|
187
|
+
- 503: If connection to Ray cluster fails
|
|
188
|
+
- 408: If connection or read timeout occurs
|
|
189
|
+
- 404: If job is not found
|
|
190
|
+
- 429: If stream limits are exceeded (lines, size, messages)
|
|
191
|
+
- 500: If unexpected streaming error occurs
|
|
192
|
+
|
|
193
|
+
Usage:
|
|
194
|
+
>>> # Basic HTTP log streaming
|
|
195
|
+
>>> for log_line in client.stream_tail_job_logs('job-12345'):
|
|
196
|
+
... print(log_line.strip())
|
|
21
197
|
|
|
22
|
-
|
|
198
|
+
>>> # With error handling and custom timeout
|
|
199
|
+
>>> try:
|
|
200
|
+
... for log_line in client.stream_tail_job_logs('job-12345', stream_timeout=60):
|
|
201
|
+
... if 'COMPLETED' in log_line:
|
|
202
|
+
... break
|
|
203
|
+
... except ClientError as e:
|
|
204
|
+
... print(f"Streaming failed: {e}")
|
|
23
205
|
|
|
24
|
-
|
|
206
|
+
Technical Notes:
|
|
207
|
+
- Uses HTTPStreamManager for connection management
|
|
208
|
+
- Automatic input validation and sanitization
|
|
209
|
+
- Proper HTTP response cleanup on completion/error
|
|
210
|
+
- Stream limits prevent memory exhaustion
|
|
211
|
+
- Filters out oversized lines (>10KB) automatically
|
|
212
|
+
- Connection reuse through requests session
|
|
213
|
+
|
|
214
|
+
See Also:
|
|
215
|
+
websocket_tail_job_logs: WebSocket-based alternative (preferred)
|
|
216
|
+
tail_job_logs: Protocol-agnostic wrapper method
|
|
217
|
+
"""
|
|
218
|
+
if hasattr(self, 'long_poll_handler') and self.long_poll_handler:
|
|
219
|
+
raise ClientError(400, '"stream_tail_job_logs" does not support long polling')
|
|
220
|
+
|
|
221
|
+
# Validate inputs using network utilities
|
|
222
|
+
validated_pk = validate_resource_id(pk, 'job')
|
|
223
|
+
validated_timeout = validate_timeout(stream_timeout)
|
|
224
|
+
|
|
225
|
+
# Build HTTP URL and prepare request
|
|
226
|
+
path = f'ray/jobs/{validated_pk}/logs/stream/'
|
|
227
|
+
url = self._get_url(path, trailing_slash=True)
|
|
25
228
|
headers = self._get_headers()
|
|
229
|
+
headers['Agent-Token'] = f'Token {self.agent_token}'
|
|
230
|
+
timeout = (self.timeout['connect'], validated_timeout)
|
|
231
|
+
context = f'job {validated_pk}'
|
|
232
|
+
|
|
233
|
+
return self._http_manager.stream_logs(url, headers, timeout, context)
|
|
234
|
+
|
|
235
|
+
def tail_job_logs(self, pk, stream_timeout=10, protocol='stream'):
    """Tail job logs over the selected transport.

    Args:
        pk: Job primary key.
        stream_timeout: Read timeout (seconds) for the streaming call.
        protocol: Either 'websocket' or 'stream' (default: 'stream').

    Raises:
        ClientError: 400 for an unknown protocol or invalid inputs,
            500 when the underlying transport fails unexpectedly.
    """
    # Reject unknown transports before touching the network.
    if protocol not in ('websocket', 'stream'):
        raise ClientError(400, f'Unsupported protocol: {protocol}. Use "websocket" or "stream"')

    # Shared input validation (raises ClientError on bad values).
    validate_resource_id(pk, 'job')
    validate_timeout(stream_timeout)

    # Pick the transport implementation up front; both share a signature.
    tail = self.websocket_tail_job_logs if protocol == 'websocket' else self.stream_tail_job_logs
    try:
        return tail(pk, stream_timeout)
    except ClientError:
        raise  # already shaped for callers — pass through untouched
    except Exception as exc:
        # Normalize any unexpected failure into a sanitized ClientError.
        cleaned = sanitize_error_message(str(exc), f'job {pk}')
        raise ClientError(500, f'Protocol {protocol} failed: {cleaned}')
|
|
262
|
+
|
|
263
|
+
def __del__(self):
    """Best-effort release of owned resources at garbage collection."""
    try:
        pool = getattr(self, '_thread_pool', None)
        if pool is not None:
            pool.shutdown(wait=False)
    except Exception:
        # Never let cleanup errors escape a destructor.
        pass
|
|
31
270
|
|
|
32
271
|
def get_node(self, pk):
|
|
33
272
|
path = f'nodes/{pk}/'
|
|
@@ -56,3 +295,53 @@ class RayClientMixin(BaseClient):
|
|
|
56
295
|
def delete_serve_application(self, pk):
    """Delete the serve application identified by *pk* via the REST API."""
    return self._delete(f'serve_applications/{pk}/')
|
|
298
|
+
|
|
299
|
+
def stop_job(self, pk):
    """Gracefully stop a running job via Ray's stop-job endpoint.

    Graceful termination preserves job state, so the job can potentially be
    resubmitted later.

    Args:
        pk (str): Job primary key. Alphanumeric with optional hyphens or
            underscores, at most 100 characters.

    Returns:
        dict: Server response describing the job's stop status.

    Raises:
        ClientError:
            - 400: Invalid ``pk`` or job already in a terminal state
              (STOPPED, FAILED, ...).
            - 404: Job not found.
            - 503: Connection to the Ray cluster failed.
            - 500: Unexpected error while stopping.

    Usage:
        >>> result = client.stop_job('job-12345')
        >>> print(result['status'])  # e.g. 'STOPPING'

    See Also:
        resume_job: Restart a previously stopped job.
    """
    # validate_resource_id raises ClientError(400) on malformed identifiers.
    job_id = validate_resource_id(pk, 'job')
    # Ray's API expects an empty POST body on the stop endpoint.
    return self._post(f'jobs/{job_id}/stop/')
|
|
@@ -1,6 +1,7 @@
|
|
|
1
1
|
from synapse_sdk.clients.backend.annotation import AnnotationClientMixin
|
|
2
2
|
from synapse_sdk.clients.backend.core import CoreClientMixin
|
|
3
|
-
from synapse_sdk.clients.backend.
|
|
3
|
+
from synapse_sdk.clients.backend.data_collection import DataCollectionClientMixin
|
|
4
|
+
from synapse_sdk.clients.backend.hitl import HITLClientMixin
|
|
4
5
|
from synapse_sdk.clients.backend.integration import IntegrationClientMixin
|
|
5
6
|
from synapse_sdk.clients.backend.ml import MLClientMixin
|
|
6
7
|
|
|
@@ -8,27 +9,44 @@ from synapse_sdk.clients.backend.ml import MLClientMixin
|
|
|
8
9
|
class BackendClient(
    AnnotationClientMixin,
    CoreClientMixin,
    DataCollectionClientMixin,
    IntegrationClientMixin,
    MLClientMixin,
    HITLClientMixin,
):
    """Client for the synapse backend API.

    * Access token overrides authorization token and tenant token.

    Attrs:
        access_token (str): Synapse access token for the backend API.
        authorization_token (str): Authorization token for the backend API.
        tenant_token (str): Tenant token for the backend API.
        agent_token (str): Agent token for the backend API.
        timeout (Dict): Default request timeouts; accepts 'connect' and
            'read' keys.
    """

    name = 'Backend'
    # Token defaults; instances override these in __init__.
    access_token = None
    authorization_token = None
    tenant_token = None
    agent_token = None

    def __init__(self, base_url, access_token=None, token=None, tenant=None, agent_token=None, timeout=None, **kwargs):
        super().__init__(base_url, timeout=timeout)
        self.access_token = access_token
        self.authorization_token = token
        self.tenant_token = tenant
        self.agent_token = agent_token

    def _get_headers(self):
        """Build auth headers from whichever tokens are configured."""
        token_headers = (
            ('Synapse-Access-Token', self.access_token),
            ('Authorization', self.authorization_token),
            ('Synapse-Tenant', self.tenant_token),
            ('SYNAPSE-Agent', self.agent_token),
        )
        # Only emit a header when its token is set; all use the Token scheme.
        return {name: f'Token {value}' for name, value in token_headers if value}
|
|
@@ -7,18 +7,26 @@ class AnnotationClientMixin(BaseClient):
|
|
|
7
7
|
path = f'projects/{pk}/'
|
|
8
8
|
return self._get(path)
|
|
9
9
|
|
|
10
|
+
def get_task(self, pk, params=None):
    """Retrieve a single task.

    Args:
        pk: Task primary key.
        params: Optional query parameters forwarded to the request.

    Returns:
        The task payload returned by the backend.
    """
    # params defaults to None for consistency with the list_* helpers on
    # this mixin; existing callers passing params positionally are unaffected.
    path = f'tasks/{pk}/'
    return self._get(path, params=params)
|
|
13
|
+
|
|
14
|
+
def annotate_task_data(self, pk, data):
    """Submit annotation data for the task identified by *pk*."""
    return self._put(f'tasks/{pk}/annotate_task_data/', data=data)
|
|
17
|
+
|
|
10
18
|
def get_task_tag(self, pk):
    """Fetch a single task tag by primary key."""
    return self._get(f'task_tags/{pk}/')
|
|
13
21
|
|
|
14
|
-
def list_task_tags(self,
|
|
22
|
+
def list_task_tags(self, params=None):
    """List task tags.

    Args:
        params: Optional query parameters (filtering/pagination).

    Returns:
        The listing response from the backend.
    """
    # Default of None matches list_tasks() and the other list_* helpers;
    # callers that already pass params keep working unchanged.
    path = 'task_tags/'
    return self._list(path, params=params)
|
|
17
25
|
|
|
18
|
-
def list_tasks(self,
|
|
19
|
-
path = 'tasks/'
|
|
26
|
+
def list_tasks(self, params=None, url_conversion=None, list_all=False):
    """List tasks via the SDK endpoint, resolving file URLs.

    Args:
        params: Optional query parameters.
        url_conversion: Optional URL-conversion config; when omitted, a
            default handling the 'files' fields is applied.
        list_all: When True, iterate all pages instead of just the first.
    """
    conversion = get_default_url_conversion(url_conversion, files_fields=['files'])
    return self._list('sdk/tasks/', params=params, url_conversion=conversion, list_all=list_all)
|
|
22
30
|
|
|
23
31
|
def create_tasks(self, data):
|
|
24
32
|
path = 'tasks/'
|
|
@@ -3,15 +3,42 @@ import os
|
|
|
3
3
|
from pathlib import Path
|
|
4
4
|
|
|
5
5
|
from synapse_sdk.clients.base import BaseClient
|
|
6
|
+
from synapse_sdk.utils.file import read_file_in_chunks
|
|
6
7
|
|
|
7
8
|
|
|
8
9
|
class CoreClientMixin(BaseClient):
|
|
9
10
|
def create_chunked_upload(self, file_path):
|
|
10
|
-
|
|
11
|
-
|
|
12
|
-
while chunk := file.read(chunk_size):
|
|
13
|
-
yield chunk
|
|
11
|
+
"""
|
|
12
|
+
Upload a file using chunked upload for efficient handling of large files.
|
|
14
13
|
|
|
14
|
+
This method breaks the file into chunks and uploads them sequentially to the server.
|
|
15
|
+
It calculates an MD5 hash of the entire file to ensure data integrity during upload.
|
|
16
|
+
|
|
17
|
+
Args:
|
|
18
|
+
file_path (str | Path): Path to the file to upload
|
|
19
|
+
|
|
20
|
+
Returns:
|
|
21
|
+
dict: Response from the server after successful upload completion,
|
|
22
|
+
typically containing upload confirmation and file metadata
|
|
23
|
+
|
|
24
|
+
Raises:
|
|
25
|
+
FileNotFoundError: If the specified file doesn't exist
|
|
26
|
+
PermissionError: If the file can't be read due to permissions
|
|
27
|
+
ClientError: If there's an error during the upload process
|
|
28
|
+
OSError: If there's an OS-level error accessing the file
|
|
29
|
+
|
|
30
|
+
Example:
|
|
31
|
+
```python
|
|
32
|
+
client = CoreClientMixin(base_url='https://api.example.com')
|
|
33
|
+
result = client.create_chunked_upload('/path/to/large_file.zip')
|
|
34
|
+
print(f"Upload completed: {result}")
|
|
35
|
+
```
|
|
36
|
+
|
|
37
|
+
Note:
|
|
38
|
+
- Uses 50MB chunks by default for optimal upload performance
|
|
39
|
+
- Automatically resumes from the last successfully uploaded chunk
|
|
40
|
+
- Verifies upload integrity using MD5 checksum
|
|
41
|
+
"""
|
|
15
42
|
file_path = Path(file_path)
|
|
16
43
|
size = os.path.getsize(file_path)
|
|
17
44
|
hash_md5 = hashlib.md5()
|