synapse-sdk 1.0.0a32__py3-none-any.whl → 1.0.0a34__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of synapse-sdk might be problematic.

@@ -1,6 +1,7 @@
 from synapse_sdk.clients.backend.annotation import AnnotationClientMixin
 from synapse_sdk.clients.backend.core import CoreClientMixin
 from synapse_sdk.clients.backend.dataset import DatasetClientMixin
+from synapse_sdk.clients.backend.hitl import HITLClientMixin
 from synapse_sdk.clients.backend.integration import IntegrationClientMixin
 from synapse_sdk.clients.backend.ml import MLClientMixin
 
@@ -11,6 +12,7 @@ class BackendClient(
     DatasetClientMixin,
     IntegrationClientMixin,
     MLClientMixin,
+    HITLClientMixin,
 ):
     name = 'Backend'
     token = None
@@ -11,14 +11,14 @@ class AnnotationClientMixin(BaseClient):
         path = f'task_tags/{pk}/'
         return self._get(path)
 
-    def list_task_tags(self, data):
+    def list_task_tags(self, params):
         path = 'task_tags/'
-        return self._list(path, data=data)
+        return self._list(path, params=params)
 
-    def list_tasks(self, data, url_conversion=None, list_all=False):
+    def list_tasks(self, params=None, url_conversion=None, list_all=False):
         path = 'tasks/'
         url_conversion = get_default_url_conversion(url_conversion, files_fields=['files'])
-        return self._list(path, data=data, url_conversion=url_conversion, list_all=list_all)
+        return self._list(path, params=params, url_conversion=url_conversion, list_all=list_all)
 
     def create_tasks(self, data):
         path = 'tasks/'
@@ -0,0 +1,17 @@
+from synapse_sdk.clients.base import BaseClient
+from synapse_sdk.clients.utils import get_default_url_conversion
+
+
+class HITLClientMixin(BaseClient):
+    def get_assignment(self, pk):
+        path = f'assignments/{pk}/'
+        return self._get(path)
+
+    def list_assignments(self, params=None, url_conversion=None, list_all=False):
+        path = 'assignments/'
+        url_conversion = get_default_url_conversion(url_conversion, files_fields=['files'])
+        return self._list(path, params=params, url_conversion=url_conversion, list_all=list_all)
+
+    def set_tags_assignments(self, data, params=None):
+        path = 'assignments/set_tags/'
+        return self._post(path, payload=data, params=params)
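The new HITLClientMixin above is mixed into BackendClient, so assignment endpoints are reached through the same client object as the existing annotation APIs. A minimal usage sketch; the host, token, BackendClient constructor signature, filter keys, and set_tags payload are assumptions, not taken from the package:

    from synapse_sdk.clients.backend import BackendClient

    # Hypothetical host/token; the constructor signature is assumed.
    client = BackendClient('https://backend.example.com', token='YOUR_TOKEN')

    assignment = client.get_assignment(42)  # GET assignments/42/

    # With list_all=True the list helpers return (results, count), which the export handlers rely on.
    results, count = client.list_assignments(params={'project': 1}, list_all=True)

    # POST assignments/set_tags/ ; the payload shape here is purely illustrative.
    client.set_tags_assignments({'tags': ['reviewed']}, params={'project': 1})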
@@ -1,3 +1,4 @@
+from synapse_sdk.clients.backend.models import Storage
 from synapse_sdk.clients.base import BaseClient
 from synapse_sdk.utils.file import convert_file_to_base64
 
@@ -79,5 +80,6 @@ class IntegrationClientMixin(BaseClient):
         return self._list(path, params=params, list_all=list_all)
 
     def get_storage(self, pk):
+        """Get specific storage data from synapse backend."""
         path = f'storages/{pk}/'
-        return self._get(path)
+        return self._get(path, pydantic_model=Storage)
@@ -0,0 +1,44 @@
+from enum import Enum
+from typing import Dict
+
+from pydantic import BaseModel
+
+
+class StorageCategory(str, Enum):
+    """Synapse Backend Storage Category Enum."""
+
+    INTERNAL = 'internal'
+    EXTERNAL = 'external'
+
+
+class StorageProvider(str, Enum):
+    """Synapse Backend Storage Provider Enum."""
+
+    AMAZON_S3 = 'amazon_s3'
+    AZURE = 'azure'
+    DIGITAL_OCEAN = 'digital_ocean'
+    FILE_SYSTEM = 'file_system'
+    FTP = 'ftp'
+    SFTP = 'sftp'
+    MINIO = 'minio'
+    GCP = 'gcp'
+
+
+class Storage(BaseModel):
+    """Synapse Backend Storage Model.
+
+    Attrs:
+        id (int): The storage pk.
+        name (str): The storage name.
+        category (str): The storage category. (ex: internal, external)
+        provider (str): The storage provider. (ex: s3, gcp)
+        configuration (Dict): The storage configuration.
+        is_default (bool): The storage is default for Synapse backend workspace.
+    """
+
+    id: int
+    name: str
+    category: StorageCategory
+    provider: StorageProvider
+    configuration: Dict
+    is_default: bool
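The new Storage model mirrors the storages/{pk}/ payload that IntegrationClientMixin.get_storage() now validates. A minimal sketch of validating such a payload directly; the field values, in particular the configuration dict, are placeholders:

    from synapse_sdk.clients.backend.models import Storage, StorageProvider

    storage = Storage.model_validate({
        'id': 1,
        'name': 'workspace-default',
        'category': 'internal',
        'provider': 'amazon_s3',
        'configuration': {'bucket': 'synapse-data'},  # provider-specific, illustrative only
        'is_default': True,
    })
    assert storage.provider is StorageProvider.AMAZON_S3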
@@ -85,26 +85,59 @@ class BaseClient:
         except ValueError:
             return response.text
 
-    def _get(self, path, url_conversion=None, **kwargs):
+    def _get(self, path, url_conversion=None, pydantic_model=None, **kwargs):
+        """
+        Perform a GET request and optionally convert response to a pydantic model.
+
+        Args:
+            path (str): URL path to request.
+            url_conversion (dict, optional): Configuration for URL to path conversion.
+            pydantic_model (Type, optional): Pydantic model to convert the response to.
+            **kwargs: Additional keyword arguments to pass to the request.
+
+        Returns:
+            The response data, optionally converted to a pydantic model.
+        """
         response = self._request('get', path, **kwargs)
+
         if url_conversion:
             if url_conversion['is_list']:
                 files_url_to_path_from_objs(response['results'], **url_conversion, is_async=True)
             else:
                 files_url_to_path_from_objs(response, **url_conversion)
+
+        if pydantic_model:
+            return self._validate_response_with_pydantic_model(response, pydantic_model)
+
         return response
 
-    def _post(self, path, **kwargs):
-        return self._request('post', path, **kwargs)
+    def _post(self, path, pydantic_model=None, **kwargs):
+        response = self._request('post', path, **kwargs)
+        if pydantic_model:
+            return self._validate_response_with_pydantic_model(response, pydantic_model)
+        else:
+            return response
 
-    def _put(self, path, **kwargs):
-        return self._request('put', path, **kwargs)
+    def _put(self, path, pydantic_model=None, **kwargs):
+        response = self._request('put', path, **kwargs)
+        if pydantic_model:
+            return self._validate_response_with_pydantic_model(response, pydantic_model)
+        else:
+            return response
 
-    def _patch(self, path, **kwargs):
-        return self._request('patch', path, **kwargs)
+    def _patch(self, path, pydantic_model=None, **kwargs):
+        response = self._request('patch', path, **kwargs)
+        if pydantic_model:
+            return self._validate_response_with_pydantic_model(response, pydantic_model)
+        else:
+            return response
 
-    def _delete(self, path, **kwargs):
-        return self._request('delete', path, **kwargs)
+    def _delete(self, path, pydantic_model=None, **kwargs):
+        response = self._request('delete', path, **kwargs)
+        if pydantic_model:
+            return self._validate_response_with_pydantic_model(response, pydantic_model)
+        else:
+            return response
 
     def _list(self, path, url_conversion=None, list_all=False, **kwargs):
         response = self._get(path, **kwargs)
@@ -121,3 +154,17 @@ class BaseClient:
 
     def exists(self, api, *args, **kwargs):
         return getattr(self, api)(*args, **kwargs)['count'] > 0
+
+    def _validate_response_with_pydantic_model(self, response, pydantic_model):
+        """Validate a response with a pydantic model."""
+        # Check if model is a pydantic model (has the __pydantic_model__ attribute)
+        if (
+            hasattr(pydantic_model, '__pydantic_model__')
+            or hasattr(pydantic_model, 'model_validate')
+            or hasattr(pydantic_model, 'parse_obj')
+        ):
+            pydantic_model.model_validate(response)
+            return response
+        else:
+            # Not a pydantic model
+            raise TypeError('The provided model is not a pydantic model')
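With the plumbing above, any of _get/_post/_put/_patch/_delete can be given a pydantic_model: the response is checked with model_validate() and then returned unchanged, so callers keep plain dict access while schema drift surfaces as pydantic.ValidationError (and a non-pydantic class raises TypeError). A hedged sketch through the public API; the host, token, and constructor signature are assumed:

    from synapse_sdk.clients.backend import BackendClient

    client = BackendClient('https://backend.example.com', token='YOUR_TOKEN')  # assumed constructor

    # get_storage() passes pydantic_model=Storage; the validated response is still a dict.
    storage = client.get_storage(1)
    print(storage['provider'], storage['is_default'])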
@@ -0,0 +1,31 @@
+class FileSpecificationValidator:
+    """File specification validator class for synapse backend collection.
+
+    Args:
+        file_spec_template (list):
+            * List of dictionaries containing file specification template
+            * This is from synapse-backend file specification data.
+        organized_files (list): List of dictionaries containing organized files.
+    """
+
+    def __init__(self, file_spec_template, organized_files):
+        self.file_spec_template = file_spec_template
+        self.organized_files = organized_files
+
+    def validate(self):
+        """Validate the file specification template with organized files.
+
+        Returns:
+            bool: True if the file specification template is valid, False otherwise.
+        """
+        for spec in self.file_spec_template:
+            spec_name = spec['name']
+            is_required = spec['is_required']
+
+            for file_group in self.organized_files:
+                files = file_group['files']
+                if is_required and spec_name not in files:
+                    return False
+                if spec_name in files and not files[spec_name]:
+                    return False
+        return True
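A short sketch of how UploadAction now exercises this validator; the spec names and file paths are illustrative, but the dict keys ('name', 'is_required', 'files') match what validate() reads:

    from synapse_sdk.clients.validators.collections import FileSpecificationValidator

    file_spec_template = [
        {'name': 'image', 'is_required': True},
        {'name': 'annotation', 'is_required': False},
    ]
    organized_files = [
        {'files': {'image': '/data/0001.jpg'}},
        {'files': {'image': '/data/0002.jpg', 'annotation': '/data/0002.json'}},
    ]

    validator = FileSpecificationValidator(file_spec_template, organized_files)
    assert validator.validate()  # every group supplies the required 'image' entry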
@@ -1,3 +1,6 @@
+from abc import ABC, abstractmethod
+from typing import Any, Literal
+
 from pydantic import BaseModel, field_validator
 from pydantic_core import PydanticCustomError
 
@@ -9,11 +12,158 @@ from synapse_sdk.plugins.enums import PluginCategory, RunMethod
 from synapse_sdk.utils.storage import get_pathlib
 
 
+class ExportTargetHandler(ABC):
+    """
+    Abstract base class for handling export targets.
+
+    This class defines the blueprint for export target handlers, requiring the implementation
+    of methods to validate filters, retrieve results, and process collections of results.
+    """
+
+    @abstractmethod
+    def validate_filter(self, value: dict, client: Any):
+        """
+        Validate filter query params to request original data from api.
+
+        Args:
+            value (dict): The filter criteria to validate.
+            client (Any): The client used to validate the filter.
+
+        Raises:
+            PydanticCustomError: If the filter criteria are invalid.
+
+        Returns:
+            dict: The validated filter criteria.
+        """
+        pass
+
+    @abstractmethod
+    def get_results(self, client: Any, filters: dict):
+        """
+        Retrieve original data from target sources.
+
+        Args:
+            client (Any): The client used to retrieve the results.
+            filters (dict): The filter criteria to apply.
+
+        Returns:
+            tuple: A tuple containing the results and the total count of results.
+        """
+        pass
+
+    @abstractmethod
+    def get_export_item(self, results):
+        """
+        Providing elements to build export data.
+
+        Args:
+            results (list): The results to process.
+
+        Yields:
+            generator: A generator that yields processed data items.
+        """
+        pass
+
+
+class AssignmentExportTargetHandler(ExportTargetHandler):
+    def validate_filter(self, value: dict, client: Any):
+        if 'project' not in value:
+            raise PydanticCustomError('missing_field', _('Project is required for Assignment.'))
+        try:
+            client.list_assignments(params=value)
+        except ClientError:
+            raise PydanticCustomError('client_error', _('Unable to get Assignment.'))
+        return value
+
+    def get_results(self, client: Any, filters: dict):
+        return client.list_assignments(params=filters, list_all=True)
+
+    def get_export_item(self, results):
+        for result in results:
+            yield {
+                'data': result['data'],
+                'files': result['file'],
+                'id': result['id'],
+            }
+
+
+class GroundTruthExportTargetHandler(ExportTargetHandler):
+    def validate_filter(self, value: dict, client: Any):
+        if 'ground_truth_dataset_version' not in value:
+            raise PydanticCustomError('missing_field', _('Ground Truth dataset version is required.'))
+        try:
+            client.get_ground_truth_version(value['ground_truth_dataset_version'])
+        except ClientError:
+            raise PydanticCustomError('client_error', _('Unable to get Ground Truth dataset version.'))
+        return value
+
+    def get_results(self, client: Any, filters: dict):
+        filters['ground_truth_dataset_versions'] = filters.pop('ground_truth_dataset_version')
+        return client.list_ground_truth_events(params=filters, list_all=True)
+
+    def get_export_item(self, results):
+        for result in results:
+            files_key = next(iter(result['data_unit']['files']))
+            yield {
+                'data': result['data'],
+                'files': result['data_unit']['files'][files_key],
+                'id': result['ground_truth'],
+            }
+
+
+class TaskExportTargetHandler(ExportTargetHandler):
+    def validate_filter(self, value: dict, client: Any):
+        if 'project' not in value:
+            raise PydanticCustomError('missing_field', _('Project is required for Task.'))
+        try:
+            client.list_tasks(params=value)
+        except ClientError:
+            raise PydanticCustomError('client_error', _('Unable to get Task.'))
+        return value
+
+    def get_results(self, client: Any, filters: dict):
+        filters['expand'] = 'data_unit'
+        return client.list_tasks(params=filters, list_all=True)
+
+    def get_export_item(self, results):
+        for result in results:
+            files_key = next(iter(result['data_unit']['files']))
+            yield {
+                'data': result['data'],
+                'files': result['data_unit']['files'][files_key],
+                'id': result['id'],
+            }
+
+
+class TargetHandlerFactory:
+    @staticmethod
+    def get_handler(target: str) -> ExportTargetHandler:
+        if target == 'assignment':
+            return AssignmentExportTargetHandler()
+        elif target == 'ground_truth':
+            return GroundTruthExportTargetHandler()
+        elif target == 'task':
+            return TaskExportTargetHandler()
+        else:
+            raise ValueError(f'Unknown target: {target}')
+
+
 class ExportParams(BaseModel):
+    """
+    Parameters for the export action.
+
+    Attributes:
+        storage (int): The storage ID to save the exported data.
+        save_original_file (bool): Whether to save the original file.
+        path (str): The path to save the exported data.
+        target (str): The target source to export data from. (ex. ground_truth, assignment, task)
+        filter (dict): The filter criteria to apply.
+    """
+
     storage: int
     save_original_file: bool = True
     path: str
-    ground_truth_dataset_version: int
+    target: Literal['assignment', 'ground_truth', 'task']
     filter: dict
 
     @field_validator('storage')
@@ -27,16 +177,14 @@ class ExportParams(BaseModel):
             raise PydanticCustomError('client_error', _('Unable to get storage from Synapse backend.'))
         return value
 
-    @field_validator('ground_truth_dataset_version')
+    @field_validator('filter')
     @staticmethod
-    def check_ground_truth_dataset_version_exists(value, info):
+    def check_filter_by_target(value, info):
         action = info.context['action']
         client = action.client
-        try:
-            client.get_ground_truth_version(value)
-        except ClientError:
-            raise PydanticCustomError('client_error', _('Unable to get Ground Truth dataset version.'))
-        return value
+        target = action.params['target']
+        handler = TargetHandlerFactory.get_handler(target)
+        return handler.validate_filter(value, client)
 
 
 @register_action
@@ -51,32 +199,24 @@ class ExportAction(Action):
         }
     }
 
-    def get_dataset(self, results):
-        """Get dataset for export."""
-        for result in results:
-            yield {
-                'data': result['data'],
-                'files': result['data_unit']['files'],
-                'id': result['ground_truth'],
-            }
-
-    def get_filtered_results(self):
-        """Get filtered ground truth events."""
-        self.params['filter']['ground_truth_dataset_versions'] = self.params['ground_truth_dataset_version']
-        filters = {'expand': 'data', **self.params['filter']}
-
+    def get_filtered_results(self, filters, handler):
+        """Get filtered target results."""
         try:
-            gt_dataset_events_list = self.client.list_ground_truth_events(params=filters, list_all=True)
-            results = gt_dataset_events_list[0]
-            count = gt_dataset_events_list[1]
+            result_list = handler.get_results(self.client, filters)
+            results = result_list[0]
+            count = result_list[1]
         except ClientError:
             raise PydanticCustomError('client_error', _('Unable to get Ground Truth dataset.'))
         return results, count
 
     def start(self):
-        self.params['results'], self.params['count'] = self.get_filtered_results()
-        dataset = self.get_dataset(self.params['results'])
+        filters = {'expand': 'data', **self.params['filter']}
+        target = self.params['target']
+        handler = TargetHandlerFactory.get_handler(target)
+
+        self.params['results'], self.params['count'] = self.get_filtered_results(filters, handler)
+        export_items = handler.get_export_item(self.params['results'])
 
         storage = self.client.get_storage(self.params['storage'])
         pathlib_cwd = get_pathlib(storage, self.params['path'])
-        return self.entrypoint(self.run, dataset, pathlib_cwd, **self.params)
+        return self.entrypoint(self.run, export_items, pathlib_cwd, **self.params)
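The export action now selects a handler from the generic target field instead of a hard-coded ground-truth flow. A hedged sketch of the new parameter shape and handler lookup; the IDs and filter values are placeholders, and ExportParams validation itself needs an action client injected via the pydantic context, so it is not instantiated here:

    from synapse_sdk.plugins.categories.export.actions.export import TargetHandlerFactory

    params = {
        'storage': 1,
        'save_original_file': True,
        'path': 'exports/run-001',
        'target': 'ground_truth',                       # or 'assignment' / 'task'
        'filter': {'ground_truth_dataset_version': 3},  # 'assignment'/'task' require 'project' instead
    }

    handler = TargetHandlerFactory.get_handler(params['target'])  # GroundTruthExportTargetHandler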
@@ -4,12 +4,12 @@ from pathlib import Path
 import requests
 
 
-def export(run, input_dataset, path_root, **params):
+def export(run, export_items, path_root, **params):
     """Executes the export task.
 
     Args:
        run : Execution object
-        input_dataset (generator):
+        export_items (generator):
            - data (dict): dm_schema_data information.
            - files (dict): File information. Includes file URL, original file path, metadata, etc.
            - id (int): ground_truth ID
@@ -42,11 +42,11 @@ def export(run, input_dataset, path_root, **params):
     total = params['count']
     # progress init
     run.set_progress(0, total, category='dataset_conversion')
-    for no, input_data in enumerate(input_dataset, start=1):
+    for no, export_item in enumerate(export_items, start=1):
         run.set_progress(no, total, category='dataset_conversion')
         if no == 1:
             run.log_message('Converting dataset.')
-        preprocessed_data = before_convert(input_data)
+        preprocessed_data = before_convert(export_item)
         converted_data = convert_data(preprocessed_data)
         final_data = after_convert(converted_data)
 
@@ -97,7 +97,7 @@ def get_original_file_pathlib(files):
     Returns:
         pathlib.Path: The original file path extracted from the metadata.
     """
-    return Path(next(iter(files.values()))['meta']['path_original'])
+    return Path(files['meta']['path_original'])
 
 
 def save_original_file(result, base_path, error_file_list):
@@ -108,7 +108,7 @@ def save_original_file(result, base_path, error_file_list):
         base_path (Path): The directory where the file will be saved.
         error_file_list (list): A list to store error files.
     """
-    file_url = next(iter(result['files'].values()))['url']
+    file_url = result['files']['url']
     file_name = get_original_file_pathlib(result['files']).name
     response = requests.get(file_url)
     try:
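Because the handlers now yield a single file mapping per item, the template indexes result['files'] directly instead of unwrapping a dict of files. An illustrative item shape with placeholder values, limited to the keys the template reads:

    export_item = {
        'data': {'annotations': []},                              # dm_schema_data, placeholder
        'files': {
            'url': 'https://backend.example.com/media/0001.jpg',  # downloaded by save_original_file()
            'meta': {'path_original': 'images/0001.jpg'},         # read by get_original_file_pathlib()
        },
        'id': 123,
    }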
@@ -1,4 +1,7 @@
+from packaging import version
+
 from synapse_sdk.clients.exceptions import ClientError
+from synapse_sdk.i18n import gettext as _
 from synapse_sdk.plugins.categories.base import Action
 from synapse_sdk.plugins.categories.decorators import register_action
 from synapse_sdk.plugins.enums import PluginCategory, RunMethod
@@ -25,12 +28,36 @@ class DeploymentAction(Action):
 
         self.ray_init()
 
-        deployment = serve.deployment(ray_actor_options=self.get_actor_options())(serve.ingress(app)(self.entrypoint))
+        ray_actor_options = self.get_actor_options()
+
+        if self.is_gradio_deployment:
+            from ray.serve.gradio_integrations import GradioServer
+
+            self.assert_gradio_version()
+
+            # GradioIngress differs from serve.ingress(app), thus the difference in self.entrypoint callable
+            try:
+                entrypoint = self.entrypoint().app
+            except (TypeError, ImportError):
+                raise ClientError(
+                    400,
+                    _(
+                        'Gradio app is not callable.'
+                        'Please ensure that your Deployment class defines a callable `app` function'
+                    ),
+                )
+
+            deployment = GradioServer.options(ray_actor_options=ray_actor_options).bind(entrypoint)
+        else:
+            deployment = serve.deployment(ray_actor_options=ray_actor_options)(
+                serve.ingress(app)(self.entrypoint)
+            ).bind(self.envs['SYNAPSE_PLUGIN_RUN_HOST'])
+
         serve.delete(self.plugin_release.code)
 
         # TODO add run object
         serve.run(
-            deployment.bind(self.envs['SYNAPSE_PLUGIN_RUN_HOST']),
+            deployment,
             name=self.plugin_release.code,
             route_prefix=f'/{self.plugin_release.checksum}',
         )
@@ -51,3 +78,30 @@ class DeploymentAction(Action):
         except ClientError:
             pass
         return None
+
+    @property
+    def is_gradio_deployment(self):
+        return self.config.get('gradio_app', False)
+
+    def assert_gradio_version(self):
+        """Assert gradio version is not greater than 3.50.2.
+        Ray Serve cannot pickle gradio endpoints, thus gradio version greater than 3.50.2 is not supported (SSE Issues)
+        """
+        GRADIO_VERSION_MAX_ALLOWED = '3.50.2'
+
+        gradio_installed = False
+        gradio_version = None
+        for req in self.requirements:
+            if req.startswith('gradio=='):
+                gradio_installed = True
+                gradio_version = req.split('==')[1]
+                break
+
+        assert gradio_installed, _(
+            'Gradio is not installed or version is not specified. Please install gradio==3.50.2 to use this feature.'
+        )
+
+        if version.parse(gradio_version) > version.parse(GRADIO_VERSION_MAX_ALLOWED):
+            raise AssertionError(
+                f'Gradio version {gradio_version} is greater than maximum allowed version {GRADIO_VERSION_MAX_ALLOWED}'
+            )
@@ -6,3 +6,5 @@ actions:
     entrypoint: plugin.auto_label.MyAutoLabel
     model:
       neural_net__plugin__code: sam2
+    ui_schema: |
+      Dumped FormKit Schema for auto_label action
@@ -1,4 +1,3 @@
-from enum import Enum
 from typing import Annotated, Dict, List
 
 from pydantic import AfterValidator, BaseModel, field_validator
@@ -6,6 +5,7 @@ from pydantic_core import PydanticCustomError
 
 from synapse_sdk.clients.exceptions import ClientError
 from synapse_sdk.clients.utils import get_batched_list
+from synapse_sdk.clients.validators.collections import FileSpecificationValidator
 from synapse_sdk.i18n import gettext as _
 from synapse_sdk.plugins.categories.base import Action
 from synapse_sdk.plugins.categories.decorators import register_action
@@ -145,7 +145,7 @@ class UploadAction(Action):
 
         # Analyze Collection file specifications to determine the data structure for upload.
         self.run.set_progress(0, 1, category='analyze_collection')
-        file_specification_skeleton = self._analyze_collection()
+        file_specification_template = self._analyze_collection()
         self.run.set_progress(1, 1, category='analyze_collection')
 
         # Setup result dict.
@@ -153,7 +153,7 @@ class UploadAction(Action):
 
         # Organize data according to Collection file specification structure.
         organized_files = uploader.handle_upload_files()
-        if not self._validate_organized_files(file_specification_skeleton, organized_files):
+        if not self._validate_organized_files(file_specification_template, organized_files):
             self.run.log_message('Validate organized files failed.')
             return result
 
@@ -219,9 +219,10 @@ class UploadAction(Action):
         collection = client.get_dataset(collection_id)
         return collection['file_specifications']
 
-    def _validate_organized_files(self, file_specification_skeleton: Dict, organized_files: List) -> bool:
+    def _validate_organized_files(self, file_specification_template: Dict, organized_files: List) -> bool:
         """Validate organized files from Uploader."""
-        return True
+        validator = FileSpecificationValidator(file_specification_template, organized_files)
+        return validator.validate()
 
     def _upload_files(self, organized_files) -> List:
         """Upload files to synapse-backend.
@@ -6,13 +6,21 @@ from synapse_sdk.utils.storage.registry import STORAGE_PROVIDERS
 
 
 def get_storage(connection_param: str | dict):
+    """Get storage class with connection param.
+
+    Args:
+        connection_param (str | dict): The connection param for the Storage provider.
+
+    Returns:
+        BaseStorage: The storage class object with connection param.
+    """
     storage_scheme = None
     if isinstance(connection_param, dict):
         storage_scheme = connection_param['provider']
     else:
         storage_scheme = urlparse(connection_param).scheme
 
-    assert storage_scheme in STORAGE_PROVIDERS.keys(), _('지원하지 않는 저장소입니다.')
+    assert storage_scheme in STORAGE_PROVIDERS.keys(), _('Storage provider not supported.')
    return STORAGE_PROVIDERS[storage_scheme](connection_param)
 
 
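get_storage() accepts either a provider-keyed dict or a URL-style string whose scheme names the provider. A hedged sketch; the 'sftp' key and the configuration fields are illustrative and must match entries registered in STORAGE_PROVIDERS:

    from synapse_sdk.utils.storage import get_storage

    # Dict form: 'provider' selects the backend, remaining keys are provider-specific.
    storage = get_storage({'provider': 'sftp', 'host': 'sftp.example.com', 'username': 'exporter'})

    # String form: urlparse() extracts the scheme, which must be a registered provider key.
    storage = get_storage('sftp://exporter@sftp.example.com/exports')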
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: synapse-sdk
-Version: 1.0.0a32
+Version: 1.0.0a34
 Summary: synapse sdk
 Author-email: datamaker <developer@datamaker.io>
 License: MIT
@@ -21,22 +21,26 @@ synapse_sdk/cli/plugin/create.py,sha256=HpYTpohV1NbSrULaVUlc4jWLWznPrx7glgydTM3s
 synapse_sdk/cli/plugin/publish.py,sha256=sIl1wiuSC3lAUpE3rOF4UDKDy2G5EVLlelMjk2aT05g,1221
 synapse_sdk/cli/plugin/run.py,sha256=xz5LRm3zh8Y9DMjw5FFRFVRWSCWtYfZJskfCmrPikaQ,2598
 synapse_sdk/clients/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-synapse_sdk/clients/base.py,sha256=m3zEX1wv_sSxa-jBCnE-9Q5-f71o1TdODkWHRE9Vo-E,4403
+synapse_sdk/clients/base.py,sha256=RaQN_41NxFKtVsVKB7I7hdKJAR8ScuyCoK_x1D-U0c0,6399
 synapse_sdk/clients/exceptions.py,sha256=ylv7x10eOp4aA3a48jwonnvqvkiYwzJYXjkVkRTAjwk,220
 synapse_sdk/clients/utils.py,sha256=8pPJTdzHiRPSbZMoQYHAgR2BAMO6u_R_jMV6a2p34iQ,392
 synapse_sdk/clients/agent/__init__.py,sha256=Pz8_iTbIbnb7ywGJ3feqoZVmO2I3mEbwpWsISIxh0BU,1968
 synapse_sdk/clients/agent/core.py,sha256=YicfkO0YvjDDOt1jNWoZ0mokrh8xxKibBL4qF5yOjKs,169
 synapse_sdk/clients/agent/ray.py,sha256=JrwLyVOUDG2yYsbPrxyUtWbM-FWp9B6Bl_GdDby0rt8,1559
 synapse_sdk/clients/agent/service.py,sha256=s7KuPK_DB1nr2VHrigttV1WyFonaGHNrPvU8loRxHcE,478
-synapse_sdk/clients/backend/__init__.py,sha256=aozhPhvRTPHz1P90wxEay07B-Ct4vj_yTw5H9_PJEBE,1105
-synapse_sdk/clients/backend/annotation.py,sha256=eZc5EidgR_RfMGwvv1r1_mLkPdRd8e52c4zuuMjMX34,979
+synapse_sdk/clients/backend/__init__.py,sha256=Fiehino2n3voaHTdpJHXSY7K_CDnMkQeokapbgeoTBk,1187
+synapse_sdk/clients/backend/annotation.py,sha256=f4jS4qlXH7M7mQ3EuCq-NrjJ_hJNDz8pEFAYqf-e008,996
 synapse_sdk/clients/backend/core.py,sha256=5XAOdo6JZ0drfk-FMPJ96SeTd9oja-VnTwzGXdvK7Bg,1027
 synapse_sdk/clients/backend/dataset.py,sha256=11R5LuTva9jgXatxQAlKy7UEJmwIWzTsLVdFf3MZ9F8,3400
-synapse_sdk/clients/backend/integration.py,sha256=MCfeChpLySqlVRc1aZxCfDpQiRH--pfevkCdJDCNZEQ,2506
+synapse_sdk/clients/backend/hitl.py,sha256=na2mSXFud92p4zUEuagcDWk2klxO7xn-e86cm0VZEvs,709
+synapse_sdk/clients/backend/integration.py,sha256=Xi-DIimjaDSaLZWWd0rmFgj4uFav8H4r4gxTSsjX0vo,2647
 synapse_sdk/clients/backend/ml.py,sha256=JoPH9Ly2E3HJ7S5mdGLtcGq7ruQVVrYfWArogwZLlms,1193
+synapse_sdk/clients/backend/models.py,sha256=ycuzIBi3pyKIFkNzrNGsv3cA49BjLmjyylNHBSRG4bI,1047
 synapse_sdk/clients/ray/__init__.py,sha256=9ZSPXVVxlJ8Wp8ku7l021ENtPjVrGgQDgqifkkVAXgM,187
 synapse_sdk/clients/ray/core.py,sha256=a4wyCocAma2HAm-BHlbZnoVbpfdR-Aad2FM0z6vPFvw,731
 synapse_sdk/clients/ray/serve.py,sha256=rbCpXZYWf0oP8XJ9faa9QFNPYU7h8dltIG8xn9ZconY,907
+synapse_sdk/clients/validators/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+synapse_sdk/clients/validators/collections.py,sha256=LtnwvutsScubOUcZ2reGHLCzseXxtNIdnH2nv098aUU,1195
 synapse_sdk/plugins/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 synapse_sdk/plugins/enums.py,sha256=ibixwqA3sCNSriG1jAtL54JQc_Zwo3MufwYUqGhVncc,523
 synapse_sdk/plugins/exceptions.py,sha256=Qs7qODp_RRLO9y2otU2T4ryj5LFwIZODvSIXkAh91u0,691
@@ -56,13 +60,13 @@ synapse_sdk/plugins/categories/data_validation/templates/plugin/__init__.py,sha2
 synapse_sdk/plugins/categories/data_validation/templates/plugin/validation.py,sha256=90I5boUpEXvO3mEuKKBs528ls2A4h8Iw4ReOID2h00Y,139
 synapse_sdk/plugins/categories/export/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 synapse_sdk/plugins/categories/export/actions/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-synapse_sdk/plugins/categories/export/actions/export.py,sha256=45hnvM3myFgRJyGpC_jpDu_VaDC-iaNCqy4DcH24eDU,2926
+synapse_sdk/plugins/categories/export/actions/export.py,sha256=DN1z2zxjzkgAaalEVWyJGXIVkpb4QeppODm1S7XDVic,7693
 synapse_sdk/plugins/categories/export/templates/config.yaml,sha256=N7YmnFROb3s3M35SA9nmabyzoSb5O2t2TRPicwFNN2o,56
 synapse_sdk/plugins/categories/export/templates/plugin/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-synapse_sdk/plugins/categories/export/templates/plugin/export.py,sha256=cSUhgY5FDqMWVj4XlrOfzDN8q-NxOAvB3BTMXdqd-Eg,4604
+synapse_sdk/plugins/categories/export/templates/plugin/export.py,sha256=nbjvgFVQpPN5Lo1UnPL5p__BYeejMZLMZ4RT_yd7vJU,4561
 synapse_sdk/plugins/categories/neural_net/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 synapse_sdk/plugins/categories/neural_net/actions/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-synapse_sdk/plugins/categories/neural_net/actions/deployment.py,sha256=Wmi7in_Mgizt1d5XcDR080h1CIMWKh2_mjub9N380qA,1917
+synapse_sdk/plugins/categories/neural_net/actions/deployment.py,sha256=oetIwZoee5vxriPX3r1onmxgwojUyaRTlnBIdaQ1zk8,3895
 synapse_sdk/plugins/categories/neural_net/actions/inference.py,sha256=0a655ELqNVjPFZTJDiw4EUdcMCPGveUEKyoYqpwMFBU,1019
 synapse_sdk/plugins/categories/neural_net/actions/test.py,sha256=JY25eg-Fo6WbgtMkGoo_qNqoaZkp3AQNEypJmeGzEog,320
 synapse_sdk/plugins/categories/neural_net/actions/train.py,sha256=kve6iTCg2kUeavMQTR2JFuoYDu-QWZFFlB58ZICQtdM,5406
@@ -88,12 +92,12 @@ synapse_sdk/plugins/categories/pre_annotation/templates/plugin/pre_annotation.py
 synapse_sdk/plugins/categories/smart_tool/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 synapse_sdk/plugins/categories/smart_tool/actions/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 synapse_sdk/plugins/categories/smart_tool/actions/auto_label.py,sha256=fHiqA8ntmzjs2GMVMuByR7Clh2zhLie8OPF9B8OmwxM,1279
-synapse_sdk/plugins/categories/smart_tool/templates/config.yaml,sha256=7bvb4M1PLaoTOVDYF05L7yb8ix4rZOrAnEuZ-thMsyo,206
+synapse_sdk/plugins/categories/smart_tool/templates/config.yaml,sha256=Yj5XGJQGBozlv0FCvIhqSbo9uYkIPAta71nE5rm4ye8,273
 synapse_sdk/plugins/categories/smart_tool/templates/plugin/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 synapse_sdk/plugins/categories/smart_tool/templates/plugin/auto_label.py,sha256=eevNg0nOcYFR4z_L_R-sCvVOYoLWSAH1jwDkAf3YCjY,320
 synapse_sdk/plugins/categories/upload/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 synapse_sdk/plugins/categories/upload/actions/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-synapse_sdk/plugins/categories/upload/actions/upload.py,sha256=V7gArcj7FZO8AT_ihUGodrc_7q49Q7j9BKc9DvoYOYQ,11178
+synapse_sdk/plugins/categories/upload/actions/upload.py,sha256=ry5Whogy5opZ7U_G6hQlbej8ufs5BS_VaJLY648vpF4,11347
 synapse_sdk/plugins/categories/upload/templates/config.yaml,sha256=0PhB2uD-9ufavZs7EiF6xj4aBgZuif9mFFGGfzG7HuY,147
 synapse_sdk/plugins/categories/upload/templates/plugin/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 synapse_sdk/plugins/categories/upload/templates/plugin/upload.py,sha256=dnK8gy33GjG5ettayawDJv1gM3xCm1K6lM-PfeeTjQw,1163
@@ -119,15 +123,15 @@ synapse_sdk/utils/pydantic/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJ
 synapse_sdk/utils/pydantic/config.py,sha256=1vYOcUI35GslfD1rrqhFkNXXJOXt4IDqOPSx9VWGfNE,123
 synapse_sdk/utils/pydantic/errors.py,sha256=0v0T12eQBr1KrFiEOBu6KMaPK4aPEGEC6etPJGoR5b4,1061
 synapse_sdk/utils/pydantic/validators.py,sha256=G47P8ObPhsePmd_QZDK8EdPnik2CbaYzr_N4Z6En8dc,193
-synapse_sdk/utils/storage/__init__.py,sha256=AOPEo1_4Tssr3X0_pu5cbmJP_V5ywx9qSocIzZTc2kM,1005
+synapse_sdk/utils/storage/__init__.py,sha256=Xb9wcc0Dsonkma2gW376dTnIOHix3NwmHEEvnOxMeR8,1233
 synapse_sdk/utils/storage/registry.py,sha256=WaSN9SJR7s9sZgmTVl5k4mLFz-9R6X4ii82wefxs95A,335
 synapse_sdk/utils/storage/providers/__init__.py,sha256=NM9yRIWcPkH53DeNHgIhH9zaDFK8SJv0KptP1Afulyw,1125
 synapse_sdk/utils/storage/providers/gcp.py,sha256=i2BQCu1Kej1If9SuNr2_lEyTcr5M_ncGITZrL0u5wEA,363
 synapse_sdk/utils/storage/providers/s3.py,sha256=r94aUGVNf8yxihU0lN62yaXdxSS7P783_sfD-hCiK40,1191
 synapse_sdk/utils/storage/providers/sftp.py,sha256=TUQXkKJf0-fh8NhGC_1zzqI4autFNHZVCqngwkQ1aD4,523
-synapse_sdk-1.0.0a32.dist-info/licenses/LICENSE,sha256=bKzmC5YAg4V1Fhl8OO_tqY8j62hgdncAkN7VrdjmrGk,1101
-synapse_sdk-1.0.0a32.dist-info/METADATA,sha256=jJsMUwX1BNln7bpCx7CGxkQQuE6UCCvSk4P2i9as7x4,1160
-synapse_sdk-1.0.0a32.dist-info/WHEEL,sha256=tTnHoFhvKQHCh4jz3yCn0WPTYIy7wXx3CJtJ7SJGV7c,91
-synapse_sdk-1.0.0a32.dist-info/entry_points.txt,sha256=VNptJoGoNJI8yLXfBmhgUefMsmGI0m3-0YoMvrOgbxo,48
-synapse_sdk-1.0.0a32.dist-info/top_level.txt,sha256=ytgJMRK1slVOKUpgcw3LEyHHP7S34J6n_gJzdkcSsw8,12
-synapse_sdk-1.0.0a32.dist-info/RECORD,,
+synapse_sdk-1.0.0a34.dist-info/licenses/LICENSE,sha256=bKzmC5YAg4V1Fhl8OO_tqY8j62hgdncAkN7VrdjmrGk,1101
+synapse_sdk-1.0.0a34.dist-info/METADATA,sha256=b2-hmwsJ801kjGKQ9F5w7h8sBgD61FJ4mv0JkETQlFg,1160
+synapse_sdk-1.0.0a34.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
+synapse_sdk-1.0.0a34.dist-info/entry_points.txt,sha256=VNptJoGoNJI8yLXfBmhgUefMsmGI0m3-0YoMvrOgbxo,48
+synapse_sdk-1.0.0a34.dist-info/top_level.txt,sha256=ytgJMRK1slVOKUpgcw3LEyHHP7S34J6n_gJzdkcSsw8,12
+synapse_sdk-1.0.0a34.dist-info/RECORD,,
@@ -1,5 +1,5 @@
 Wheel-Version: 1.0
-Generator: setuptools (77.0.1)
+Generator: setuptools (78.1.0)
 Root-Is-Purelib: true
 Tag: py3-none-any
 