synapse-sdk 1.0.0a28__py3-none-any.whl → 1.0.0a30__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of synapse-sdk might be problematic. Click here for more details.

Files changed (27) hide show
  1. synapse_sdk/cli/plugin/__init__.py +2 -0
  2. synapse_sdk/clients/backend/integration.py +12 -0
  3. synapse_sdk/clients/backend/ml.py +4 -0
  4. synapse_sdk/clients/base.py +34 -8
  5. synapse_sdk/loggers.py +9 -2
  6. synapse_sdk/plugins/categories/export/actions/export.py +75 -0
  7. synapse_sdk/plugins/categories/export/actions/utils.py +5 -0
  8. synapse_sdk/plugins/categories/export/templates/config.yaml +3 -0
  9. synapse_sdk/plugins/categories/export/templates/plugin/__init__.py +0 -0
  10. synapse_sdk/plugins/categories/export/templates/plugin/export.py +126 -0
  11. synapse_sdk/plugins/categories/neural_net/actions/train.py +5 -0
  12. synapse_sdk/plugins/categories/neural_net/templates/config.yaml +3 -0
  13. synapse_sdk/plugins/models.py +2 -2
  14. synapse_sdk/utils/file.py +35 -0
  15. synapse_sdk/utils/storage/__init__.py +20 -0
  16. synapse_sdk/utils/storage/providers/__init__.py +42 -0
  17. synapse_sdk/utils/storage/providers/gcp.py +13 -0
  18. synapse_sdk/utils/storage/providers/s3.py +43 -0
  19. synapse_sdk/utils/storage/providers/sftp.py +16 -0
  20. synapse_sdk/utils/storage/registry.py +11 -0
  21. {synapse_sdk-1.0.0a28.dist-info → synapse_sdk-1.0.0a30.dist-info}/METADATA +3 -1
  22. {synapse_sdk-1.0.0a28.dist-info → synapse_sdk-1.0.0a30.dist-info}/RECORD +26 -17
  23. {synapse_sdk-1.0.0a28.dist-info → synapse_sdk-1.0.0a30.dist-info}/WHEEL +1 -1
  24. synapse_sdk/utils/storage.py +0 -91
  25. {synapse_sdk-1.0.0a28.dist-info → synapse_sdk-1.0.0a30.dist-info}/LICENSE +0 -0
  26. {synapse_sdk-1.0.0a28.dist-info → synapse_sdk-1.0.0a30.dist-info}/entry_points.txt +0 -0
  27. {synapse_sdk-1.0.0a28.dist-info → synapse_sdk-1.0.0a30.dist-info}/top_level.txt +0 -0
@@ -1,4 +1,5 @@
1
1
  import os
2
+ import sys
2
3
 
3
4
  import click
4
5
  from dotenv import load_dotenv
@@ -12,6 +13,7 @@ from .run import run
12
13
 
13
14
  load_dotenv_default_alias()
14
15
  load_dotenv(os.path.join(os.getcwd(), '.env'), override=True)
16
+ sys.path.append(os.getcwd())
15
17
 
16
18
 
17
19
  @click.group(context_settings={'obj': {}, 'auto_envvar_prefix': 'SYNAPSE_PLUGIN'})
@@ -1,4 +1,5 @@
1
1
  from synapse_sdk.clients.base import BaseClient
2
+ from synapse_sdk.utils.file import convert_file_to_base64
2
3
 
3
4
 
4
5
  class IntegrationClientMixin(BaseClient):
@@ -60,6 +61,13 @@ class IntegrationClientMixin(BaseClient):
60
61
 
61
62
  def create_logs(self, data):
62
63
  path = 'logs/'
64
+ if not isinstance(data, list):
65
+ data = [data]
66
+
67
+ for item in data:
68
+ if 'file' in item:
69
+ item['file'] = convert_file_to_base64(item['file'])
70
+
63
71
  return self._post(path, data=data)
64
72
 
65
73
  def create_serve_application(self, data):
@@ -69,3 +77,7 @@ class IntegrationClientMixin(BaseClient):
69
77
  def list_serve_applications(self, params=None, list_all=False):
70
78
  path = 'serve_applications/'
71
79
  return self._list(path, params=params, list_all=list_all)
80
+
81
+ def get_storage(self, pk):
82
+ path = f'storages/{pk}/'
83
+ return self._get(path)
@@ -22,3 +22,7 @@ class MLClientMixin(BaseClient):
22
22
  path = 'ground_truth_events/'
23
23
  url_conversion = get_default_url_conversion(url_conversion, files_fields=['files'])
24
24
  return self._list(path, params=params, url_conversion=url_conversion, list_all=list_all)
25
+
26
+ def get_ground_truth_version(self, pk):
27
+ path = f'ground_truth_dataset_versions/{pk}/'
28
+ return self._get(path)
@@ -25,20 +25,46 @@ class BaseClient:
25
25
  def _get_headers(self):
26
26
  return {}
27
27
 
28
- def _request(self, method, path, **kwargs):
28
+ def _request(self, method: str, path: str, **kwargs) -> dict | str:
29
+ """Request handler for all HTTP methods.
30
+
31
+ Args:
32
+ method (str): HTTP method to use.
33
+ path (str): URL path to request.
34
+ **kwargs: Additional keyword arguments to pass to the request.
35
+
36
+ Returns:
37
+ dict | str: JSON response or text response.
38
+ """
29
39
  url = self._get_url(path)
30
40
  headers = self._get_headers()
31
41
  headers.update(kwargs.pop('headers', {}))
32
42
 
33
43
  if method in ['post', 'put', 'patch']:
44
+ # If files are included in the request, open them as binary files
34
45
  if kwargs.get('files') is not None:
35
- for name, file in kwargs['files'].items():
36
- if isinstance(file, (str, Path)):
37
- kwargs['files'][name] = Path(str(file)).open(mode='rb')
38
- if 'data' in kwargs:
39
- for name, value in kwargs['data'].items():
40
- if isinstance(value, dict):
41
- kwargs['data'][name] = json.dumps(value)
46
+ opened_files = [] # List to store opened files
47
+ try:
48
+ for name, file in kwargs['files'].items():
49
+ # If file is a path string, bind it as a Path object and open
50
+ if isinstance(file, str):
51
+ opened_file = open(Path(file), mode='rb')
52
+ kwargs['files'][name] = opened_file
53
+ opened_files.append(opened_file)
54
+ # If file is a Path object, open it directly
55
+ elif isinstance(file, Path):
56
+ opened_file = open(file, mode='rb')
57
+ kwargs['files'][name] = opened_file
58
+ opened_files.append(opened_file)
59
+ if 'data' in kwargs:
60
+ for name, value in kwargs['data'].items():
61
+ if isinstance(value, dict):
62
+ kwargs['data'][name] = json.dumps(value)
63
+ finally:
64
+ # Close all opened files
65
+ for opened_file in opened_files:
66
+ opened_file.close()
67
+
42
68
  else:
43
69
  headers['Content-Type'] = 'application/json'
44
70
  if 'data' in kwargs:
synapse_sdk/loggers.py CHANGED
@@ -68,13 +68,16 @@ class BaseLogger:
68
68
 
69
69
  return progress
70
70
 
71
+ def log(self, action, data, file=None):
72
+ raise NotImplementedError
73
+
71
74
 
72
75
  class ConsoleLogger(BaseLogger):
73
76
  def set_progress(self, current, total, category=None):
74
77
  super().set_progress(current, total, category=category)
75
78
  print(self.get_current_progress())
76
79
 
77
- def log(self, action, data):
80
+ def log(self, action, data, file=None):
78
81
  print(action, data)
79
82
 
80
83
 
@@ -99,7 +102,7 @@ class BackendLogger(BaseLogger):
99
102
  except ClientError:
100
103
  pass
101
104
 
102
- def log(self, event, data):
105
+ def log(self, event, data, file=None):
103
106
  print(event, data)
104
107
 
105
108
  log = {
@@ -108,7 +111,11 @@ class BackendLogger(BaseLogger):
108
111
  'datetime': datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S.%f'),
109
112
  'job': self.job_id,
110
113
  }
114
+ if file:
115
+ log['file'] = file
116
+
111
117
  self.logs_queue.append(log)
118
+
112
119
  try:
113
120
  self.client.create_logs(self.logs_queue)
114
121
  self.logs_queue.clear()
@@ -1,6 +1,42 @@
1
+ from pydantic import BaseModel, field_validator
2
+ from pydantic_core import PydanticCustomError
3
+
4
+ from synapse_sdk.clients.exceptions import ClientError
5
+ from synapse_sdk.i18n import gettext as _
1
6
  from synapse_sdk.plugins.categories.base import Action
2
7
  from synapse_sdk.plugins.categories.decorators import register_action
3
8
  from synapse_sdk.plugins.enums import PluginCategory, RunMethod
9
+ from synapse_sdk.utils.storage import get_pathlib
10
+
11
+
12
class ExportParams(BaseModel):
    """Validated parameters for the export action."""

    # Primary key of the destination storage on the Synapse backend.
    storage: int
    # Whether original source files should be downloaded alongside the JSON.
    save_original_file: bool = True
    # Destination path (relative to the storage root) for exported data.
    path: str
    # Primary key of the ground-truth dataset version to export.
    ground_truth_dataset_version: int
    # Extra query filters forwarded to the ground-truth listing API.
    filter: dict

    @field_validator('storage')
    @staticmethod
    def check_storage_exists(value, info):
        """Reject storage ids the backend cannot resolve."""
        # NOTE(review): assumes validation is always invoked with a context
        # dict carrying the running action under 'action' — if context is
        # omitted, info.context is None and this raises TypeError. Confirm
        # all callers pass the context.
        action = info.context['action']
        client = action.client
        try:
            client.get_storage(value)
        except ClientError:
            raise PydanticCustomError('client_error', _('Unable to get storage from Synapse backend.'))
        return value

    @field_validator('ground_truth_dataset_version')
    @staticmethod
    def check_ground_truth_dataset_version_exists(value, info):
        """Reject dataset-version ids the backend cannot resolve."""
        # NOTE(review): same context requirement as the storage validator —
        # info.context must carry the running action.
        action = info.context['action']
        client = action.client
        try:
            client.get_ground_truth_version(value)
        except ClientError:
            raise PydanticCustomError('client_error', _('Unable to get Ground Truth dataset version.'))
        return value
4
40
 
5
41
 
6
42
  @register_action
@@ -8,3 +44,42 @@ class ExportAction(Action):
8
44
  name = 'export'
9
45
  category = PluginCategory.EXPORT
10
46
  method = RunMethod.JOB
47
+ params_model = ExportParams
48
+ progress_categories = {
49
+ 'dataset_conversion': {
50
+ 'proportion': 100,
51
+ }
52
+ }
53
+
54
+ def get_dataset(self, results):
55
+ """Get dataset for export."""
56
+ for result in results:
57
+ yield {
58
+ 'data': result['data'],
59
+ 'files': result['data_unit']['files'],
60
+ 'id': result['ground_truth'],
61
+ }
62
+
63
    def get_filtered_results(self):
        """Fetch ground-truth events matching the configured filter.

        Resolves the parent dataset id behind ``ground_truth_dataset_version``
        and injects it into ``self.params['filter']`` (mutates params), then
        lists all matching events with data expanded.

        Returns:
            tuple: ``(results, count)`` from the backend listing API.
        """
        ground_truth_dataset_version = self.params['ground_truth_dataset_version']
        # Resolve the version's parent dataset and filter events on it.
        self.params['filter']['ground_truth_dataset'] = self.client.get_ground_truth_version(
            ground_truth_dataset_version
        )['ground_truth_dataset']
        filters = {'expand': 'data', **self.params['filter']}

        try:
            gt_dataset_events_list = self.client.list_ground_truth_events(params=filters, list_all=True)
            results = gt_dataset_events_list[0]
            count = gt_dataset_events_list[1]
        except ClientError:
            # NOTE(review): raising PydanticCustomError outside a validator is
            # unusual — confirm callers expect this rather than ClientError.
            raise PydanticCustomError('client_error', _('Unable to get Ground Truth dataset.'))
        return results, count
78
+
79
+ def start(self):
80
+ self.params['results'], self.params['count'] = self.get_filtered_results()
81
+ dataset = self.get_dataset(self.params['results'])
82
+
83
+ storage = self.client.get_storage(self.params['storage'])
84
+ pathlib_cwd = get_pathlib(storage, self.params['path'])
85
+ return self.entrypoint(self.run, dataset, pathlib_cwd, **self.params)
@@ -0,0 +1,5 @@
1
+ from pathlib import Path
2
+
3
+
4
def get_original_file_path(files):
    """Return the original-file path recorded on the first file entry.

    Args:
        files (dict): Mapping of file-field name to file info; each value is
            expected to carry ``meta.path_original``.

    Returns:
        Path: The original path of the first entry.

    Raises:
        ValueError: If ``files`` is empty (previously a bare StopIteration
            escaped, which would silently terminate a calling generator).
    """
    try:
        first_entry = next(iter(files.values()))
    except StopIteration:
        raise ValueError('files must contain at least one entry') from None
    return Path(first_entry['meta']['path_original'])
@@ -0,0 +1,3 @@
1
+ actions:
2
+ export:
3
+ entrypoint: plugin.export.export
@@ -0,0 +1,126 @@
1
+ import json
2
+
3
+ import requests
4
+
5
+ from synapse_sdk.plugins.categories.export.actions.utils import get_original_file_path
6
+
7
+
8
def export(run, input_dataset, path_root, **params):
    """Executes the export task.

    Args:
        run : Execution object
        input_dataset (generator):
            - data (dict): dm_schema_data information.
            - files (dict): File information. Includes file URL, original file path, metadata, etc.
            - id (int): ground_truth ID
        path_root : Save path
        **params: Additional parameters

    Returns:
        dict: Result
    """

    path_root.mkdir(parents=True, exist_ok=True)
    run.log_message('Starting export process.')

    # results: Contains all information fetched through the list API.
    results = params.get('results', [])

    # BUG FIX: the flag was originally bound to a local named
    # `save_original_file`, shadowing the module-level function of the same
    # name — the call below would then have raised TypeError on a bool.
    should_save_original = params.get('save_original_file')
    errors_json_file_list = []
    errors_original_file_list = []

    # Path to save JSON files
    json_output_path = path_root / 'json'
    json_output_path.mkdir(parents=True, exist_ok=True)

    # Path to save original files
    origin_files_output_path = path_root / 'origin_files'
    origin_files_output_path.mkdir(parents=True, exist_ok=True)

    total = len(results)
    for no, input_data in enumerate(input_dataset):
        run.set_progress(no, total, category='dataset_conversion')
        preprocessed_data = before_convert(input_data)
        converted_data = convert_data(preprocessed_data)
        final_data = after_convert(converted_data)

        # Call if original file extraction is needed
        if should_save_original:
            save_original_file(final_data, origin_files_output_path, errors_original_file_list)

        # Extract data as JSON files
        save_as_json(final_data, json_output_path, errors_json_file_list)

    run.log_message('Saving converted dataset.')
    run.end_log()

    # Save error list files
    if errors_json_file_list or errors_original_file_list:
        export_error_file = {'json_file_name': errors_json_file_list, 'origin_file_name': errors_original_file_list}
        with (path_root / 'error_file_list.json').open('w', encoding='utf-8') as f:
            json.dump(export_error_file, f, indent=4, ensure_ascii=False)

    return {'export_path': path_root}
66
+
67
+
68
def convert_data(data):
    """Conversion hook: transform a dataset record (identity by default)."""
    return data


def before_convert(data):
    """Pre-conversion hook: prepare a record before conversion (identity by default)."""
    return data


def after_convert(data):
    """Post-conversion hook: finalize a record after conversion (identity by default)."""
    return data
81
+
82
+
83
def save_original_file(result, base_path, error_file_list):
    """Saves the original file.

    Args:
        result (dict): Result data
        base_path (Path): Save path
        error_file_list (list): List of error files

    Returns:
        base_path (Path): Save path
    """
    file_url = next(iter(result['files'].values()))['url']
    file_name = get_original_file_path(result['files']).name
    try:
        # Download inside the try so network failures are recorded per file
        # instead of aborting the whole export; bound the wait and reject
        # HTTP error responses (both were previously unchecked).
        response = requests.get(file_url, timeout=60)
        response.raise_for_status()
        with (base_path / file_name).open('wb') as file:
            file.write(response.content)
    except Exception as e:
        error_file_list.append([file_name, str(e)])

    return base_path
104
+
105
+
106
def save_as_json(result, base_path, error_file_list):
    """Saves the data as a JSON file.

    The JSON file is named after the stem of the original file.

    Args:
        result (dict): Result data
        base_path (Path): Save path
        error_file_list (list): List of error files

    Returns:
        base_path (Path): Save path
    """
    file_name = get_original_file_path(result['files']).stem
    target = base_path / f'{file_name}.json'
    try:
        with target.open('w', encoding='utf-8') as f:
            json.dump(result['data'], f, indent=4, ensure_ascii=False)
    except Exception as e:
        error_file_list.append([f'{file_name}.json', str(e)])

    return base_path
@@ -18,8 +18,13 @@ from synapse_sdk.utils.pydantic.validators import non_blank
18
18
 
19
19
  class TrainRun(Run):
20
20
  def log_metric(self, category, key, value, **metrics):
21
+ # TODO validate input via plugin config
21
22
  self.log('metric', {'category': category, 'key': key, 'value': value, 'metrics': metrics})
22
23
 
24
+ def log_visualization(self, category, group, image, **meta):
25
+ # TODO validate input via plugin config
26
+ self.log('visualization', {'category': category, 'group': group, **meta}, file=image)
27
+
23
28
 
24
29
  class Hyperparameter(BaseModel):
25
30
  batch_size: int
@@ -8,6 +8,9 @@ actions:
8
8
  epoch:
9
9
  - loss
10
10
  - miou
11
+ hyperparameters:
12
+ ui_schema: |
13
+ Dumped FormKit Schema for hyperparameters
11
14
  deployment:
12
15
  entrypoint: plugin.inference.MockNetInference
13
16
  inference:
@@ -113,8 +113,8 @@ class Run:
113
113
  def set_progress(self, current, total, category=''):
114
114
  self.logger.set_progress(current, total, category)
115
115
 
116
- def log(self, event, data):
117
- self.logger.log(event, data)
116
+ def log(self, event, data, file=None):
117
+ self.logger.log(event, data, file=file)
118
118
 
119
119
  def log_message(self, message, context=Context.INFO.value):
120
120
  self.logger.log('message', {'context': context, 'content': message})
synapse_sdk/utils/file.py CHANGED
@@ -1,6 +1,8 @@
1
1
  import asyncio
2
+ import base64
2
3
  import hashlib
3
4
  import json
5
+ import mimetypes
4
6
  import operator
5
7
  import zipfile
6
8
  from functools import reduce
@@ -182,3 +184,36 @@ def get_temp_path(sub_path=None):
182
184
  if sub_path:
183
185
  path = path / sub_path
184
186
  return path
187
+
188
+
189
def convert_file_to_base64(file_path):
    """Convert a file to a base64 data URI.

    Args:
        file_path (str): Path to the file to convert

    Returns:
        str: ``data:<mime>;base64,<payload>`` string of the file contents

    Raises:
        FileNotFoundError: If the file does not exist.
        ValueError: If the MIME type cannot be guessed from the file name
            (previously enforced with ``assert``, which is stripped under -O).
    """
    # Convert string path to Path object
    path = Path(file_path)

    try:
        # Read binary content of the file
        binary_content = path.read_bytes()
    except FileNotFoundError:
        raise FileNotFoundError(f'File not found: {file_path}') from None

    # Convert to base64
    base64_encoded = base64.b64encode(binary_content).decode('utf-8')

    # Get the MIME type of the file (avoid binding `_`, which the codebase
    # conventionally uses for gettext)
    mime_type, _encoding = mimetypes.guess_type(path)
    if mime_type is None:
        raise ValueError(f'MIME type cannot be guessed for: {file_path}')

    return f'data:{mime_type};base64,{base64_encoded}'
@@ -0,0 +1,20 @@
1
+ from urllib.parse import urlparse
2
+
3
+ from synapse_sdk.i18n import gettext as _
4
+ from synapse_sdk.utils.storage.registry import STORAGE_PROVIDERS
5
+
6
+
7
def get_storage(connection_param: str | dict):
    """Resolve a storage provider instance from a connection parameter.

    Args:
        connection_param: Either a backend storage record (dict with a
            ``provider`` key) or a connection URL whose scheme names the
            provider.

    Returns:
        An instantiated provider from STORAGE_PROVIDERS.

    Raises:
        ValueError: If the provider/scheme is not registered (previously an
            ``assert``, which is stripped when Python runs with -O).
    """
    if isinstance(connection_param, dict):
        storage_scheme = connection_param['provider']
    else:
        storage_scheme = urlparse(connection_param).scheme

    if storage_scheme not in STORAGE_PROVIDERS:
        raise ValueError(_('지원하지 않는 저장소입니다.'))
    return STORAGE_PROVIDERS[storage_scheme](connection_param)
16
+
17
+
18
def get_pathlib(storage_config, path_root):
    """Build a pathlib-like root object for the given storage configuration."""
    provider = get_storage(storage_config)
    return provider.get_pathlib(path_root)
@@ -0,0 +1,42 @@
1
+ from urllib.parse import parse_qs, urlparse
2
+
3
+
4
+ class BaseStorage:
5
+ url = None
6
+ options = None
7
+ OPTION_CASTS = {}
8
+
9
+ def __init__(self, connection_params: str | dict):
10
+ self.url = None
11
+
12
+ if isinstance(connection_params, dict):
13
+ self.query_params = connection_params['configuration']
14
+ else:
15
+ self.url = urlparse(connection_params)
16
+ self.query_params = self.url_querystring_to_dict()
17
+
18
+ def url_querystring_to_dict(self):
19
+ query_string = self.url.query
20
+
21
+ query_dict = parse_qs(query_string)
22
+
23
+ for key, value in query_dict.items():
24
+ if len(value) == 1:
25
+ query_dict[key] = value[0]
26
+
27
+ return {
28
+ key: self.OPTION_CASTS[key](value) if key in self.OPTION_CASTS else value
29
+ for key, value in query_dict.items()
30
+ }
31
+
32
+ def upload(self, source, target):
33
+ raise NotImplementedError
34
+
35
+ def exists(self, target):
36
+ raise NotImplementedError
37
+
38
+ def get_url(self, target):
39
+ raise NotImplementedError
40
+
41
+ def get_pathlib(self, path):
42
+ raise NotImplementedError
@@ -0,0 +1,13 @@
1
+ from upath import UPath
2
+
3
+ from synapse_sdk.utils.storage.providers import BaseStorage
4
+
5
+
6
class GCPStorage(BaseStorage):
    """Google Cloud Storage provider backed by universal-pathlib (UPath)."""

    def __init__(self, url):
        super().__init__(url)

        bucket_name = self.query_params['bucket_name']
        credentials = self.query_params['credentials']
        self.upath = UPath(f'gs://{bucket_name}', token=credentials)

    def get_pathlib(self, path):
        """Return a UPath rooted at the bucket, joined with ``path``."""
        return self.upath.joinuri(path)
@@ -0,0 +1,43 @@
1
+ from upath import UPath
2
+
3
+ from synapse_sdk.utils.storage.providers import BaseStorage
4
+
5
+
6
class S3Storage(BaseStorage):
    """Amazon S3 / MinIO storage provider backed by universal-pathlib (UPath)."""

    ENDPOINT_URL = 'https://s3.amazonaws.com'
    DEFAULT_REGION = 'us-east-1'

    def __init__(self, url):
        super().__init__(url)

        self.upath = self._get_upath()

    def _get_upath(self):
        """Build the bucket-root UPath from the parsed connection params."""
        upath_kwargs = {
            'key': self.query_params['access_key'],
            'secret': self.query_params['secret_key'],
            'client_kwargs': {'region_name': self.query_params.get('region_name')},
        }

        # Custom endpoints (e.g. MinIO) override the default AWS endpoint.
        if self.query_params.get('endpoint_url'):
            upath_kwargs['endpoint_url'] = self.query_params['endpoint_url']

        return UPath(
            f's3://{self.query_params["bucket_name"]}',
            **upath_kwargs,
        )

    def upload(self, source, target):
        """Upload the local file ``source`` to ``target`` under the bucket.

        BUG FIX: the previous body called ``self.upath.write_text(file.read(),
        target)`` — passing bytes to ``write_text`` and misusing its signature.
        Write the bytes to the joined target path instead.
        """
        with open(source, 'rb') as file:
            self.upath.joinuri(target).write_bytes(file.read())

        return self.get_url(target)

    def exists(self, target):
        """Return True if ``target`` exists under the bucket root.

        BUG FIX: pathlib's ``exists()`` takes no positional path argument;
        join the target first.
        """
        return self.upath.joinuri(target).exists()

    def get_url(self, target):
        """Return the full s3:// URL of ``target``."""
        return str(self.upath.joinuri(target))

    def get_pathlib(self, path):
        """Return a UPath rooted at the bucket, joined with ``path``."""
        return self.upath.joinuri(path)
@@ -0,0 +1,16 @@
1
+ from upath import UPath
2
+
3
+ from synapse_sdk.utils.storage.providers import BaseStorage
4
+
5
+
6
class SFTPStorage(BaseStorage):
    """SFTP storage provider backed by universal-pathlib (UPath)."""

    def get_pathlib(self, path):
        """Return a UPath under the configured root path for ``path``."""
        config = self.query_params
        credentials = config['params']

        # '/' means the storage root itself; joining a literal '/' would
        # reset the path, so map it to the empty string.
        relative = '' if path == '/' else path

        base = UPath(
            f'sftp://{config["host"]}',
            username=credentials['username'],
            password=credentials['password'],
        )
        return base / config['root_path'] / relative
@@ -0,0 +1,11 @@
1
+ from synapse_sdk.utils.storage.providers.gcp import GCPStorage
2
+ from synapse_sdk.utils.storage.providers.s3 import S3Storage
3
+ from synapse_sdk.utils.storage.providers.sftp import SFTPStorage
4
+
5
+ STORAGE_PROVIDERS = {
6
+ 's3': S3Storage,
7
+ 'amazon_s3': S3Storage,
8
+ 'minio': S3Storage,
9
+ 'gcp': GCPStorage,
10
+ 'sftp': SFTPStorage,
11
+ }
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.2
2
2
  Name: synapse-sdk
3
- Version: 1.0.0a28
3
+ Version: 1.0.0a30
4
4
  Summary: synapse sdk
5
5
  Author-email: datamaker <developer@datamaker.io>
6
6
  License: MIT
@@ -17,6 +17,8 @@ Requires-Dist: python-dotenv
17
17
  Requires-Dist: pyyaml
18
18
  Requires-Dist: pydantic
19
19
  Requires-Dist: pyjwt
20
+ Requires-Dist: universal-pathlib
21
+ Requires-Dist: fsspec[gcs,s3,sftp]
20
22
  Provides-Extra: all
21
23
  Requires-Dist: ray[all]; extra == "all"
22
24
 
@@ -4,7 +4,7 @@ locale/ko/LC_MESSAGES/messages.mo,sha256=7HJEJA0wKlN14xQ5VF4FCNet54tjw6mfWYj3IaB
4
4
  locale/ko/LC_MESSAGES/messages.po,sha256=TFii_RbURDH-Du_9ZQf3wNh-2briGk1IqY33-9GKrMU,1126
5
5
  synapse_sdk/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
6
6
  synapse_sdk/i18n.py,sha256=VXMR-Zm_1hTAg9iPk3YZNNq-T1Bhx1J2fEtRT6kyYbg,766
7
- synapse_sdk/loggers.py,sha256=RsDDOiOeUCih1XOkWQJseYdYCX_wt50AZJRe6aPf96Q,4004
7
+ synapse_sdk/loggers.py,sha256=OSTDDhEAvj8fiAuYNZqsZ9bygGM20sMC5yJ_nOaLDDU,4155
8
8
  synapse_sdk/types.py,sha256=khzn8KpgxFdn1SrpbcuX84m_Md1Mz_HIoUoPq8uok40,698
9
9
  synapse_sdk/cli/__init__.py,sha256=P-_FXCqb_nTVdQznuHop6kDXF_JuncZpeAmgHiGoILQ,152
10
10
  synapse_sdk/cli/alias/__init__.py,sha256=jDy8N_KupVy7n_jKKWhjQOj76-mR-uoVvMoyzObUkuI,405
@@ -16,12 +16,12 @@ synapse_sdk/cli/alias/list.py,sha256=fsKe5MVyl9ki3bwEwDj1nWAWfnNKOmkLO3bbbHOfsL4
16
16
  synapse_sdk/cli/alias/read.py,sha256=gkcSO6zAqvqHMduKIajvzftEgYphp4pIVjsIShgPoHk,349
17
17
  synapse_sdk/cli/alias/update.py,sha256=LOpkpe6ZP_HcOOwC5RkJHVxQuNcQHGnyN98tABjz754,531
18
18
  synapse_sdk/cli/alias/utils.py,sha256=9kWrDfRCFqlkCtLXSleMJ-PH6Q49R1FfnbfyY34ck5s,1540
19
- synapse_sdk/cli/plugin/__init__.py,sha256=P5WMpTe_cVvaXdZngk8_ugJynRdel9rxVicwi05U3zo,744
19
+ synapse_sdk/cli/plugin/__init__.py,sha256=fonDXnEc33Pz4ibFhp3WILLyElHDwMnXvaw0tZy6tak,784
20
20
  synapse_sdk/cli/plugin/create.py,sha256=HpYTpohV1NbSrULaVUlc4jWLWznPrx7glgydTM3sS5E,218
21
21
  synapse_sdk/cli/plugin/publish.py,sha256=sIl1wiuSC3lAUpE3rOF4UDKDy2G5EVLlelMjk2aT05g,1221
22
22
  synapse_sdk/cli/plugin/run.py,sha256=xz5LRm3zh8Y9DMjw5FFRFVRWSCWtYfZJskfCmrPikaQ,2598
23
23
  synapse_sdk/clients/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
24
- synapse_sdk/clients/base.py,sha256=q5jIsL2ygRq8ivLWH42E3njvx4RexjsjALZqhtGxbxM,3511
24
+ synapse_sdk/clients/base.py,sha256=l4_9BvqKykE9EiW9KygTRBf4tFUZLutG4CfwFIo2LRg,4710
25
25
  synapse_sdk/clients/exceptions.py,sha256=ylv7x10eOp4aA3a48jwonnvqvkiYwzJYXjkVkRTAjwk,220
26
26
  synapse_sdk/clients/utils.py,sha256=8pPJTdzHiRPSbZMoQYHAgR2BAMO6u_R_jMV6a2p34iQ,392
27
27
  synapse_sdk/clients/agent/__init__.py,sha256=Pz8_iTbIbnb7ywGJ3feqoZVmO2I3mEbwpWsISIxh0BU,1968
@@ -32,15 +32,15 @@ synapse_sdk/clients/backend/__init__.py,sha256=aozhPhvRTPHz1P90wxEay07B-Ct4vj_yT
32
32
  synapse_sdk/clients/backend/annotation.py,sha256=eZc5EidgR_RfMGwvv1r1_mLkPdRd8e52c4zuuMjMX34,979
33
33
  synapse_sdk/clients/backend/core.py,sha256=5XAOdo6JZ0drfk-FMPJ96SeTd9oja-VnTwzGXdvK7Bg,1027
34
34
  synapse_sdk/clients/backend/dataset.py,sha256=w7izflbTjHKysiDl7ia7MAO391_dzN2ofK40A7QwtBQ,1721
35
- synapse_sdk/clients/backend/integration.py,sha256=Jg_8fEmbrgYXfZZcG8cDtLxR6ugPmnbNhPDyRu_Uib0,2160
36
- synapse_sdk/clients/backend/ml.py,sha256=jlkqS9pI0S0Ubq4pWVeaaaPk-E0J-cZg5zkSX7GuQ_o,1063
35
+ synapse_sdk/clients/backend/integration.py,sha256=MCfeChpLySqlVRc1aZxCfDpQiRH--pfevkCdJDCNZEQ,2506
36
+ synapse_sdk/clients/backend/ml.py,sha256=JoPH9Ly2E3HJ7S5mdGLtcGq7ruQVVrYfWArogwZLlms,1193
37
37
  synapse_sdk/clients/ray/__init__.py,sha256=9ZSPXVVxlJ8Wp8ku7l021ENtPjVrGgQDgqifkkVAXgM,187
38
38
  synapse_sdk/clients/ray/core.py,sha256=a4wyCocAma2HAm-BHlbZnoVbpfdR-Aad2FM0z6vPFvw,731
39
39
  synapse_sdk/clients/ray/serve.py,sha256=rbCpXZYWf0oP8XJ9faa9QFNPYU7h8dltIG8xn9ZconY,907
40
40
  synapse_sdk/plugins/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
41
41
  synapse_sdk/plugins/enums.py,sha256=s59P6Oz2WAK9IX-kLVhNOvNKYJifKlWBhPpZbc9-ttE,486
42
42
  synapse_sdk/plugins/exceptions.py,sha256=Qs7qODp_RRLO9y2otU2T4ryj5LFwIZODvSIXkAh91u0,691
43
- synapse_sdk/plugins/models.py,sha256=JFaU1aqzminwWasNKuRBYd5hrBTRHDOZkl2nqZ1l0IM,3687
43
+ synapse_sdk/plugins/models.py,sha256=7En1biVK_7kR8aI-3I-kJ-lXbveRRobppMGOeFd3ZpU,3709
44
44
  synapse_sdk/plugins/upload.py,sha256=VJOotYMayylOH0lNoAGeGHRkLdhP7jnC_A0rFQMvQpQ,3228
45
45
  synapse_sdk/plugins/utils.py,sha256=4_K6jIl0WrsXOEhFp94faMOriSsddOhIiaXcawYYUUA,3300
46
46
  synapse_sdk/plugins/categories/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -56,7 +56,11 @@ synapse_sdk/plugins/categories/data_validation/templates/plugin/__init__.py,sha2
56
56
  synapse_sdk/plugins/categories/data_validation/templates/plugin/validation.py,sha256=90I5boUpEXvO3mEuKKBs528ls2A4h8Iw4ReOID2h00Y,139
57
57
  synapse_sdk/plugins/categories/export/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
58
58
  synapse_sdk/plugins/categories/export/actions/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
59
- synapse_sdk/plugins/categories/export/actions/export.py,sha256=GboGbFAUFPFqEedqO65L8IbX3AczrIFDoHbHBPaFCN0,320
59
+ synapse_sdk/plugins/categories/export/actions/export.py,sha256=GNmsrJncf7WaxNfnN9I1sNiYYxMbqPhcbpnDcBQC1DM,3062
60
+ synapse_sdk/plugins/categories/export/actions/utils.py,sha256=ixilT_VtgwtbX9kbBTrIybbAyW1kPLhNEkOHABKmZ88,131
61
+ synapse_sdk/plugins/categories/export/templates/config.yaml,sha256=N7YmnFROb3s3M35SA9nmabyzoSb5O2t2TRPicwFNN2o,56
62
+ synapse_sdk/plugins/categories/export/templates/plugin/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
63
+ synapse_sdk/plugins/categories/export/templates/plugin/export.py,sha256=422HPdxCTor0PlOwSw7J7-HyD3i0kq_1D_VjKegkr8c,3904
60
64
  synapse_sdk/plugins/categories/import/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
61
65
  synapse_sdk/plugins/categories/import/actions/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
62
66
  synapse_sdk/plugins/categories/import/actions/import.py,sha256=URn6TOp081odMT5D4NlZ2XEcyKelJx8fxzdoKSkXSAI,320
@@ -65,10 +69,10 @@ synapse_sdk/plugins/categories/neural_net/actions/__init__.py,sha256=47DEQpj8HBS
65
69
  synapse_sdk/plugins/categories/neural_net/actions/deployment.py,sha256=Wmi7in_Mgizt1d5XcDR080h1CIMWKh2_mjub9N380qA,1917
66
70
  synapse_sdk/plugins/categories/neural_net/actions/inference.py,sha256=0a655ELqNVjPFZTJDiw4EUdcMCPGveUEKyoYqpwMFBU,1019
67
71
  synapse_sdk/plugins/categories/neural_net/actions/test.py,sha256=JY25eg-Fo6WbgtMkGoo_qNqoaZkp3AQNEypJmeGzEog,320
68
- synapse_sdk/plugins/categories/neural_net/actions/train.py,sha256=4zq2ryWjqDa6MyI2BGe3sAofMh2NeY3XAo-1gEFXBs4,5145
72
+ synapse_sdk/plugins/categories/neural_net/actions/train.py,sha256=CBg8ky2-bvS5pZlzuf49HcdpyronHkSugwq0UeHhiKg,5401
69
73
  synapse_sdk/plugins/categories/neural_net/base/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
70
74
  synapse_sdk/plugins/categories/neural_net/base/inference.py,sha256=R5DASI6-5vzsjDOYxqeGGMBjnav5qHF4hNJT8zNUR3I,1097
71
- synapse_sdk/plugins/categories/neural_net/templates/config.yaml,sha256=dXKB1hO53hDZB73xnxLVCNQl8Sm7svMmVmuMrOCQmEU,343
75
+ synapse_sdk/plugins/categories/neural_net/templates/config.yaml,sha256=vSTHgKzi7sHzsuUvhOT001hzR6JBpPCue5nECmruEnI,433
72
76
  synapse_sdk/plugins/categories/neural_net/templates/plugin/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
73
77
  synapse_sdk/plugins/categories/neural_net/templates/plugin/inference.py,sha256=InfqKWJYi6sqiUnfPKHC5KYGhxckDaWZNQ202u-uVP4,366
74
78
  synapse_sdk/plugins/categories/neural_net/templates/plugin/test.py,sha256=kYyk7l4UtcDUAH4nkdVUGrHHHjxI4p1U13HSLnmGPyE,53
@@ -105,18 +109,23 @@ synapse_sdk/shared/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuF
105
109
  synapse_sdk/shared/enums.py,sha256=WMZPag9deVF7VCXaQkLk7ly_uX1KwbNzRx9TdvgaeFE,138
106
110
  synapse_sdk/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
107
111
  synapse_sdk/utils/debug.py,sha256=F7JlUwYjTFZAMRbBqKm6hxOIz-_IXYA8lBInOS4jbS4,100
108
- synapse_sdk/utils/file.py,sha256=eF1GDxjPaq2QOiKKCHSzgCkBRYSBSwP4rKOAApAQR3E,5661
112
+ synapse_sdk/utils/file.py,sha256=zP8eOZifGiYP9PyC4ivQwxs-ljbtXRtbWN4yOjZF6tc,6658
109
113
  synapse_sdk/utils/module_loading.py,sha256=chHpU-BZjtYaTBD_q0T7LcKWtqKvYBS4L0lPlKkoMQ8,1020
110
114
  synapse_sdk/utils/network.py,sha256=wg-oFM0gKK5REqIUO8d-x9yXJfqbnkSbbF0_qyxpwz4,412
111
- synapse_sdk/utils/storage.py,sha256=a8OVbd38ATr0El4G4kuV07lr_tJZrpIJBSy4GHb0qZ8,2581
112
115
  synapse_sdk/utils/string.py,sha256=rEwuZ9SAaZLcQ8TYiwNKr1h2u4CfnrQx7SUL8NWmChg,216
113
116
  synapse_sdk/utils/pydantic/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
114
117
  synapse_sdk/utils/pydantic/config.py,sha256=1vYOcUI35GslfD1rrqhFkNXXJOXt4IDqOPSx9VWGfNE,123
115
118
  synapse_sdk/utils/pydantic/errors.py,sha256=0v0T12eQBr1KrFiEOBu6KMaPK4aPEGEC6etPJGoR5b4,1061
116
119
  synapse_sdk/utils/pydantic/validators.py,sha256=G47P8ObPhsePmd_QZDK8EdPnik2CbaYzr_N4Z6En8dc,193
117
- synapse_sdk-1.0.0a28.dist-info/LICENSE,sha256=bKzmC5YAg4V1Fhl8OO_tqY8j62hgdncAkN7VrdjmrGk,1101
118
- synapse_sdk-1.0.0a28.dist-info/METADATA,sha256=--5rhzhDRDXJ5YbfkpzJcfW7tUHyPqrx44W2EAez2P8,1070
119
- synapse_sdk-1.0.0a28.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
120
- synapse_sdk-1.0.0a28.dist-info/entry_points.txt,sha256=VNptJoGoNJI8yLXfBmhgUefMsmGI0m3-0YoMvrOgbxo,48
121
- synapse_sdk-1.0.0a28.dist-info/top_level.txt,sha256=ytgJMRK1slVOKUpgcw3LEyHHP7S34J6n_gJzdkcSsw8,12
122
- synapse_sdk-1.0.0a28.dist-info/RECORD,,
120
+ synapse_sdk/utils/storage/__init__.py,sha256=oDRyXwwp9xxdCkNtHzDF2-cUwv6EMhKt4rWYbyAKaEk,689
121
+ synapse_sdk/utils/storage/registry.py,sha256=WaSN9SJR7s9sZgmTVl5k4mLFz-9R6X4ii82wefxs95A,335
122
+ synapse_sdk/utils/storage/providers/__init__.py,sha256=NM9yRIWcPkH53DeNHgIhH9zaDFK8SJv0KptP1Afulyw,1125
123
+ synapse_sdk/utils/storage/providers/gcp.py,sha256=i2BQCu1Kej1If9SuNr2_lEyTcr5M_ncGITZrL0u5wEA,363
124
+ synapse_sdk/utils/storage/providers/s3.py,sha256=r94aUGVNf8yxihU0lN62yaXdxSS7P783_sfD-hCiK40,1191
125
+ synapse_sdk/utils/storage/providers/sftp.py,sha256=TUQXkKJf0-fh8NhGC_1zzqI4autFNHZVCqngwkQ1aD4,523
126
+ synapse_sdk-1.0.0a30.dist-info/LICENSE,sha256=bKzmC5YAg4V1Fhl8OO_tqY8j62hgdncAkN7VrdjmrGk,1101
127
+ synapse_sdk-1.0.0a30.dist-info/METADATA,sha256=CZ9X66Gy-UkyIZic1g3FSh9kfCVJ-lWSkvON9qox20M,1138
128
+ synapse_sdk-1.0.0a30.dist-info/WHEEL,sha256=jB7zZ3N9hIM9adW7qlTAyycLYW9npaWKLRzaoVcLKcM,91
129
+ synapse_sdk-1.0.0a30.dist-info/entry_points.txt,sha256=VNptJoGoNJI8yLXfBmhgUefMsmGI0m3-0YoMvrOgbxo,48
130
+ synapse_sdk-1.0.0a30.dist-info/top_level.txt,sha256=ytgJMRK1slVOKUpgcw3LEyHHP7S34J6n_gJzdkcSsw8,12
131
+ synapse_sdk-1.0.0a30.dist-info/RECORD,,
@@ -1,5 +1,5 @@
1
1
  Wheel-Version: 1.0
2
- Generator: setuptools (75.8.0)
2
+ Generator: setuptools (75.8.2)
3
3
  Root-Is-Purelib: true
4
4
  Tag: py3-none-any
5
5
 
@@ -1,91 +0,0 @@
1
- import os
2
- from urllib.parse import parse_qs, urlparse
3
-
4
- import boto3
5
- from botocore.exceptions import ClientError
6
-
7
-
8
- class BaseStorage:
9
- url = None
10
- options = None
11
- OPTION_CASTS = {}
12
-
13
- def __init__(self, url):
14
- self.url = urlparse(url)
15
- self.query_params = self.url_querystring_to_dict()
16
- self.options = self.get_options()
17
-
18
- def url_querystring_to_dict(self):
19
- query_string = self.url.query
20
-
21
- query_dict = parse_qs(query_string)
22
-
23
- for key, value in query_dict.items():
24
- if len(value) == 1:
25
- query_dict[key] = value[0]
26
-
27
- return {
28
- key: self.OPTION_CASTS[key](value) if key in self.OPTION_CASTS else value
29
- for key, value in query_dict.items()
30
- }
31
-
32
- def get_options(self):
33
- return None
34
-
35
- def upload(self, source, target):
36
- raise NotImplementedError
37
-
38
- def exists(self, target):
39
- raise NotImplementedError
40
-
41
- def get_url(self, target):
42
- raise NotImplementedError
43
-
44
-
45
- class S3Storage(BaseStorage):
46
- def __init__(self, url):
47
- super().__init__(url)
48
- self.client = boto3.client(
49
- 's3',
50
- endpoint_url=self.options['endpoint_url'],
51
- aws_access_key_id=self.options['access_key'],
52
- aws_secret_access_key=self.options['secret_key'],
53
- )
54
-
55
- def get_options(self):
56
- base_url = f'https://{self.url.hostname}'
57
- local_endpoint = self.query_params.get('local_endpoint')
58
- endpoint_url = f'http://{local_endpoint}' if local_endpoint else base_url
59
- return {
60
- 'base_url': base_url,
61
- 'endpoint_url': endpoint_url,
62
- 'bucket_name': self.url.path[1:],
63
- 'access_key': self.url.username,
64
- 'secret_key': self.url.password,
65
- **self.query_params,
66
- }
67
-
68
- def upload(self, source, target):
69
- object_name = os.path.join(self.options['location'], target)
70
- self.client.upload_file(source, self.options['bucket_name'], object_name)
71
- return self.get_url(target)
72
-
73
- def exists(self, target):
74
- try:
75
- self.client.head_object(Bucket=self.options['bucket_name'], Key=target)
76
- return True
77
- except ClientError:
78
- return False
79
-
80
- def get_url(self, target):
81
- return os.path.join(self.options['base_url'], self.options['bucket_name'], self.options['location'], target)
82
-
83
-
84
- STORAGE_STORAGES = {
85
- 's3': S3Storage,
86
- }
87
-
88
-
89
- def get_storage(url):
90
- storage_scheme = urlparse(url).scheme
91
- return STORAGE_STORAGES[storage_scheme](url)