synapse_sdk-0.0.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release. This version of synapse-sdk might be problematic; see the registry listing for details.

Files changed (35)
  1. synapse/__init__.py +0 -0
  2. synapse/client/__init__.py +89 -0
  3. synapse/client/exceptions.py +16 -0
  4. synapse/client/mixins/__init__.py +0 -0
  5. synapse/client/mixins/annotation.py +28 -0
  6. synapse/client/mixins/dataset.py +48 -0
  7. synapse/client/mixins/integration.py +12 -0
  8. synapse/client/mixins/ml.py +23 -0
  9. synapse/client/utils.py +10 -0
  10. synapse/config.py +8 -0
  11. synapse/loggers.py +48 -0
  12. synapse/plugins/__init__.py +32 -0
  13. synapse/plugins/categories/__init__.py +13 -0
  14. synapse/plugins/categories/base.py +49 -0
  15. synapse/plugins/categories/custom_script/__init__.py +0 -0
  16. synapse/plugins/categories/custom_script/actions/__init__.py +0 -0
  17. synapse/plugins/categories/export/__init__.py +0 -0
  18. synapse/plugins/categories/export/actions/__init__.py +0 -0
  19. synapse/plugins/categories/import/__init__.py +0 -0
  20. synapse/plugins/categories/import/actions/__init__.py +0 -0
  21. synapse/plugins/categories/neural_net/__init__.py +0 -0
  22. synapse/plugins/categories/neural_net/actions/__init__.py +0 -0
  23. synapse/plugins/categories/neural_net/actions/deployment.py +24 -0
  24. synapse/plugins/categories/neural_net/actions/test.py +6 -0
  25. synapse/plugins/categories/neural_net/actions/train.py +91 -0
  26. synapse/plugins/utils.py +26 -0
  27. synapse/utils/__init__.py +0 -0
  28. synapse/utils/file.py +58 -0
  29. synapse/utils/module_loading.py +29 -0
  30. synapse/utils/string.py +7 -0
  31. synapse_sdk-0.0.0.dist-info/LICENSE +21 -0
  32. synapse_sdk-0.0.0.dist-info/METADATA +16 -0
  33. synapse_sdk-0.0.0.dist-info/RECORD +35 -0
  34. synapse_sdk-0.0.0.dist-info/WHEEL +5 -0
  35. synapse_sdk-0.0.0.dist-info/top_level.txt +1 -0
synapse/__init__.py ADDED
File without changes
synapse/client/__init__.py ADDED
@@ -0,0 +1,89 @@
+ import os
+ import json
+ from pathlib import Path
+
+ import requests
+
+ from .exceptions import ClientError
+ from .mixins.annotation import AnnotationClientMixin
+ from .mixins.dataset import DatasetClientMixin
+ from .mixins.integration import IntegrationClientMixin
+ from .mixins.ml import MLClientMixin
+ from ..utils.file import files_url_to_path_from_objs
+
+
+ class Client(AnnotationClientMixin, DatasetClientMixin, IntegrationClientMixin, MLClientMixin):
+     base_url = None
+     token = None
+     workspace_code = None
+
+     def __init__(self, base_url, token, workspace_code=None):
+         self.base_url = base_url
+         self.token = token
+         if workspace_code:
+             self.workspace_code = workspace_code
+         requests_session = requests.Session()
+         self.requests_session = requests_session
+
+     def _get_url(self, path):
+         if not path.startswith(self.base_url):
+             return os.path.join(self.base_url, path)
+         return path
+
+     def _get_headers(self):
+         headers = {'Authorization': f'Token {self.token}'}
+         if self.workspace_code:
+             headers['DATAMAKER-Workspace'] = f'Token {self.workspace_code}'
+         return headers
+
+     def _request(self, method, path, **kwargs):
+         url = self._get_url(path)
+         headers = self._get_headers()
+
+         if method in ['post', 'put', 'patch']:
+             if kwargs.get('files') is not None:
+                 for name, file in kwargs['files'].items():
+                     kwargs['files'][name] = Path(str(file)).open(mode='rb')
+             else:
+                 headers['Content-Type'] = 'application/json'
+                 if 'data' in kwargs:
+                     kwargs['data'] = json.dumps(kwargs['data'])
+
+         try:
+             response = getattr(self.requests_session, method)(url, headers=headers, **kwargs)
+             if not response.ok:
+                 raise ClientError(
+                     response.status_code, response.json() if response.status_code == 400 else response.reason
+                 )
+         except requests.ConnectionError:
+             raise ClientError(408, 'Server is not responding')
+
+         return response.json()
+
+     def _get(self, path, payload=None, url_conversion=None):
+         response = self._request('get', path, params=payload)
+         if url_conversion:
+             if url_conversion['is_list']:
+                 files_url_to_path_from_objs(response['results'], **url_conversion)
+             else:
+                 files_url_to_path_from_objs(response, **url_conversion)
+         return response
+
+     def _post(self, path, payload=None, files=None, params=None):
+         return self._request('post', path, data=payload, files=files, params=params)
+
+     def _patch(self, path, payload=None, files=None, params=None):
+         return self._request('patch', path, data=payload, files=files, params=params)
+
+     def _list(self, path, payload=None, url_conversion=None, list_all=False):
+         response = self._get(path, payload, url_conversion)
+         if list_all:
+             return self._list_all(path, payload, url_conversion), response['count']
+         else:
+             return response
+
+     def _list_all(self, path, payload=None, url_conversion=None):
+         response = self._get(path, payload, url_conversion)
+         yield from response['results']
+         if response['next']:
+             yield from self._list_all(response['next'], payload, url_conversion)
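
For orientation, a minimal usage sketch of this client. The host, token, and IDs below are illustrative placeholders, not values from the package:

    from synapse.client import Client
    from synapse.client.exceptions import ClientError

    client = Client('https://example.invalid/api/', token='<token>')
    try:
        project = client.get_project(1)                     # GET <base_url>/projects/1/
        labels, count = client.list_labels(list_all=True)   # generator over all pages, plus total count
    except ClientError as e:
        print(e.status, e.reason)

Note that with list_all=True the mixin methods return a (generator, count) tuple, as _list shows above.
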
synapse/client/exceptions.py ADDED
@@ -0,0 +1,16 @@
+ class ClientError(Exception):
+     status = None
+     reason = None
+
+     def __init__(self, status, reason, *args):
+         self.status = status
+         self.reason = reason
+         super().__init__(status, reason, *args)
+
+     def as_validation_error(self):
+         if self.status == 400:
+             error = self.reason
+         else:
+             error = str(self)
+
+         return {'backend_errors': error}
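
Continuing the sketch above, as_validation_error is presumably meant for surfacing backend errors in form validation:

    try:
        client.create_labels([{'project': 1}])  # hypothetical invalid payload
    except ClientError as e:
        form_errors = e.as_validation_error()
        # for a 400, `reason` holds the parsed JSON error body; other statuses fall back to str(e)
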
synapse/client/mixins/__init__.py ADDED
File without changes
synapse/client/mixins/annotation.py ADDED
@@ -0,0 +1,28 @@
+ from ..utils import get_default_url_conversion
+
+
+ class AnnotationClientMixin:
+     def get_project(self, pk):
+         path = f'projects/{pk}/'
+         return self._get(path)
+
+     def get_label_tag(self, pk):
+         path = f'label_tags/{pk}/'
+         return self._get(path)
+
+     def list_label_tags(self, payload=None):
+         path = 'label_tags/'
+         return self._list(path, payload)
+
+     def list_labels(self, payload=None, url_conversion=None, list_all=False):
+         path = 'labels/'
+         url_conversion = get_default_url_conversion(url_conversion, files_fields=['files'])
+         return self._list(path, payload, url_conversion, list_all)
+
+     def create_labels(self, data):
+         path = 'labels/'
+         return self._post(path, payload=data)
+
+     def set_tags_labels(self, data, params=None):
+         path = 'labels/set_tags/'
+         return self._post(path, payload=data, params=params)
synapse/client/mixins/dataset.py ADDED
@@ -0,0 +1,48 @@
+ from multiprocessing import Pool
+ from tqdm import tqdm
+ from ..utils import get_batched_list
+
+
+ class DatasetClientMixin:
+     def list_dataset(self):
+         path = 'datasets/'
+         return self._get(path)
+
+     def create_data_file(self, file_path):
+         path = 'data_files/'
+         return self._post(path, files={'file': file_path})
+
+     def create_data_units(self, data):
+         path = 'data_units/'
+         return self._post(path, payload=data)
+
+     def import_dataset(self, dataset_id, dataset, project_id=None, batch_size=1000, process_pool=10):
+         # TODO validate dataset with schema
+
+         params = [(data, dataset_id) for data in dataset]
+
+         with Pool(processes=process_pool) as pool:
+             dataset = pool.starmap(self.import_data_file, tqdm(params))
+
+         batches = get_batched_list(dataset, batch_size)
+
+         for batch in tqdm(batches):
+             data_units = self.create_data_units(batch)
+
+             if project_id:
+                 labels_data = []
+                 for data, data_unit in zip(batch, data_units):
+                     label_data = {'project': project_id, 'data_unit': data_unit['id']}
+                     if 'ground_truth' in data:
+                         label_data['ground_truth'] = data['ground_truth']
+
+                     labels_data.append(label_data)
+
+                 self.create_labels(labels_data)
+
+     def import_data_file(self, data, dataset_id):
+         for name, path in data['files'].items():
+             data_file = self.create_data_file(path)
+             data['dataset'] = dataset_id
+             data['files'][name] = {'checksum': data_file['checksum'], 'path': str(path)}
+         return data
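
import_dataset expects an iterable of records whose 'files' map names to local paths; each file is uploaded via create_data_file and rewritten to a checksum/path pair before data units are created in batches. A hedged sketch of the expected call shape, with field names taken from the code above and all values illustrative:

    dataset = [
        {
            'files': {'image': '/data/images/0001.jpg'},
            'ground_truth': {'label': 'cat'},  # optional; only used when project_id is given
        },
        # ...
    ]
    client.import_dataset(dataset_id=3, dataset=dataset, project_id=7, batch_size=1000)
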
synapse/client/mixins/integration.py ADDED
@@ -0,0 +1,12 @@
+ class IntegrationClientMixin:
+     def get_plugin(self, pk):
+         path = f'plugins/{pk}/'
+         return self._get(path)
+
+     def create_logs(self, data):
+         path = 'logs/'
+         return self._post(path, payload=data)
+
+     def create_task(self, data):
+         path = 'agent_tasks/'
+         return self._post(path, payload=data)
synapse/client/mixins/ml.py ADDED
@@ -0,0 +1,23 @@
+ from ..utils import get_default_url_conversion
+
+
+ class MLClientMixin:
+     def get_model(self, pk, payload=None, url_conversion=None):
+         path = f'models/{pk}/'
+         url_conversion = get_default_url_conversion(
+             url_conversion, files_fields=['files', 'parent.files'], is_list=False
+         )
+         return self._get(path, payload, url_conversion)
+
+     def create_model(self, data):
+         path = 'models/'
+         return self._post(path, payload=data)
+
+     def update_model(self, pk, data, files=None):
+         path = f'models/{pk}/'
+         return self._patch(path, payload=data, files=files)
+
+     def list_train_dataset(self, payload=None, url_conversion=None, list_all=False):
+         path = 'train_dataset/'
+         url_conversion = get_default_url_conversion(url_conversion, files_fields=['files'])
+         return self._list(path, payload, url_conversion, list_all)
synapse/client/utils.py ADDED
@@ -0,0 +1,10 @@
+ def get_default_url_conversion(url_conversion, **kwargs):
+     defaults = {'files_fields': [], 'coerce': None, 'is_list': True}
+     defaults.update(kwargs)
+     if url_conversion:
+         defaults.update(url_conversion)
+     return defaults
+
+
+ def get_batched_list(object_list, batch_size):
+     return [object_list[index : index + batch_size] for index in range(0, len(object_list), batch_size)]
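
get_batched_list simply slices a list into fixed-size chunks, with a possibly shorter final chunk:

    get_batched_list(list(range(5)), 2)  # -> [[0, 1], [2, 3], [4]]
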
synapse/config.py ADDED
@@ -0,0 +1,8 @@
+ try:
+     from constance import config as constance_config
+
+     config = constance_config
+ except ImportError:
+     config = None
+
+ __all__ = ['config']
synapse/loggers.py ADDED
@@ -0,0 +1,48 @@
+ import datetime
+
+ from synapse.client import ClientError
+
+
+ class BaseLogger:
+     progress_records = {}
+     logs_queue = []
+     client = None
+
+     def __init__(self, client=None, task=None):
+         self.client = client
+         self.task = task
+
+     def set_progress(self, current, total, category=''):
+         percent = 0
+         if total > 0:
+             percent = (current / total) * 100
+             percent = float(round(percent, 2))
+
+         self.progress_records[category] = {'current': current, 'total': total, 'percent': percent}
+         if self.task:
+             self.task.update_state(state='PROGRESS', meta=self.progress_records)
+         else:
+             print(self.progress_records)
+
+     def log(self, action, data):
+         log = {'action': action, 'data': data, 'datetime': datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S.%f')}
+
+         if self.client and self.task:
+             log['task_id'] = self.task.request.id
+             self.logs_queue.append(log)
+             try:
+                 self.client.create_logs(self.logs_queue)
+                 self.logs_queue.clear()
+             except ClientError as e:
+                 print(e)
+         else:
+             print(log)
+
+
+ class ConsoleLogger(BaseLogger):
+     def log(self, action, data):
+         print(action, data)
+
+
+ class SynapseLogger(BaseLogger):
+     pass
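
Without a client and task, the base logger just prints. Note that progress_records and logs_queue are class-level attributes, so they are shared across all logger instances. A quick sketch of the console path:

    logger = ConsoleLogger()
    logger.set_progress(5, 10, category='dataset_download')
    # -> {'dataset_download': {'current': 5, 'total': 10, 'percent': 50.0}}
    logger.log('event', {'content': 'training started'})
    # -> event {'content': 'training started'}
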
synapse/plugins/__init__.py ADDED
@@ -0,0 +1,32 @@
+ import argparse
+
+ from synapse.plugins.utils import get_action
+
+ action = None
+
+
+ def init():
+     global action
+     parser = argparse.ArgumentParser(description='synapse plugin runner')
+
+     # Add arguments
+     parser.add_argument('action', help='action to run on this plugin')
+     parser.add_argument('params', help='parameter of the action')
+
+     # Parse arguments
+     args = parser.parse_args()
+
+     # Access parsed arguments
+     action = args.action
+     params = args.params
+
+     action = get_action(action, params)
+
+
+ def run():
+     global action
+     assert action is not None
+     action.run()
+
+
+ __all__ = ['init', 'run']
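
The runner reads an action name and a params argument (inline JSON or a file path) from argv; since get_action loads config.yaml from the working directory, it is presumably invoked from inside a plugin's directory. A sketch of driving it programmatically, with a hypothetical argv:

    import sys
    import synapse.plugins as plugins

    sys.argv = ['runner', 'train', '{"epochs": 10}']  # hypothetical action and params
    plugins.init()  # parses argv, loads config.yaml, builds the Action via get_action()
    plugins.run()   # dispatches to Action.run()
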
synapse/plugins/categories/__init__.py ADDED
@@ -0,0 +1,13 @@
+ from synapse.plugins.categories.neural_net.actions.deployment import DeploymentAction
+ from synapse.plugins.categories.neural_net.actions.inference import InferenceAction
+ from synapse.plugins.categories.neural_net.actions.test import TestAction
+ from synapse.plugins.categories.neural_net.actions.train import TrainAction
+
+ ACTIONS = {
+     'neural_net': {
+         'deployment': DeploymentAction,
+         'inference': InferenceAction,
+         'train': TrainAction,
+         'test': TestAction,
+     },
+ }
synapse/plugins/categories/base.py ADDED
@@ -0,0 +1,49 @@
+ from functools import cached_property
+
+ from synapse.loggers import ConsoleLogger
+ from synapse.plugins.utils import get_plugin_checksum
+
+
+ class Action:
+     params = None
+     config = None
+     client = None
+     logger = None
+
+     def __init__(self, params, config):
+         self.params = params
+         self.config = config
+
+         # TODO improve how the logger is selected
+         self.logger = ConsoleLogger()
+
+     @cached_property
+     def plugin_id(self):
+         code = self.config['code']
+         version = self.config['version']
+         return f'{code}@{version}'
+
+     @cached_property
+     def plugin_checksum(self):
+         return get_plugin_checksum(self.plugin_id)
+
+     def get_runtime_env(self):
+         return {
+             # TODO fetch working_dir from a URL
+             'working_dir': ''
+         }
+
+     def run(self):
+         raise NotImplementedError
+
+     def set_progress(self, current, total, category=''):
+         self.logger.set_progress(current, total, category)
+
+     def log(self, action, data):
+         self.logger.log(action, data)
+
+     def log_event(self, message):
+         self.logger.log('event', {'content': message})
+
+     def end_log(self):
+         self.log_event('Plugin run is complete.')
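
New categories would presumably subclass Action and implement run(); a minimal hypothetical sketch (the category and its behavior are not part of this release):

    class ExportAction(Action):  # hypothetical category action
        def run(self):
            self.log_event('Export started.')
            items = self.params.get('items', [])
            for i, item in enumerate(items, start=1):
                self.set_progress(i, len(items), category='export')
            self.end_log()
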
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
synapse/plugins/categories/neural_net/actions/deployment.py ADDED
@@ -0,0 +1,24 @@
+ from ray import serve
+
+ from synapse.plugins.categories.base import Action
+ from synapse.utils.module_loading import import_string
+
+
+ class DeploymentAction(Action):
+     deployment = None
+
+     def __init__(self, *args, **kwargs):
+         super().__init__(*args, **kwargs)
+         self.deployment = self.get_deployment()
+
+     def get_deployment(self):
+         entrypoint = self.config['actions']['deployment']['entrypoint']
+         deployment = import_string(entrypoint)
+         return serve.deployment(ray_actor_options=self.get_actor_options())(deployment)
+
+     def get_actor_options(self):
+         return {'runtime_env': self.get_runtime_env()}
+
+     def run(self):
+         serve.delete(self.plugin_id)
+         serve.run(self.deployment.bind(), name=self.plugin_id, route_prefix=f'/{self.plugin_checksum}')
synapse/plugins/categories/neural_net/actions/test.py ADDED
@@ -0,0 +1,6 @@
+ from synapse.plugins.categories.base import Action
+
+
+ class TestAction(Action):
+     def run(self):
+         pass
synapse/plugins/categories/neural_net/actions/train.py ADDED
@@ -0,0 +1,91 @@
+ from synapse.plugins.categories.base import Action
+ from synapse.utils.file import get_dict_from_file, files_url_to_path_from_objs
+ from synapse.utils.module_loading import import_string
+
+
+ class TrainAction(Action):
+     # TODO implement specifying which hardware to use (gpu-n, cpu)
+
+     def get_input_dataset_for_training(self, model_id=None):
+         """
+         :return:
+             {
+                 "train": [
+                     {
+                         "files": {
+                             "image": {
+                                 "path": "/path/to/image.jpg",
+                                 "meta": {
+                                     "width": 265,
+                                     "height": 190,
+                                     "created": 1651563526.0277045,
+                                     "file_size": 5191,
+                                     "last_modified": 1651563526.0277045
+                                 }
+                             }
+                         },
+                         "ground_truth": {
+                             ...label_data
+                         }
+                     },
+                     ...
+                 ],
+                 "validation": ...,
+                 "test": ...
+             }
+         """
+
+         client = self.logger.client
+         input_dataset = {}
+         category_int_to_str = {1: 'train', 2: 'validation', 3: 'test'}
+
+         if client:
+             train_dataset, count_dataset = client.list_train_dataset(
+                 payload={'fields': ['category', 'files', 'ground_truth'], 'model': model_id}, list_all=True
+             )
+
+             for i, train_data in enumerate(train_dataset, start=1):
+                 self.set_progress(i, count_dataset, category='dataset_download')
+                 category = category_int_to_str[train_data.pop('category')]
+                 try:
+                     input_dataset[category].append(train_data)
+                 except KeyError:
+                     input_dataset[category] = [train_data]
+
+         else:
+             for category in category_int_to_str.values():
+                 dataset_path = self.task['dataset'].get(category)
+                 if dataset_path:
+                     input_dataset[category] = get_dict_from_file(dataset_path)
+                     files_url_to_path_from_objs(input_dataset[category], ['files'], is_list=True)
+
+         return input_dataset
+
+     def run_train(self):
+         hyperparameter = self.task['hyperparameter']
+         train = import_string(self.plugin['train']['entrypoint'])
+
+         # download dataset
+         self.log_event('Preparing dataset for training.')
+         input_dataset = self.get_input_dataset_for_training()
+
+         # train dataset
+         self.log_event('Starting model training.')
+
+         model_files = train(self, input_dataset, hyperparameter)
+
+         # upload model_data
+         self.log_event('Registering model data.')
+
+         self.end_log()
+         return model_files
+
+     def start(self):
+         action = self.task['action']
+         getattr(self, f'run_{action}')()
+
+     def log_metric(self, x, i, **kwargs):
+         self.log(x, {x: i, **kwargs})
+
+     def log_model(self, files, status=None):
+         pass
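
The train entrypoint resolved via import_string is called as train(action, input_dataset, hyperparameter) and should return the trained model files. Note that run_train reads self.task and self.plugin, which nothing in this release appears to set. A hedged sketch of a conforming entrypoint, with illustrative values throughout:

    def train(action, input_dataset, hyperparameter):
        # `action` is the TrainAction instance, usable for logging and progress.
        epochs = hyperparameter.get('epochs', 1)
        for epoch in range(1, epochs + 1):
            action.log_metric('loss', 0.1 / epoch, epoch=epoch)  # logs {'loss': ..., 'epoch': ...}
            action.set_progress(epoch, epochs, category='train')
        return {'model': '/tmp/model.pt'}  # hypothetical artifact path
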
synapse/plugins/utils.py ADDED
@@ -0,0 +1,26 @@
+ import json
+
+ from synapse.utils.file import get_dict_from_file
+ from synapse.utils.string import hash_text
+
+
+ def get_action(action, json_or_path):
+     from synapse.plugins.categories import ACTIONS
+
+     try:
+         params = json.loads(json_or_path)
+     except json.JSONDecodeError:
+         params = get_dict_from_file(json_or_path)
+     config = get_dict_from_file('config.yaml')
+     category = config['category']
+     return ACTIONS[category][action](params, config)
+
+
+ def get_available_actions(category):
+     from synapse.plugins.categories import ACTIONS
+
+     return list(ACTIONS[category].keys())
+
+
+ def get_plugin_checksum(plugin_id):
+     return hash_text(plugin_id)
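
get_action loads config.yaml from the current working directory. The package ships no example config, but from the fields referenced across it, the config presumably carries at least the following (shown here as a Python dict; every value is illustrative):

    # Fields read elsewhere: category (plugins/utils.py), code/version
    # (categories/base.py), actions.<name>.entrypoint (deployment.py).
    config = {
        'category': 'neural_net',
        'code': 'my_plugin',    # hypothetical plugin code
        'version': '0.1.0',
        'actions': {
            'deployment': {'entrypoint': 'my_plugin.app.Deployment'},  # hypothetical dotted path
        },
    }
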
synapse/utils/__init__.py ADDED
File without changes
synapse/utils/file.py ADDED
@@ -0,0 +1,58 @@
+ import json
+ import operator
+ from functools import reduce
+ from pathlib import Path
+
+ import requests
+ import yaml
+
+
+ def download_file(url, path_download, name=None, coerce=None):
+     if name:
+         name += Path(url).suffix
+     else:
+         name = Path(url).name
+
+     path = path_download / name
+     if not path.is_file():
+         r = requests.get(url, allow_redirects=True)
+         open(str(path), 'wb').write(r.content)
+
+     if coerce:
+         path = coerce(path)
+
+     return path
+
+
+ def files_url_to_path(files, coerce=None):
+     path_download = Path('/tmp/datamaker') / 'media'
+     path_download.mkdir(parents=True, exist_ok=True)
+     for file_name in files:
+         if isinstance(files[file_name], str):
+             files[file_name] = download_file(files[file_name], path_download, coerce=coerce)
+         else:
+             files[file_name]['path'] = download_file(files[file_name].pop('url'), path_download, coerce=coerce)
+
+
+ def files_url_to_path_from_objs(objs, files_fields, coerce=None, is_list=False):
+     if not is_list:
+         objs = [objs]
+
+     for obj in objs:
+         for files_field in files_fields:
+             try:
+                 files = reduce(operator.getitem, files_field.split('.'), obj)
+                 files_url_to_path(files, coerce=coerce)
+             except KeyError:
+                 pass
+
+
+ def get_dict_from_file(file_path):
+     if isinstance(file_path, str):
+         file_path = Path(file_path)
+
+     with open(file_path) as f:
+         if file_path.suffix == '.yaml':
+             return yaml.safe_load(f)
+         else:
+             return json.load(f)
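
files_url_to_path_from_objs walks each dotted field path into each object, downloads any URL it finds under /tmp/datamaker/media, and rewrites the entry to a local path. A sketch, with a placeholder URL:

    objs = [{'files': {'image': {'url': 'https://example.invalid/a.jpg'}}}]
    files_url_to_path_from_objs(objs, files_fields=['files'], is_list=True)
    # objs[0]['files']['image'] now holds a 'path' key pointing at the downloaded file
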
synapse/utils/module_loading.py ADDED
@@ -0,0 +1,29 @@
+ import sys
+ from importlib import import_module
+
+
+ def cached_import(module_path, class_name):
+     # Check whether module is loaded and fully initialized.
+     if not (
+         (module := sys.modules.get(module_path))
+         and (spec := getattr(module, '__spec__', None))
+         and getattr(spec, '_initializing', False) is False
+     ):
+         module = import_module(module_path)
+     return getattr(module, class_name)
+
+
+ def import_string(dotted_path):
+     """
+     Import a dotted module path and return the attribute/class designated by the
+     last name in the path. Raise ImportError if the import failed.
+     """
+     try:
+         module_path, class_name = dotted_path.rsplit('.', 1)
+     except ValueError as err:
+         raise ImportError("%s doesn't look like a module path" % dotted_path) from err
+
+     try:
+         return cached_import(module_path, class_name)
+     except AttributeError as err:
+         raise ImportError('Module "%s" does not define a "%s" attribute/class' % (module_path, class_name)) from err
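
This matches Django's django.utils.module_loading.import_string; usage is straightforward:

    from synapse.utils.module_loading import import_string

    OrderedDict = import_string('collections.OrderedDict')
    od = OrderedDict(a=1)
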
synapse/utils/string.py ADDED
@@ -0,0 +1,7 @@
+ import hashlib
+
+
+ def hash_text(text):
+     md5_hash = hashlib.md5()
+     md5_hash.update(text.encode('utf-8'))
+     return md5_hash.hexdigest()
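
hash_text derives the plugin checksums (and hence the Ray Serve route prefixes) from plugin IDs; it returns the 32-character hex MD5 digest of the text:

    hash_text('')  # -> 'd41d8cd98f00b204e9800998ecf8427e' (MD5 of the empty string)
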
synapse_sdk-0.0.0.dist-info/LICENSE ADDED
@@ -0,0 +1,21 @@
+ The MIT License (MIT)
+
+ Copyright (c) 2015 PerhapsSPY and other contributors.
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.
synapse_sdk-0.0.0.dist-info/METADATA ADDED
@@ -0,0 +1,16 @@
+ Metadata-Version: 2.1
+ Name: synapse-sdk
+ Version: 0.0.0
+ Summary: synapse sdk
+ Author-email: datamaker <developer@datamaker.io>
+ License: MIT
+ Classifier: Programming Language :: Python :: 3
+ Requires-Python: >=3.11
+ Description-Content-Type: text/markdown
+ License-File: LICENSE
+ Requires-Dist: requests
+ Requires-Dist: tqdm
+ Requires-Dist: pyyaml
+ Requires-Dist: pydantic
+ Requires-Dist: ray[all]
+
synapse_sdk-0.0.0.dist-info/RECORD ADDED
@@ -0,0 +1,35 @@
+ synapse/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ synapse/config.py,sha256=AHyTHtzkkCZb9DLu1_R99VOwPgWATZj4K7yzHaPijfA,149
+ synapse/loggers.py,sha256=xFZl1sHffhRShtZoTwlNG7zAtkrG9FDxZDMz8xdvzyg,1327
+ synapse/client/__init__.py,sha256=zX2xGXMkcXLQ0zs96K6oQYvNkMlh4P1Xpmw-fvg6MEE,3400
+ synapse/client/exceptions.py,sha256=LzgzPKRPhNUpLh-jBQHKze8c3CQoVA22jSfjUMBZCuU,405
+ synapse/client/utils.py,sha256=8pPJTdzHiRPSbZMoQYHAgR2BAMO6u_R_jMV6a2p34iQ,392
+ synapse/client/mixins/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ synapse/client/mixins/annotation.py,sha256=69xblc0RLokKlpSfIzoJ5dVRvw3VpPIVgbQKV3L5Xx4,905
+ synapse/client/mixins/dataset.py,sha256=KXsen7-vdSzdR09uxD88lT604ItJKuCOSLR3fEU5ud0,1666
+ synapse/client/mixins/integration.py,sha256=LfMgPsP_OBNCZmSTpEThlLcSnOHv1-H8EhhryPNriCE,336
+ synapse/client/mixins/ml.py,sha256=xkfO95k6p_nQVyai2zBO-z4b9nh6YIryN4iakDGiPsE,891
+ synapse/plugins/__init__.py,sha256=OCcxzbGsQ68zPbz3Nyd21_oqL1W9ocJU_e5wNbS9wUc,623
+ synapse/plugins/utils.py,sha256=G5Ife0tveVbKpMjAI7sBFTLO5Kul8sAS--SFhlMy_1M,669
+ synapse/plugins/categories/__init__.py,sha256=ejRSvoJUhz1SrBaH43nuyOjlydebg1o7hymvUDS1Pik,498
+ synapse/plugins/categories/base.py,sha256=20T-ZIH-MVYc3EgJgb804bR4_gven6rBBmGNw2XEdn4,1216
+ synapse/plugins/categories/custom_script/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ synapse/plugins/categories/custom_script/actions/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ synapse/plugins/categories/export/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ synapse/plugins/categories/export/actions/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ synapse/plugins/categories/import/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ synapse/plugins/categories/import/actions/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ synapse/plugins/categories/neural_net/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ synapse/plugins/categories/neural_net/actions/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ synapse/plugins/categories/neural_net/actions/deployment.py,sha256=B86zjVbxLO7WsSqPGNgReTetAilt2wRj4VI3TS38UXo,805
+ synapse/plugins/categories/neural_net/actions/test.py,sha256=XX2dcwNAEkn7mFliwPb70cx7wn0LL_QcDQ7EW6n-H2w,111
+ synapse/plugins/categories/neural_net/actions/train.py,sha256=lQvqVyC8lqVcs8x6v4bD1vTQyDrUz15gSiodEOPGcQc,3100
+ synapse/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ synapse/utils/file.py,sha256=kwNoOAzJqT6v2fBHUhhs9sW2mdziUJmYHOxPzIodhmE,1578
+ synapse/utils/module_loading.py,sha256=chHpU-BZjtYaTBD_q0T7LcKWtqKvYBS4L0lPlKkoMQ8,1020
+ synapse/utils/string.py,sha256=FMH1vlh7BsDcawCiov3HtQEcjp3KSHmjcZ2wZ9EeGu0,141
+ synapse_sdk-0.0.0.dist-info/LICENSE,sha256=bKzmC5YAg4V1Fhl8OO_tqY8j62hgdncAkN7VrdjmrGk,1101
+ synapse_sdk-0.0.0.dist-info/METADATA,sha256=faVVxD94nPEHfNSmSZ2ZDYGtW7pMZcUkgHeMWyM5vpg,387
+ synapse_sdk-0.0.0.dist-info/WHEEL,sha256=P9jw-gEje8ByB7_hXoICnHtVCrEwMQh-630tKvQWehc,91
+ synapse_sdk-0.0.0.dist-info/top_level.txt,sha256=v_1YsqjmoSCzCKs7oIhzTNmWtSYoORiBMv1TJkOhx8A,8
+ synapse_sdk-0.0.0.dist-info/RECORD,,
synapse_sdk-0.0.0.dist-info/WHEEL ADDED
@@ -0,0 +1,5 @@
+ Wheel-Version: 1.0
+ Generator: setuptools (75.3.0)
+ Root-Is-Purelib: true
+ Tag: py3-none-any
+
synapse_sdk-0.0.0.dist-info/top_level.txt ADDED
@@ -0,0 +1 @@
+ synapse