synapse-sdk 1.0.0a0__py3-none-any.whl → 1.0.0a2__py3-none-any.whl

This diff compares the contents of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.

This version of synapse-sdk has been flagged as potentially problematic.

Files changed (73)
  1. synapse_sdk/clients/agent/__init__.py +21 -0
  2. synapse_sdk/clients/agent/core.py +11 -0
  3. synapse_sdk/clients/agent/service.py +15 -0
  4. synapse_sdk/clients/backend/__init__.py +21 -0
  5. synapse_sdk/clients/backend/annotation.py +29 -0
  6. {synapse/client/mixins → synapse_sdk/clients/backend}/dataset.py +6 -4
  7. synapse_sdk/clients/backend/integration.py +36 -0
  8. synapse_sdk/clients/backend/ml.py +24 -0
  9. synapse/client/__init__.py → synapse_sdk/clients/base.py +22 -30
  10. {synapse → synapse_sdk}/loggers.py +1 -1
  11. synapse_sdk/plugins/__init__.py +13 -0
  12. {synapse → synapse_sdk}/plugins/categories/base.py +43 -45
  13. synapse_sdk/plugins/categories/data_validation/actions/validation.py +10 -0
  14. {synapse → synapse_sdk}/plugins/categories/decorators.py +2 -2
  15. synapse_sdk/plugins/categories/export/actions/export.py +10 -0
  16. synapse_sdk/plugins/categories/import/actions/import.py +10 -0
  17. {synapse → synapse_sdk}/plugins/categories/neural_net/actions/deployment.py +3 -3
  18. synapse_sdk/plugins/categories/neural_net/actions/inference.py +10 -0
  19. synapse_sdk/plugins/categories/neural_net/actions/test.py +10 -0
  20. {synapse → synapse_sdk}/plugins/categories/neural_net/actions/train.py +5 -5
  21. synapse_sdk/plugins/categories/post_annotation/actions/post_annotation.py +10 -0
  22. synapse_sdk/plugins/categories/pre_annotation/actions/pre_annotation.py +10 -0
  23. {synapse → synapse_sdk}/plugins/categories/registry.py +2 -2
  24. synapse_sdk/plugins/cli/__init__.py +19 -0
  25. synapse_sdk/plugins/cli/publish.py +36 -0
  26. synapse_sdk/plugins/cli/run.py +63 -0
  27. synapse_sdk/plugins/upload.py +82 -0
  28. {synapse → synapse_sdk}/plugins/utils.py +8 -4
  29. {synapse → synapse_sdk}/utils/file.py +13 -1
  30. synapse_sdk/utils/storage.py +91 -0
  31. {synapse → synapse_sdk}/utils/string.py +4 -0
  32. {synapse_sdk-1.0.0a0.dist-info → synapse_sdk-1.0.0a2.dist-info}/METADATA +2 -1
  33. synapse_sdk-1.0.0a2.dist-info/RECORD +58 -0
  34. {synapse_sdk-1.0.0a0.dist-info → synapse_sdk-1.0.0a2.dist-info}/WHEEL +1 -1
  35. synapse_sdk-1.0.0a2.dist-info/top_level.txt +1 -0
  36. synapse/client/mixins/annotation.py +0 -28
  37. synapse/client/mixins/integration.py +0 -12
  38. synapse/client/mixins/ml.py +0 -23
  39. synapse/config.py +0 -8
  40. synapse/plugins/__init__.py +0 -39
  41. synapse/plugins/categories/data_validation/actions/validation.py +0 -10
  42. synapse/plugins/categories/export/actions/export.py +0 -10
  43. synapse/plugins/categories/import/actions/import.py +0 -10
  44. synapse/plugins/categories/neural_net/actions/inference.py +0 -10
  45. synapse/plugins/categories/neural_net/actions/test.py +0 -10
  46. synapse/plugins/categories/post_annotation/actions/post_annotation.py +0 -10
  47. synapse/plugins/categories/pre_annotation/actions/pre_annotation.py +0 -10
  48. synapse/plugins/upload.py +0 -79
  49. synapse_sdk-1.0.0a0.dist-info/RECORD +0 -51
  50. synapse_sdk-1.0.0a0.dist-info/top_level.txt +0 -1
  51. {synapse → synapse_sdk}/__init__.py +0 -0
  52. {synapse/client/mixins → synapse_sdk/clients}/__init__.py +0 -0
  53. {synapse/client → synapse_sdk/clients}/exceptions.py +0 -0
  54. {synapse/client → synapse_sdk/clients}/utils.py +0 -0
  55. {synapse → synapse_sdk}/plugins/categories/__init__.py +0 -0
  56. {synapse → synapse_sdk}/plugins/categories/data_validation/__init__.py +0 -0
  57. {synapse → synapse_sdk}/plugins/categories/data_validation/actions/__init__.py +0 -0
  58. {synapse → synapse_sdk}/plugins/categories/export/__init__.py +0 -0
  59. {synapse → synapse_sdk}/plugins/categories/export/actions/__init__.py +0 -0
  60. {synapse → synapse_sdk}/plugins/categories/import/__init__.py +0 -0
  61. {synapse → synapse_sdk}/plugins/categories/import/actions/__init__.py +0 -0
  62. {synapse → synapse_sdk}/plugins/categories/neural_net/__init__.py +0 -0
  63. {synapse → synapse_sdk}/plugins/categories/neural_net/actions/__init__.py +0 -0
  64. {synapse → synapse_sdk}/plugins/categories/post_annotation/__init__.py +0 -0
  65. {synapse → synapse_sdk}/plugins/categories/post_annotation/actions/__init__.py +0 -0
  66. {synapse → synapse_sdk}/plugins/categories/pre_annotation/__init__.py +0 -0
  67. {synapse → synapse_sdk}/plugins/categories/pre_annotation/actions/__init__.py +0 -0
  68. {synapse → synapse_sdk}/plugins/enums.py +0 -0
  69. {synapse → synapse_sdk}/plugins/job.py +0 -0
  70. {synapse → synapse_sdk}/utils/__init__.py +0 -0
  71. {synapse → synapse_sdk}/utils/debug.py +0 -0
  72. {synapse → synapse_sdk}/utils/module_loading.py +0 -0
  73. {synapse_sdk-1.0.0a0.dist-info → synapse_sdk-1.0.0a2.dist-info}/LICENSE +0 -0

synapse_sdk/plugins/cli/run.py ADDED
@@ -0,0 +1,63 @@
+ import os
+
+ import click
+
+ from synapse_sdk.clients.agent import AgentClient
+ from synapse_sdk.clients.backend import BackendClient
+ from synapse_sdk.plugins.utils import get_action, read_config
+
+
+ @click.command()
+ @click.argument('action')
+ @click.argument('params')
+ @click.option('--direct/--no-direct', default=False)
+ @click.option('--run-by', type=click.Choice(['script', 'agent', 'backend']), default='script')
+ @click.option('--agent-host')
+ @click.option('--agent-token')
+ @click.option('--host')
+ @click.option('--agent')
+ @click.option('--user-token')
+ @click.option('--tenant')
+ @click.pass_context
+ def run(ctx, action, params, direct, run_by, agent_host, agent_token, host, agent, user_token, tenant):
+     debug = ctx.obj['DEBUG']
+
+     if run_by == 'script':
+         run_by_script(action, params, direct, debug)
+     elif run_by == 'agent':
+         run_by_agent(action, params, agent_host, agent_token, user_token, tenant, debug)
+     elif run_by == 'backend':
+         run_by_backend(action, params, agent, host, user_token, tenant)
+
+
+ def run_by_script(action, params, direct, debug):
+     action = get_action(action, params, direct=direct, debug=debug)
+     result = action.run_action()
+
+     if debug:
+         click.echo(result)
+
+
+ def run_by_agent(action, params, agent_host, agent_token, user_token, tenant, debug):
+     client = AgentClient(agent_host, agent_token, user_token, tenant)
+     data = {'action': action, 'params': params}
+     if debug:
+         data.update({
+             'plugin_path': os.getcwd(),
+             'modules': os.getenv('SYNAPSE_DEBUG_MODULES', '').split(','),
+         })
+         result = client.run_debug_plugin_release(data=data)
+     else:
+         config = read_config()
+         result = client.run_plugin_release(code=f'{config["code"]}@{config["version"]}', data=data)
+
+     click.echo(result)
+
+
+ def run_by_backend(action, params, agent, host, user_token, tenant):
+     client = BackendClient(host, user_token, tenant=tenant)
+     config = read_config()
+     data = {'agent': agent, 'version': config['version'], 'action': action, 'params': params}
+     result = client.run_plugin(config['code'], data=data)
+
+     click.echo(result)
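
For orientation, the new click command can be exercised in-process with click's test runner. The sketch below is illustrative only: the action name, the params JSON, and the `obj={'DEBUG': ...}` context object are assumptions, and a real invocation would need to run from a plugin project directory.

```python
# Illustrative sketch (not part of the package): drive the new `run` command
# in-process. Action name and params are made up for this example.
from click.testing import CliRunner

from synapse_sdk.plugins.cli.run import run

runner = CliRunner()
result = runner.invoke(
    run,
    ['train', '{"epochs": 1}', '--run-by', 'script', '--direct'],
    obj={'DEBUG': True},  # the command reads ctx.obj['DEBUG'], so a context object is required
)
print(result.output)
```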

synapse_sdk/plugins/upload.py ADDED
@@ -0,0 +1,82 @@
+ import re
+ import subprocess
+ import tempfile
+ from pathlib import Path
+
+ from synapse_sdk.utils.file import calculate_checksum, download_file
+ from synapse_sdk.utils.storage import get_storage
+
+
+ def archive(source_path, archive_path):
+     archive_path.parent.mkdir(parents=True, exist_ok=True)
+     command = f'git ls-files --others --exclude-standard --cached | zip -q --names-stdin {archive_path}'
+     subprocess.run(command, cwd=source_path, shell=True, check=True, stdout=subprocess.DEVNULL)
+
+
+ def download_and_upload(source_url, url):
+     storage = get_storage(url)
+     with tempfile.TemporaryDirectory() as temp_path:
+         file_path = str(download_file(source_url, temp_path))
+         checksum = calculate_checksum(file_path, prefix='dev')
+         # TODO check for duplicates
+         return storage.upload(file_path, f'{checksum}.zip')
+
+
+ def archive_and_upload(source_path, url):
+     storage = get_storage(url)
+     dist_path = Path(source_path, 'dist')
+     archive_path = dist_path / 'archive.zip'
+
+     archive(source_path, archive_path)
+     checksum = calculate_checksum(archive_path, prefix='dev')
+     checksum_archive_path = dist_path / f'{checksum}.zip'
+
+     if checksum_archive_path.exists():
+         # TODO check that it actually exists in storage
+         return storage.get_url(checksum_archive_path.name)
+
+     archive_path.rename(checksum_archive_path)
+     for file_path in dist_path.glob('*.zip'):
+         if file_path.name != checksum_archive_path.name:
+             file_path.unlink()
+     return storage.upload(str(checksum_archive_path), checksum_archive_path.name)
+
+
+ def build_and_upload(source_path, url, virtualenv_path='.venv'):
+     storage = get_storage(url)
+     dist_path = Path(source_path, 'dist')
+     archive_path = dist_path / 'archive.zip'
+
+     archive(source_path, archive_path)
+     checksum = calculate_checksum(archive_path, prefix='dev')
+     checksum_archive_path = dist_path / f'{checksum}.zip'
+
+     if checksum_archive_path.exists():
+         # TODO check that it actually exists in storage
+         wheel_path = next(dist_path.glob('*.whl'), None)
+         return storage.get_url(wheel_path.name)
+
+     # build the wheel file
+     for file_path in dist_path.glob('*.whl'):
+         file_path.unlink()
+
+     print(f'Building {Path(source_path).name}...')
+     subprocess.run(
+         f'{virtualenv_path}/bin/python -m build --wheel',
+         cwd=source_path,
+         shell=True,
+         check=True,
+         stdout=subprocess.DEVNULL,
+     )
+     wheel_path = next(dist_path.glob('*.whl'), None)
+
+     archive_path.rename(checksum_archive_path)
+     for file_path in dist_path.glob('*.zip'):
+         if file_path.name != checksum_archive_path.name:
+             file_path.unlink()
+     return storage.upload(str(wheel_path), wheel_path.name)
+
+
+ def change_whl_version(whl_name, new_version):
+     pattern = r'^(?P<distribution>.+?)-(?P<version>[\d\.\w]+(\+[\w\.]+)?)(?P<rest>-.+\.whl)$'
+     return re.sub(pattern, rf'\g<distribution>-{new_version}\g<rest>', whl_name)
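
As a quick illustration of `change_whl_version` (whose version pattern now also matches pre-release and local-version segments, unlike the purely numeric regex in the deleted synapse/plugins/upload.py), a hypothetical wheel name is renamed like this:

```python
# Hypothetical filenames, shown only to illustrate the renaming helper.
from synapse_sdk.plugins.upload import change_whl_version

print(change_whl_version('my_plugin-0.1.0-py3-none-any.whl', 'dev-abc123'))
# my_plugin-dev-abc123-py3-none-any.whl
```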

{synapse → synapse_sdk}/plugins/utils.py
@@ -1,8 +1,8 @@
  import json

- from synapse.plugins.categories.registry import _REGISTERED_ACTIONS, register_actions
- from synapse.utils.file import get_dict_from_file
- from synapse.utils.string import hash_text
+ from synapse_sdk.plugins.categories.registry import _REGISTERED_ACTIONS, register_actions
+ from synapse_sdk.utils.file import get_dict_from_file
+ from synapse_sdk.utils.string import hash_text


  def get_action(action, params_data, *args, **kwargs):
@@ -21,7 +21,7 @@ def get_action(action, params_data, *args, **kwargs):
          else:
              config = config_data
      else:
-         config = get_dict_from_file('config.yaml')
+         config = read_config()
      category = config['category']
      return get_action_class(category, action)(params, config, *args, **kwargs)

@@ -38,3 +38,7 @@ def get_available_actions(category):

  def get_plugin_checksum(plugin_id):
      return hash_text(plugin_id)
+
+
+ def read_config():
+     return get_dict_from_file('config.yaml')

{synapse → synapse_sdk}/utils/file.py
@@ -1,3 +1,4 @@
+ import hashlib
  import json
  import operator
  from functools import reduce
@@ -13,7 +14,7 @@ def download_file(url, path_download, name=None, coerce=None):
      else:
          name = Path(url).name

-     path = path_download / name
+     path = Path(path_download) / name
      if not path.is_file():
          r = requests.get(url, allow_redirects=True)
          open(str(path), 'wb').write(r.content)
@@ -56,3 +57,14 @@ def get_dict_from_file(file_path):
              return yaml.safe_load(f)
          else:
              return json.load(f)
+
+
+ def calculate_checksum(file_path, prefix=''):
+     md5_hash = hashlib.md5()
+     with open(file_path, 'rb') as f:
+         for byte_block in iter(lambda: f.read(4096), b''):
+             md5_hash.update(byte_block)
+     checksum = md5_hash.hexdigest()
+     if prefix:
+         return f'dev-{checksum}'
+     return checksum
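
A minimal sketch of the new checksum helper follows; the temporary file is just for illustration. Note that, as written, any truthy `prefix` value yields a digest prefixed with the literal string 'dev-'.

```python
# Illustration only: hash a throwaway file with the helper added above.
import tempfile

from synapse_sdk.utils.file import calculate_checksum

with tempfile.NamedTemporaryFile(delete=False) as f:
    f.write(b'hello')

print(calculate_checksum(f.name))                # bare md5 hex digest
print(calculate_checksum(f.name, prefix='dev'))  # 'dev-<digest>' (the prefix value itself is not interpolated)
```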

synapse_sdk/utils/storage.py ADDED
@@ -0,0 +1,91 @@
+ import os
+ from urllib.parse import urlparse, parse_qs
+
+ import boto3
+ from botocore.exceptions import ClientError
+
+
+ class BaseStorage:
+     url = None
+     options = None
+     OPTION_CASTS = {}
+
+     def __init__(self, url):
+         self.url = urlparse(url)
+         self.query_params = self.url_querystring_to_dict()
+         self.options = self.get_options()
+
+     def url_querystring_to_dict(self):
+         query_string = self.url.query
+
+         query_dict = parse_qs(query_string)
+
+         for key, value in query_dict.items():
+             if len(value) == 1:
+                 query_dict[key] = value[0]
+
+         return {
+             key: self.OPTION_CASTS[key](value) if key in self.OPTION_CASTS else value
+             for key, value in query_dict.items()
+         }
+
+     def get_options(self):
+         return None
+
+     def upload(self, source, target):
+         raise NotImplementedError
+
+     def exists(self, target):
+         raise NotImplementedError
+
+     def get_url(self, target):
+         raise NotImplementedError
+
+
+ class S3Storage(BaseStorage):
+     def __init__(self, url):
+         super().__init__(url)
+         self.client = boto3.client(
+             's3',
+             endpoint_url=self.options['endpoint_url'],
+             aws_access_key_id=self.options['access_key'],
+             aws_secret_access_key=self.options['secret_key'],
+         )
+
+     def get_options(self):
+         base_url = f'https://{self.url.hostname}'
+         local_endpoint = self.query_params.get('local_endpoint')
+         endpoint_url = f'http://{local_endpoint}' if local_endpoint else base_url
+         return {
+             'base_url': base_url,
+             'endpoint_url': endpoint_url,
+             'bucket_name': self.url.path[1:],
+             'access_key': self.url.username,
+             'secret_key': self.url.password,
+             **self.query_params,
+         }
+
+     def upload(self, source, target):
+         object_name = os.path.join(self.options['location'], target)
+         self.client.upload_file(source, self.options['bucket_name'], object_name)
+         return self.get_url(target)
+
+     def exists(self, target):
+         try:
+             self.client.head_object(Bucket=self.options['bucket_name'], Key=target)
+             return True
+         except ClientError:
+             return False
+
+     def get_url(self, target):
+         return os.path.join(self.options['base_url'], self.options['bucket_name'], self.options['location'], target)
+
+
+ STORAGE_STORAGES = {
+     's3': S3Storage,
+ }
+
+
+ def get_storage(url):
+     storage_scheme = urlparse(url).scheme
+     return STORAGE_STORAGES[storage_scheme](url)
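
The storage backend is now selected from a single URL. Below is a hedged sketch of the convention implied by `S3Storage.get_options()`: the credentials, hostname, bucket, and the `location`/`local_endpoint` query parameters are all placeholders.

```python
# Placeholder URL: username/password become the S3 keys, the host becomes the
# public base URL, the path is the bucket, and query params are merged into options.
from synapse_sdk.utils.storage import get_storage

url = 's3://ACCESS_KEY:SECRET_KEY@s3.example.com/my-bucket?location=plugins&local_endpoint=minio:9000'
storage = get_storage(url)

print(storage.options['endpoint_url'])   # http://minio:9000 (falls back to https://s3.example.com)
print(storage.get_url('dev-abc123.zip')) # https://s3.example.com/my-bucket/plugins/dev-abc123.zip
```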

{synapse → synapse_sdk}/utils/string.py
@@ -5,3 +5,7 @@ def hash_text(text):
      md5_hash = hashlib.md5()
      md5_hash.update(text.encode('utf-8'))
      return md5_hash.hexdigest()
+
+
+ def str_to_bool(value):
+     return value.lower() in ['true', '1', 'yes']

{synapse_sdk-1.0.0a0.dist-info → synapse_sdk-1.0.0a2.dist-info}/METADATA
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: synapse-sdk
- Version: 1.0.0a0
+ Version: 1.0.0a2
  Summary: synapse sdk
  Author-email: datamaker <developer@datamaker.io>
  License: MIT
@@ -9,6 +9,7 @@ Requires-Python: >=3.11
  Description-Content-Type: text/markdown
  License-File: LICENSE
  Requires-Dist: boto3
+ Requires-Dist: click
  Requires-Dist: requests
  Requires-Dist: tqdm
  Requires-Dist: python-dotenv

synapse_sdk-1.0.0a2.dist-info/RECORD ADDED
@@ -0,0 +1,58 @@
+ synapse_sdk/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ synapse_sdk/loggers.py,sha256=_qypgMyXrfPlHFMKkMMRFll_jcx-lUBJdmtmdj4cDQY,1343
+ synapse_sdk/clients/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ synapse_sdk/clients/base.py,sha256=IuZY9-me62lPRgmMjXPhqy8cVLbtVSyT9acj19KMQkU,2788
+ synapse_sdk/clients/exceptions.py,sha256=LzgzPKRPhNUpLh-jBQHKze8c3CQoVA22jSfjUMBZCuU,405
+ synapse_sdk/clients/utils.py,sha256=8pPJTdzHiRPSbZMoQYHAgR2BAMO6u_R_jMV6a2p34iQ,392
+ synapse_sdk/clients/agent/__init__.py,sha256=IEC6HqdvsyDSScNNU97e2amjS6SZoUSC_mLq3Lu-9vk,666
+ synapse_sdk/clients/agent/core.py,sha256=N4Fo-hFJPdNYHw9iBuxkgcIJKhi_5yazKqWQBUBHB_o,318
+ synapse_sdk/clients/agent/service.py,sha256=s7KuPK_DB1nr2VHrigttV1WyFonaGHNrPvU8loRxHcE,478
+ synapse_sdk/clients/backend/__init__.py,sha256=pAg2dODaKxZf1DBPA-_uzPRwV4mQ3qo8Q2LC9OW3K44,779
+ synapse_sdk/clients/backend/annotation.py,sha256=Zt1VA3fScYCxy_Ss1TEzqu7jYdNxlNBRYAjZfuEWOSI,989
+ synapse_sdk/clients/backend/dataset.py,sha256=abZr9FyVzgnEpk1POK6m1FfbzWKnqWTo7wyQwwS5VvM,1743
+ synapse_sdk/clients/backend/integration.py,sha256=ToW2ZR--qzQ30jVbFy2dlZb3jRZsbcGhkdXpu-UnHZQ,1042
+ synapse_sdk/clients/backend/ml.py,sha256=l4rGLBZgLUYQOBePvWAoNyz-yZgJuhC-1KCFeZOYDuQ,1012
+ synapse_sdk/plugins/__init__.py,sha256=9vsbYhxah4_ofTaG0x0qLFID_raHNkO57Y8A31Ws-lU,222
+ synapse_sdk/plugins/enums.py,sha256=lQZqO2bEeBKdk6q-SMjfOLDlgxv7BuIPk3fXeUFfHRs,327
+ synapse_sdk/plugins/job.py,sha256=UzFKA8o_F6RzY_PwyI4dlF3kSfmMG0xEYIyKLfdqSP8,91
+ synapse_sdk/plugins/upload.py,sha256=9DF-f0Or6ea4mJxuCmPlpYOG4EGeGqIRALc4ulrnQd4,2973
+ synapse_sdk/plugins/utils.py,sha256=RFxFtmjj-uBK03wUwLhtUecfn_IOKRJupudmsguc2Sc,1212
+ synapse_sdk/plugins/categories/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ synapse_sdk/plugins/categories/base.py,sha256=Je4rJU936X-_uwPPjWYu3xev_3qUIEqCF1WWuoN-LQg,6202
+ synapse_sdk/plugins/categories/decorators.py,sha256=Gw6T-UHwpCKrSt596X-g2sZbY_Z1zbbogowClj7Pr5Q,518
+ synapse_sdk/plugins/categories/registry.py,sha256=KdQR8SUlLT-3kgYzDNWawS1uJnAhrcw2j4zFaTpilRs,636
+ synapse_sdk/plugins/categories/data_validation/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ synapse_sdk/plugins/categories/data_validation/actions/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ synapse_sdk/plugins/categories/data_validation/actions/validation.py,sha256=NbEGFytxjakawglZR_Sf9UAjQyuzRxpdwEI1GDbEBW0,338
+ synapse_sdk/plugins/categories/export/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ synapse_sdk/plugins/categories/export/actions/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ synapse_sdk/plugins/categories/export/actions/export.py,sha256=zpvVB0Dac9ytshJrN2ouDpsGHMuJlap-Ymz4qEw_Hzo,320
+ synapse_sdk/plugins/categories/import/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ synapse_sdk/plugins/categories/import/actions/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ synapse_sdk/plugins/categories/import/actions/import.py,sha256=bkB8x-x7jJfcCnzbz5bOJJy7mPhTKYfIWUdmCoHvpdM,320
+ synapse_sdk/plugins/categories/neural_net/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ synapse_sdk/plugins/categories/neural_net/actions/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ synapse_sdk/plugins/categories/neural_net/actions/deployment.py,sha256=afqgD_mzErcWdW3xdbrKIOL13fUPO0biZk8_l4l2j84,767
+ synapse_sdk/plugins/categories/neural_net/actions/inference.py,sha256=erM2z7aUTwyzJZqWBlxhTP8dm8cOraI_vUYAqcXkdSY,334
+ synapse_sdk/plugins/categories/neural_net/actions/test.py,sha256=dAW1zfodlUhoL-sD17tG-CQT0RBxIcHWJ8f1eeZ00M4,321
+ synapse_sdk/plugins/categories/neural_net/actions/train.py,sha256=sg-PDU_RnAtQGCwI6XnfC63lyPE2fRY6qEUv-_VKu8A,3283
+ synapse_sdk/plugins/categories/post_annotation/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ synapse_sdk/plugins/categories/post_annotation/actions/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ synapse_sdk/plugins/categories/post_annotation/actions/post_annotation.py,sha256=1tutwNDHpnrCPHzMTsMEk29WPajnZikjBE83j7Z-Xt0,347
+ synapse_sdk/plugins/categories/pre_annotation/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ synapse_sdk/plugins/categories/pre_annotation/actions/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ synapse_sdk/plugins/categories/pre_annotation/actions/pre_annotation.py,sha256=YYQt9HsgXlBclE4Sn0c7p1zqCxWHkIHAwyA-tbqrmPQ,344
+ synapse_sdk/plugins/cli/__init__.py,sha256=8ogaOhN-RbDNYHqziW8nLsNUxKkZwGkHBdKxTahcm3U,334
+ synapse_sdk/plugins/cli/publish.py,sha256=v9aMMMyZgftSLW63uO9ZKeGBJEBeVCfOSmsSrfpKOP4,1135
+ synapse_sdk/plugins/cli/run.py,sha256=LJtRl38bxjIls2R2UfTfY-hOBJXYJ_DZKmQV5HN6gtM,2150
+ synapse_sdk/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ synapse_sdk/utils/debug.py,sha256=46sMFQLg_JSRUCymnT3wgszG1QsgrocRGiBjVX38r50,53
+ synapse_sdk/utils/file.py,sha256=Iptk_DCPsmJzqAABCD3vC6z1yG74fKb5x81LnUCZzYo,1916
+ synapse_sdk/utils/module_loading.py,sha256=chHpU-BZjtYaTBD_q0T7LcKWtqKvYBS4L0lPlKkoMQ8,1020
+ synapse_sdk/utils/storage.py,sha256=U3TScqQNgHQ89s0kUqQ8hm3npQAznIyRqzWDKR0YA3E,2581
+ synapse_sdk/utils/string.py,sha256=rEwuZ9SAaZLcQ8TYiwNKr1h2u4CfnrQx7SUL8NWmChg,216
+ synapse_sdk-1.0.0a2.dist-info/LICENSE,sha256=bKzmC5YAg4V1Fhl8OO_tqY8j62hgdncAkN7VrdjmrGk,1101
+ synapse_sdk-1.0.0a2.dist-info/METADATA,sha256=X7w6Ha_jF2RitKekQUxPLjkRixGZfMc4cvU1614ycEk,503
+ synapse_sdk-1.0.0a2.dist-info/WHEEL,sha256=a7TGlA-5DaHMRrarXjVbQagU3Man_dCnGIWMJr5kRWo,91
+ synapse_sdk-1.0.0a2.dist-info/top_level.txt,sha256=ytgJMRK1slVOKUpgcw3LEyHHP7S34J6n_gJzdkcSsw8,12
+ synapse_sdk-1.0.0a2.dist-info/RECORD,,

{synapse_sdk-1.0.0a0.dist-info → synapse_sdk-1.0.0a2.dist-info}/WHEEL
@@ -1,5 +1,5 @@
  Wheel-Version: 1.0
- Generator: setuptools (75.3.0)
+ Generator: setuptools (75.4.0)
  Root-Is-Purelib: true
  Tag: py3-none-any

synapse_sdk-1.0.0a2.dist-info/top_level.txt ADDED
@@ -0,0 +1 @@
+ synapse_sdk

synapse/client/mixins/annotation.py DELETED
@@ -1,28 +0,0 @@
- from ..utils import get_default_url_conversion
-
-
- class AnnotationClientMixin:
-     def get_project(self, pk):
-         path = f'projects/{pk}/'
-         return self._get(path)
-
-     def get_label_tag(self, pk):
-         path = f'label_tags/{pk}/'
-         return self._get(path)
-
-     def list_label_tags(self, payload=None):
-         path = 'label_tags/'
-         return self._list(path, payload)
-
-     def list_labels(self, payload=None, url_conversion=None, list_all=False):
-         path = 'labels/'
-         url_conversion = get_default_url_conversion(url_conversion, files_fields=['files'])
-         return self._list(path, payload, url_conversion, list_all)
-
-     def create_labels(self, data):
-         path = 'labels/'
-         return self._post(path, payload=data)
-
-     def set_tags_labels(self, data, params=None):
-         path = 'labels/set_tags/'
-         return self._post(path, payload=data, params=params)

synapse/client/mixins/integration.py DELETED
@@ -1,12 +0,0 @@
- class IntegrationClientMixin:
-     def get_plugin(self, pk):
-         path = f'plugins/{pk}/'
-         return self._get(path)
-
-     def create_logs(self, data):
-         path = 'logs/'
-         return self._post(path, payload=data)
-
-     def create_task(self, data):
-         path = 'agent_tasks/'
-         return self._post(path, payload=data)

synapse/client/mixins/ml.py DELETED
@@ -1,23 +0,0 @@
- from ..utils import get_default_url_conversion
-
-
- class MLClientMixin:
-     def get_model(self, pk, payload=None, url_conversion=None):
-         path = f'models/{pk}/'
-         url_conversion = get_default_url_conversion(
-             url_conversion, files_fields=['files', 'parent.files'], is_list=False
-         )
-         return self._get(path, payload, url_conversion)
-
-     def create_model(self, data):
-         path = 'models/'
-         return self._post(path, payload=data)
-
-     def update_model(self, pk, data, files=None):
-         path = f'models/{pk}/'
-         return self._patch(path, payload=data, files=files)
-
-     def list_train_dataset(self, payload=None, url_conversion=None, list_all=False):
-         path = 'train_dataset/'
-         url_conversion = get_default_url_conversion(url_conversion, files_fields=['files'])
-         return self._list(path, payload, url_conversion, list_all)

synapse/config.py DELETED
@@ -1,8 +0,0 @@
- try:
-     from constance import config as constance_config
-
-     config = constance_config
- except ImportError:
-     config = None
-
- __all__ = ['config']

synapse/plugins/__init__.py DELETED
@@ -1,39 +0,0 @@
- import argparse
- import os
-
- from dotenv import load_dotenv
-
- from synapse.plugins.utils import get_action
-
-
- action = None
-
-
- def run():
-     global action
-     parser = argparse.ArgumentParser(description='synapse plugin runner')
-
-     # Add arguments
-     parser.add_argument('action', help='action to run on this plugin')
-     parser.add_argument('params', help='parameter of the action')
-     parser.add_argument('--direct', help='run without using ray', action='store_true')
-     parser.add_argument('--debug', help='run with debug mode', action='store_true')
-
-     # Parse arguments
-     args = parser.parse_args()
-
-     # Access parsed arguments
-     action = args.action
-     params = args.params
-     direct = args.direct
-     debug = args.debug
-
-     load_dotenv(os.path.join(os.getcwd(), '.env'))
-
-     action = get_action(action, params, direct=direct, debug=debug)
-     result = action.run_action()
-     if debug:
-         print(result)
-
-
- __all__ = ['run']

synapse/plugins/categories/data_validation/actions/validation.py DELETED
@@ -1,10 +0,0 @@
- from synapse.plugins.categories.base import Action
- from synapse.plugins.categories.decorators import register_action
- from synapse.plugins.enums import RunMethod, PluginCategory
-
-
- @register_action
- class ValidationAction(Action):
-     name = 'validation'
-     category = PluginCategory.DATA_VALIDATION
-     method = RunMethod.TASK

synapse/plugins/categories/export/actions/export.py DELETED
@@ -1,10 +0,0 @@
- from synapse.plugins.categories.base import Action
- from synapse.plugins.categories.decorators import register_action
- from synapse.plugins.enums import RunMethod, PluginCategory
-
-
- @register_action
- class ExportAction(Action):
-     name = 'export'
-     category = PluginCategory.EXPORT
-     method = RunMethod.JOB

synapse/plugins/categories/import/actions/import.py DELETED
@@ -1,10 +0,0 @@
- from synapse.plugins.categories.base import Action
- from synapse.plugins.categories.decorators import register_action
- from synapse.plugins.enums import RunMethod, PluginCategory
-
-
- @register_action
- class ImportAction(Action):
-     name = 'import'
-     category = PluginCategory.IMPORT
-     method = RunMethod.JOB

synapse/plugins/categories/neural_net/actions/inference.py DELETED
@@ -1,10 +0,0 @@
- from synapse.plugins.categories.base import Action
- from synapse.plugins.categories.decorators import register_action
- from synapse.plugins.enums import RunMethod, PluginCategory
-
-
- @register_action
- class InferenceAction(Action):
-     name = 'inference'
-     category = PluginCategory.NEURAL_NET
-     method = RunMethod.RESTAPI

synapse/plugins/categories/neural_net/actions/test.py DELETED
@@ -1,10 +0,0 @@
- from synapse.plugins.categories.base import Action
- from synapse.plugins.categories.decorators import register_action
- from synapse.plugins.enums import RunMethod, PluginCategory
-
-
- @register_action
- class TestAction(Action):
-     name = 'test'
-     category = PluginCategory.NEURAL_NET
-     method = RunMethod.TASK

synapse/plugins/categories/post_annotation/actions/post_annotation.py DELETED
@@ -1,10 +0,0 @@
- from synapse.plugins.categories.base import Action
- from synapse.plugins.categories.decorators import register_action
- from synapse.plugins.enums import RunMethod, PluginCategory
-
-
- @register_action
- class PostAnnotationAction(Action):
-     name = 'post_annotation'
-     category = PluginCategory.POST_ANNOTATION
-     method = RunMethod.TASK

synapse/plugins/categories/pre_annotation/actions/pre_annotation.py DELETED
@@ -1,10 +0,0 @@
- from synapse.plugins.categories.base import Action
- from synapse.plugins.categories.decorators import register_action
- from synapse.plugins.enums import RunMethod, PluginCategory
-
-
- @register_action
- class PreAnnotationAction(Action):
-     name = 'pre_annotation'
-     category = PluginCategory.PRE_ANNOTATION
-     method = RunMethod.TASK

synapse/plugins/upload.py DELETED
@@ -1,79 +0,0 @@
- import os
- import re
- import subprocess
- import tempfile
- import hashlib
- from pathlib import Path
-
- import boto3
-
-
- def calculate_checksum(file_path):
-     md5_hash = hashlib.md5()
-     with open(file_path, 'rb') as f:
-         for byte_block in iter(lambda: f.read(4096), b''):
-             md5_hash.update(byte_block)
-     checksum = md5_hash.hexdigest()
-     return f'dev-{checksum}'
-
-
- def upload_to_s3(file_path, bucket_name, object_name, endpoint_url, access_key, secret_key):
-     s3_client = boto3.client(
-         's3',
-         endpoint_url=endpoint_url,
-         aws_access_key_id=access_key,
-         aws_secret_access_key=secret_key,
-     )
-     s3_client.upload_file(file_path, bucket_name, object_name)
-
-
- def upload_path(
-     source_path,
-     endpoint_url=None,
-     bucket_name=None,
-     access_key=None,
-     secret_key=None,
-     base_url=None,
- ):
-     if not endpoint_url:
-         endpoint_url = os.environ['PLUGIN_UPLOAD_S3_ENDPOINT_URL']
-     if not bucket_name:
-         bucket_name = os.environ['PLUGIN_UPLOAD_S3_BUCKET_NAME']
-     if not access_key:
-         access_key = os.environ['PLUGIN_UPLOAD_S3_ACCESS_KEY']
-     if not secret_key:
-         secret_key = os.environ['PLUGIN_UPLOAD_S3_SECRET_KEY']
-     if not base_url:
-         base_url = os.environ['PLUGIN_UPLOAD_S3_BASE_URL']
-
-     with tempfile.TemporaryDirectory() as temp_dir:
-         temp_archive_path = os.path.join(temp_dir, 'archive.zip')
-         command = f'git ls-files --others --exclude-standard --cached | zip -q --names-stdin {temp_archive_path}'
-
-         subprocess.run(command, cwd=source_path, shell=True, check=True)
-
-         checksum = calculate_checksum(temp_archive_path)
-         # TODO accept the subpath as a parameter
-         s3_object_name = f'assets/{checksum}.zip'
-
-         upload_to_s3(temp_archive_path, bucket_name, s3_object_name, endpoint_url, access_key, secret_key)
-         return f'{base_url}/{bucket_name}/{s3_object_name}'
-
-
- def change_whl_version(whl_name, new_version):
-     pattern = r'^(?P<distribution>.+?)-(?P<version>\d+(\.\d+)*)(?P<rest>-.+\.whl)$'
-     return re.sub(pattern, rf'\g<distribution>-{new_version}\g<rest>', whl_name)
-
-
- def build_and_upload(source_path, endpoint_url, bucket_name, access_key, secret_key, base_url, virtualenv_path='.venv'):
-     # TODO skip if a built whl already exists
-     subprocess.run(f'{virtualenv_path}/bin/python -m build --wheel', cwd=source_path, shell=True, check=True)
-
-     whl_file = next(Path(source_path, 'dist').glob('*.whl'), None)
-     checksum = calculate_checksum(whl_file)
-
-     # TODO accept the subpath as a parameter
-     s3_object_name = f'assets/{change_whl_version(whl_file.name, checksum)}'
-
-     upload_to_s3(str(whl_file), bucket_name, s3_object_name, endpoint_url, access_key, secret_key)
-     return f'{base_url}/{bucket_name}/{s3_object_name}'