synapse-sdk 1.0.0a38__py3-none-any.whl → 1.0.0a40__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.


synapse_sdk/plugins/categories/neural_net/actions/deployment.py CHANGED
@@ -1,7 +1,4 @@
-from packaging import version
-
 from synapse_sdk.clients.exceptions import ClientError
-from synapse_sdk.i18n import gettext as _
 from synapse_sdk.plugins.categories.base import Action
 from synapse_sdk.plugins.categories.decorators import register_action
 from synapse_sdk.plugins.enums import PluginCategory, RunMethod
@@ -30,28 +27,9 @@ class DeploymentAction(Action):
 
         ray_actor_options = self.get_actor_options()
 
-        if self.is_gradio_deployment:
-            from ray.serve.gradio_integrations import GradioServer
-
-            self.assert_gradio_version()
-
-            # GradioIngress differs from serve.ingress(app), thus the difference in self.entrypoint callable
-            try:
-                entrypoint = self.entrypoint().app
-            except (TypeError, ImportError):
-                raise ClientError(
-                    400,
-                    _(
-                        'Gradio app is not callable.'
-                        'Please ensure that your Deployment class defines a callable `app` function'
-                    ),
-                )
-
-            deployment = GradioServer.options(ray_actor_options=ray_actor_options).bind(entrypoint)
-        else:
-            deployment = serve.deployment(ray_actor_options=ray_actor_options)(
-                serve.ingress(app)(self.entrypoint)
-            ).bind(self.envs['SYNAPSE_PLUGIN_RUN_HOST'])
+        deployment = serve.deployment(ray_actor_options=ray_actor_options)(serve.ingress(app)(self.entrypoint)).bind(
+            self.envs['SYNAPSE_PLUGIN_RUN_HOST']
+        )
 
         serve.delete(self.plugin_release.code)
 
@@ -78,30 +56,3 @@ class DeploymentAction(Action):
         except ClientError:
             pass
         return None
-
-    @property
-    def is_gradio_deployment(self):
-        return self.config.get('gradio_app', False)
-
-    def assert_gradio_version(self):
-        """Assert gradio version is not greater than 3.50.2.
-        Ray Serve cannot pickle gradio endpoints, thus gradio version greater than 3.50.2 is not supported (SSE Issues)
-        """
-        GRADIO_VERSION_MAX_ALLOWED = '3.50.2'
-
-        gradio_installed = False
-        gradio_version = None
-        for req in self.requirements:
-            if req.startswith('gradio=='):
-                gradio_installed = True
-                gradio_version = req.split('==')[1]
-                break
-
-        assert gradio_installed, _(
-            'Gradio is not installed or version is not specified. Please install gradio==3.50.2 to use this feature.'
-        )
-
-        if version.parse(gradio_version) > version.parse(GRADIO_VERSION_MAX_ALLOWED):
-            raise AssertionError(
-                f'Gradio version {gradio_version} is greater than maximum allowed version {GRADIO_VERSION_MAX_ALLOWED}'
-            )
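
For reference, the surviving non-Gradio path is Ray Serve's standard FastAPI ingress pattern: wrap the entrypoint class with serve.ingress(app), turn it into a deployment, and bind the run host passed through SYNAPSE_PLUGIN_RUN_HOST. A minimal sketch of that pattern outside the SDK, assuming Ray Serve 2.x, with an illustrative Entrypoint class and run-host URL standing in for self.entrypoint and self.envs['SYNAPSE_PLUGIN_RUN_HOST']:

# Minimal sketch of serve.deployment(...)(serve.ingress(app)(cls)).bind(...).
# Entrypoint and the run-host URL are illustrative stand-ins, not SDK code.
from fastapi import FastAPI
from ray import serve

app = FastAPI()


class Entrypoint:
    def __init__(self, run_host):
        # Mirrors the SYNAPSE_PLUGIN_RUN_HOST value passed via .bind(...)
        self.run_host = run_host

    @app.get('/health')
    def health(self):
        return {'status': 'ok', 'run_host': self.run_host}


deployment = serve.deployment(ray_actor_options={'num_cpus': 1})(serve.ingress(app)(Entrypoint)).bind(
    'http://synapse-backend:8000'
)
serve.run(deployment)
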
synapse_sdk/plugins/categories/neural_net/actions/gradio.py ADDED
@@ -0,0 +1,131 @@
+import contextlib
+import subprocess
+from functools import cached_property
+from pathlib import Path
+
+from synapse_sdk.plugins.categories.base import Action
+from synapse_sdk.plugins.categories.decorators import register_action
+from synapse_sdk.plugins.enums import PluginCategory, RunMethod
+from synapse_sdk.utils.network import get_available_ports_host
+
+
+@register_action
+class GradioAction(Action):
+    name = 'gradio'
+    category = PluginCategory.NEURAL_NET
+    method = RunMethod.JOB
+
+    @property
+    def working_directory(self):
+        dir = Path.cwd() / self.config['directory'].replace('.', '/')
+        assert dir.is_dir(), f'Working directory {dir} does not exist.'
+        return dir
+
+    @property
+    def _requirements(self):
+        return self.config.get('requirements', ['gradio>=5'])
+
+    @property
+    def tag(self):
+        _tag = f'{self.plugin_release.code}-{self.plugin_release.checksum}'
+        return _tag.replace('@', '-')
+
+    @cached_property
+    def deploy_port(self):
+        return get_available_ports_host()
+
+    def deploy(self):
+        self.run.log('deploy', 'Start deploying')
+
+        try:
+            # Write Dockerfile and requirements.txt
+            path_dockerfile = self.write_dockerfile_template()
+            self.write_requirements(path_dockerfile.parent / 'requirements.txt')
+
+            # Build docker image
+            self.build_docker_image(path_dockerfile)
+
+            # Run docker image
+            self.run_docker_image()
+        except Exception as e:
+            self.run.log('deploy', f'Error: {e}')
+            raise e
+
+    def start(self):
+        self.deploy()
+        return {'endpoint': f'http://localhost:{self.deploy_port}'}
+
+    def write_dockerfile_template(self):
+        dockerfile_path = self.working_directory / 'Dockerfile'
+
+        with open(dockerfile_path, 'w') as f:
+            f.write("""FROM python:3.10
+WORKDIR /home/user/app
+
+RUN pip install --no-cache-dir pip -U && \\
+    pip install --no-cache-dir uvicorn
+
+RUN apt-get update && \\
+    apt-get install -y curl && \\
+    curl -fsSL https://deb.nodesource.com/setup_22.x | bash - && \\
+    apt-get install -y nodejs && \\
+    rm -rf /var/lib/apt/lists/* && \\
+    apt-get clean
+
+COPY . /home/user/app
+
+RUN pip install --no-cache-dir -r requirements.txt
+
+EXPOSE 7860
+
+CMD ["python", "app.py"]
+""")
+        return dockerfile_path
+
+    def write_requirements(self, path):
+        with open(path, 'w') as f:
+            f.write('\n'.join(self._requirements))
+
+    def build_docker_image(self, path_dockerfile):
+        self.run.log('deploy', 'Start building docker image')
+        result = subprocess.run(
+            [
+                'docker',
+                'build',
+                '-t',
+                self.tag,
+                '-f',
+                str(path_dockerfile),
+                '.',
+            ],
+            cwd=self.working_directory,
+            check=True,
+        )
+        print(result)
+
+    def run_docker_image(self):
+        self.run.log('deploy', 'Start running docker image')
+
+        # Check for existing container
+        self.run.log('deploy', 'Check for existing container')
+        with contextlib.suppress(subprocess.CalledProcessError):
+            subprocess.run(['docker', 'stop', self.tag], check=True)
+            subprocess.run(['docker', 'rm', self.tag], check=True)
+
+        # Run docker image
+        self.run.log('deploy', 'Starting docker container')
+        subprocess.run(
+            [
+                'docker',
+                'run',
+                '-d',
+                '--name',
+                self.tag,
+                '-p',
+                f'{self.deploy_port}:7860',
+                '-p',
+                '8991-8999:8991-8999',
+                self.tag,
+            ],
+            check=True,
+        )
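
The container built above runs `python app.py` and publishes port 7860, so the plugin's working directory is expected to provide a Gradio app that listens there. A hypothetical app.py compatible with that Dockerfile might look like this (gradio>=5 per the default requirements; the predict function is illustrative):

# Hypothetical app.py for the working directory baked into the image above.
# CMD is `python app.py` and the image exposes 7860, so the app must bind
# 0.0.0.0:7860 to be reachable through the -p {deploy_port}:7860 mapping.
import gradio as gr


def predict(text: str) -> str:
    # Placeholder for the plugin's real inference call.
    return text.upper()


demo = gr.Interface(fn=predict, inputs='text', outputs='text')

if __name__ == '__main__':
    demo.launch(server_name='0.0.0.0', server_port=7860)
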
synapse_sdk/utils/file.py CHANGED
@@ -197,6 +197,10 @@ def convert_file_to_base64(file_path):
     Returns:
         str: Base64 encoded string of the file contents
     """
+    # FIXME base64 is sent sometimes.
+    if file_path.startswith('data:'):
+        return file_path
+
     # Convert string path to Path object
     path = Path(file_path)
 
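
The early return means values that already arrive as data URIs pass through untouched, while real paths keep the old behavior; a small illustration (the truncated data URI is made up):

# Strings that already look like data URIs are returned as-is; regular paths
# are still read from disk and base64-encoded. The example URI is fabricated.
from synapse_sdk.utils.file import convert_file_to_base64

data_uri = 'data:image/png;base64,iVBORw0KGgoAAAANSUhEUg=='
assert convert_file_to_base64(data_uri) == data_uri
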
synapse_sdk/utils/http.py ADDED
@@ -0,0 +1,110 @@
+import logging
+import os
+import tempfile
+import threading
+import time
+from contextlib import contextmanager
+from http.server import HTTPServer, SimpleHTTPRequestHandler
+
+from synapse_sdk.utils.network import get_available_ports_host
+
+
+class SingleFileHttpServer(SimpleHTTPRequestHandler):
+    """
+    Custom HTTP request handler that serves a single specified file
+    regardless of the request path.
+    """
+
+    def __init__(self, *args, file_path=None, content_type=None, **kwargs):
+        self.file_path = file_path
+        self.content_type = content_type
+        super().__init__(*args, **kwargs)
+
+    def do_GET(self):
+        """Handle GET requests by serving the single file."""
+        try:
+            # Always serve the specified file regardless of the path requested
+            self.send_response(200)
+            if self.content_type:
+                self.send_header('Content-type', self.content_type)
+            self.send_header('Content-Length', str(os.path.getsize(self.file_path)))
+            self.end_headers()
+
+            with open(self.file_path, 'rb') as file:
+                self.wfile.write(file.read())
+
+        except Exception as e:
+            self.send_error(500, str(e))
+
+
+@contextmanager
+def temp_file_server(image=None, file_path=None, format='JPEG', host='localhost', port=None, content_type=None):
+    """
+    Context manager that serves a file temporarily via HTTP.
+
+    Args:
+        image: A PIL Image object to serve (optional)
+        file_path: Path to an existing file to serve (optional - used if image not provided)
+        format: Image format when saving a PIL Image (default: "JPEG")
+        host: Host to serve on (default: "localhost")
+        port: Port to serve on (default: auto-selected free port)
+        content_type: Content type header (default: auto-detected based on format)
+
+    Returns:
+        URL where the file is being served
+
+    Usage:
+        with temp_file_serve(image=my_pillow_img) as url:
+            # use url to access the image
+            print(f"Image available at: {url}")
+    """
+    if image is None and file_path is None:
+        raise ValueError('Either image or file_path must be provided')
+
+    # Use a free port if none specified
+    if port is None:
+        port = get_available_ports_host(start_port=8991, end_port=8999)
+
+    temp_dir = None
+
+    try:
+        if image is not None:
+            temp_dir = tempfile.mkdtemp()
+            ext_map = {'JPEG': '.jpg', 'PNG': '.png', 'GIF': '.gif', 'WEBP': '.webp'}
+            content_type_map = {'JPEG': 'image/jpeg', 'PNG': 'image/png', 'GIF': 'image/gif', 'WEBP': 'image/webp'}
+
+            ext = ext_map.get(format, '.jpg')
+            if content_type is None:
+                content_type = content_type_map.get(format, 'image/jpeg')
+
+            temp_file_path = os.path.join(temp_dir, f'temp_image{ext}')
+            image.save(temp_file_path, format=format)
+            file_path = temp_file_path
+
+        def handler(*args, **kwargs):
+            return SingleFileHttpServer(*args, file_path=file_path, content_type=content_type, **kwargs)
+
+        server = HTTPServer((host, port), handler)
+
+        server_thread = threading.Thread(target=server.serve_forever)
+        server_thread.daemon = True
+        server_thread.start()
+
+        time.sleep(0.1)
+
+        url = f'http://{host}:{port}'
+
+        try:
+            yield url
+        finally:
+            server.shutdown()
+            server.server_close()
+
+    finally:
+        if temp_dir:
+            try:
+                if temp_file_path and os.path.exists(temp_file_path):
+                    os.unlink(temp_file_path)
+                os.rmdir(temp_dir)
+            except Exception as e:
+                logging.warning(f'Error cleaning up temporary files: {e}')
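
A usage sketch for the new helper, assuming Pillow is installed and passing an explicit port so the example does not depend on the nmap-based port scan:

# Serve an in-memory Pillow image over HTTP for the lifetime of the block.
# Pillow and the hard-coded port 8995 are assumptions of this sketch.
import urllib.request

from PIL import Image

from synapse_sdk.utils.http import temp_file_server

img = Image.new('RGB', (64, 64), color='red')
with temp_file_server(image=img, format='PNG', port=8995) as url:
    data = urllib.request.urlopen(url).read()
    print(f'served {len(data)} bytes at {url}')
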
synapse_sdk/utils/network.py CHANGED
@@ -14,3 +14,24 @@ def clean_url(url, remove_query_params=True, remove_fragment=True):
         query,
         fragment,
     ))
+
+
+def get_available_ports_host(start_port=8900, end_port=8990):
+    import nmap
+
+    nm = nmap.PortScanner()
+
+    scan_range = f'{start_port}-{end_port}'
+    nm.scan(hosts='host.docker.internal', arguments=f'-p {scan_range}')
+
+    try:
+        open_ports = nm['host.docker.internal']['tcp'].keys()
+        open_ports = [int(port) for port in open_ports]
+    except KeyError:
+        open_ports = []
+
+    for port in range(start_port, end_port + 1):
+        if port not in open_ports:
+            return port
+
+    raise IOError(f'No free ports available in range {start_port}-{end_port}')
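
The helper drives the nmap binary through python-nmap and scans host.docker.internal, so it is aimed at code running in a container alongside the Docker host. A rough usage sketch, assuming both python-nmap and the nmap executable are available:

# Pick a host port that nmap does not report as open on host.docker.internal.
# Outside Docker the hostname may not resolve, in which case the scan yields
# nothing and start_port is returned.
from synapse_sdk.utils.network import get_available_ports_host

port = get_available_ports_host(start_port=8900, end_port=8910)
print(f'publishing the container on host port {port}')
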
synapse_sdk-1.0.0a38.dist-info/METADATA → synapse_sdk-1.0.0a40.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: synapse-sdk
-Version: 1.0.0a38
+Version: 1.0.0a40
 Summary: synapse sdk
 Author-email: datamaker <developer@datamaker.io>
 License: MIT
@@ -21,6 +21,7 @@ Requires-Dist: universal-pathlib
 Requires-Dist: fsspec[gcs,s3,sftp]
 Provides-Extra: all
 Requires-Dist: ray[all]; extra == "all"
+Requires-Dist: python-nmap; extra == "all"
 Dynamic: license-file
 
 This is the SDK to develop synapse plugins
synapse_sdk-1.0.0a38.dist-info/RECORD → synapse_sdk-1.0.0a40.dist-info/RECORD CHANGED
@@ -67,7 +67,8 @@ synapse_sdk/plugins/categories/export/templates/plugin/__init__.py,sha256=47DEQp
 synapse_sdk/plugins/categories/export/templates/plugin/export.py,sha256=39XLGo8ui5FscbwZyX3JwmrJqGGvOYrY3FMYDKXwTOQ,5192
 synapse_sdk/plugins/categories/neural_net/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 synapse_sdk/plugins/categories/neural_net/actions/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-synapse_sdk/plugins/categories/neural_net/actions/deployment.py,sha256=oetIwZoee5vxriPX3r1onmxgwojUyaRTlnBIdaQ1zk8,3895
+synapse_sdk/plugins/categories/neural_net/actions/deployment.py,sha256=y2LrS-pwazqRI5O0q1NUy45NQYsBj6ykbrXnDMs_fqE,1987
+synapse_sdk/plugins/categories/neural_net/actions/gradio.py,sha256=pcrMr3pmNJmVjS2wRvyGmCvIoxYKSQJAheXqJBNRfm4,3790
 synapse_sdk/plugins/categories/neural_net/actions/inference.py,sha256=0a655ELqNVjPFZTJDiw4EUdcMCPGveUEKyoYqpwMFBU,1019
 synapse_sdk/plugins/categories/neural_net/actions/test.py,sha256=JY25eg-Fo6WbgtMkGoo_qNqoaZkp3AQNEypJmeGzEog,320
 synapse_sdk/plugins/categories/neural_net/actions/train.py,sha256=kve6iTCg2kUeavMQTR2JFuoYDu-QWZFFlB58ZICQtdM,5406
@@ -118,9 +119,10 @@ synapse_sdk/shared/enums.py,sha256=WMZPag9deVF7VCXaQkLk7ly_uX1KwbNzRx9TdvgaeFE,1
 synapse_sdk/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 synapse_sdk/utils/dataset.py,sha256=zWTzFmv589izFr62BDuApi3r5FpTsdm-5AmriC0AEdM,1865
 synapse_sdk/utils/debug.py,sha256=F7JlUwYjTFZAMRbBqKm6hxOIz-_IXYA8lBInOS4jbS4,100
-synapse_sdk/utils/file.py,sha256=Qb5FihoX1J0wsF2UAckc0d0c3IMHn0NrX9Vt3cXAwt4,6732
+synapse_sdk/utils/file.py,sha256=wWBQAx0cB5a-fjfRMeJV-KjBil1ZyKRz-vXno3xBSoo,6834
+synapse_sdk/utils/http.py,sha256=GqYONHfovwBmP4p3ZczVCNvn4oApx2QzfzGIEiICqJo,3770
 synapse_sdk/utils/module_loading.py,sha256=chHpU-BZjtYaTBD_q0T7LcKWtqKvYBS4L0lPlKkoMQ8,1020
-synapse_sdk/utils/network.py,sha256=wg-oFM0gKK5REqIUO8d-x9yXJfqbnkSbbF0_qyxpwz4,412
+synapse_sdk/utils/network.py,sha256=WI8qn6KlKpHdMi45V57ofKJB8zusJrbQsxT74LwVfsY,1000
 synapse_sdk/utils/string.py,sha256=rEwuZ9SAaZLcQ8TYiwNKr1h2u4CfnrQx7SUL8NWmChg,216
 synapse_sdk/utils/pydantic/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 synapse_sdk/utils/pydantic/config.py,sha256=1vYOcUI35GslfD1rrqhFkNXXJOXt4IDqOPSx9VWGfNE,123
@@ -132,9 +134,9 @@ synapse_sdk/utils/storage/providers/__init__.py,sha256=x7RGwZryT2FpVxS7fGWryRVpq
 synapse_sdk/utils/storage/providers/gcp.py,sha256=i2BQCu1Kej1If9SuNr2_lEyTcr5M_ncGITZrL0u5wEA,363
 synapse_sdk/utils/storage/providers/s3.py,sha256=W94rQvhGRXti3R4mYP7gmU5pcyCQpGFIBLvxxqLVdRM,2231
 synapse_sdk/utils/storage/providers/sftp.py,sha256=_8s9hf0JXIO21gvm-JVS00FbLsbtvly4c-ETLRax68A,1426
-synapse_sdk-1.0.0a38.dist-info/licenses/LICENSE,sha256=bKzmC5YAg4V1Fhl8OO_tqY8j62hgdncAkN7VrdjmrGk,1101
-synapse_sdk-1.0.0a38.dist-info/METADATA,sha256=GxLFIbbXbVYUjusRE7MAZefelK_XNYz0oGTOOCNuybU,1160
-synapse_sdk-1.0.0a38.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
-synapse_sdk-1.0.0a38.dist-info/entry_points.txt,sha256=VNptJoGoNJI8yLXfBmhgUefMsmGI0m3-0YoMvrOgbxo,48
-synapse_sdk-1.0.0a38.dist-info/top_level.txt,sha256=ytgJMRK1slVOKUpgcw3LEyHHP7S34J6n_gJzdkcSsw8,12
-synapse_sdk-1.0.0a38.dist-info/RECORD,,
+synapse_sdk-1.0.0a40.dist-info/licenses/LICENSE,sha256=bKzmC5YAg4V1Fhl8OO_tqY8j62hgdncAkN7VrdjmrGk,1101
+synapse_sdk-1.0.0a40.dist-info/METADATA,sha256=P24axYuG6BpxcsvfyKnMuZh6NpNBQCaN3gcRnggwxDg,1203
+synapse_sdk-1.0.0a40.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
+synapse_sdk-1.0.0a40.dist-info/entry_points.txt,sha256=VNptJoGoNJI8yLXfBmhgUefMsmGI0m3-0YoMvrOgbxo,48
+synapse_sdk-1.0.0a40.dist-info/top_level.txt,sha256=ytgJMRK1slVOKUpgcw3LEyHHP7S34J6n_gJzdkcSsw8,12
+synapse_sdk-1.0.0a40.dist-info/RECORD,,