synapse-sdk 1.0.0a36__py3-none-any.whl → 1.0.0a38__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of synapse-sdk might be problematic.

synapse_sdk/clients/base.py CHANGED
@@ -11,6 +11,7 @@ from synapse_sdk.utils.file import files_url_to_path_from_objs
 class BaseClient:
     name = None
     base_url = None
+    page_size = 100
 
     def __init__(self, base_url):
         self.base_url = base_url
@@ -146,7 +147,8 @@ class BaseClient:
         else:
             return response
 
-    def _list_all(self, path, url_conversion=None, params=None, **kwargs):
+    def _list_all(self, path, url_conversion=None, params={}, **kwargs):
+        params['page_size'] = self.page_size
         response = self._get(path, url_conversion, params=params, **kwargs)
         yield from response['results']
         if response['next']:
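Note: the new signature replaces `params=None` with the mutable default `params={}`; in Python that dict is created once at function definition time and shared by every call that omits the argument. A minimal illustration of the pitfall (hypothetical `Demo` class, not SDK code):

```python
class Demo:
    page_size = 100

    def list_all(self, params={}):
        # The default dict is created once, so every call that
        # omits `params` mutates the same shared object.
        params['page_size'] = self.page_size
        return params


d = Demo()
assert d.list_all() is d.list_all()  # both calls return the shared default dict
```

Here the effect is benign because `page_size` is simply overwritten on each call, but caller-supplied dicts are also mutated in place.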
synapse_sdk/plugins/categories/base.py CHANGED
@@ -133,6 +133,11 @@ class Action:
     def plugin_package_manager(self):
         return self.plugin_config.get('package_manager', 'pip')
 
+    @property
+    def package_manager_options(self):
+        options = {'pip': {}, 'uv': {'uv_pip_install_options': []}}
+        return options[self.plugin_package_manager]
+
     def get_run(self):
         context = {
             'plugin_release': self.plugin_release,
@@ -147,13 +152,16 @@ class Action:
         return {env: os.environ[env] for env in self.REQUIRED_ENVS if env in os.environ}
 
     def get_runtime_env(self):
-        runtime_env = {self.plugin_package_manager: [], 'working_dir': self.plugin_url}
+        runtime_env = {self.plugin_package_manager: {'packages': []}, 'working_dir': self.plugin_url}
 
         if self.requirements:
-            runtime_env[self.plugin_package_manager] += self.requirements
+            runtime_env[self.plugin_package_manager]['packages'] += self.requirements
 
         if self.debug:
-            runtime_env[self.plugin_package_manager] += self.debug_modules
+            runtime_env[self.plugin_package_manager]['packages'] += self.debug_modules
+
+        for key, value in self.package_manager_options.items():
+            runtime_env[self.plugin_package_manager][key] = value
 
         # Must run last
         runtime_env['env_vars'] = self.envs
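For context, Ray's runtime_env accepts `pip`/`uv` either as a plain list of packages or as a dict with a `packages` key plus installer options, which is the shape the SDK now emits. A sketch of what this method would now build for a uv-based plugin, assuming illustrative requirements and a placeholder plugin URL:

```python
# Hypothetical result of Action.get_runtime_env() when
# plugin_package_manager == 'uv' and requirements == ['numpy', 'pandas']:
runtime_env = {
    'uv': {
        'packages': ['numpy', 'pandas'],  # requirements, plus debug modules when debug is on
        'uv_pip_install_options': [],     # merged in from package_manager_options
    },
    'working_dir': 'https://storage.example.com/plugins/my-plugin.zip',  # placeholder plugin_url
    'env_vars': {'SYNAPSE_PLUGIN_STORAGE': '...'},  # filled in last from self.envs
}
```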
synapse_sdk/plugins/categories/export/actions/export.py CHANGED
@@ -1,17 +1,77 @@
+import json
 from abc import ABC, abstractmethod
-from typing import Any, Literal
+from datetime import datetime
+from typing import Annotated, Any, Literal
 
-from pydantic import BaseModel, field_validator
+from pydantic import AfterValidator, BaseModel, field_validator
 from pydantic_core import PydanticCustomError
 
 from synapse_sdk.clients.exceptions import ClientError
 from synapse_sdk.i18n import gettext as _
 from synapse_sdk.plugins.categories.base import Action
 from synapse_sdk.plugins.categories.decorators import register_action
+from synapse_sdk.plugins.categories.export.enums import ExportStatus
 from synapse_sdk.plugins.enums import PluginCategory, RunMethod
+from synapse_sdk.plugins.models import Run
+from synapse_sdk.utils.pydantic.validators import non_blank
 from synapse_sdk.utils.storage import get_pathlib
 
 
+class ExportRun(Run):
+    class DataFileLog(BaseModel):
+        """Data file log model."""
+
+        target_id: int
+        data_file_info: str | None
+        status: ExportStatus
+        error: str | None = None
+        created: str
+
+    def log_file(
+        self, log_type: str, target_id: int, data_file_info: dict, status: ExportStatus, error: str | None = None
+    ):
+        """Log export file information.
+
+        Args:
+            log_type (str): The type of log ('export_data_file' or 'export_origin_file').
+            target_id (int): The ID of the data file.
+            data_file_info (dict): The JSON info of the data file.
+            status (ExportStatus): The status of the data file.
+            error (str | None): The error message, if any.
+        """
+        now = datetime.now().isoformat()
+        self.log(
+            log_type,
+            self.DataFileLog(
+                target_id=target_id,
+                data_file_info=json.dumps(data_file_info),
+                status=status.value,
+                error=error,
+                created=now,
+            ).model_dump(),
+        )
+
+    def export_log_json_file(
+        self,
+        target_id: int,
+        data_file_info: dict,
+        status: ExportStatus = ExportStatus.STAND_BY,
+        error: str | None = None,
+    ):
+        """Log export json data file."""
+        self.log_file('export_data_file', target_id, data_file_info, status, error)
+
+    def export_log_original_file(
+        self,
+        target_id: int,
+        data_file_info: dict,
+        status: ExportStatus = ExportStatus.STAND_BY,
+        error: str | None = None,
+    ):
+        """Log export origin data file."""
+        self.log_file('export_origin_file', target_id, data_file_info, status, error)
+
+
 class ExportTargetHandler(ABC):
     """
     Abstract base class for handling export targets.
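For a sense of what these methods emit: a call such as `run.export_log_json_file(42, {'file_name': 'sample.json'})` would, assuming pydantic's default serialization, log a payload shaped like:

```python
{
    'target_id': 42,
    'data_file_info': '{"file_name": "sample.json"}',  # the dict serialized via json.dumps
    'status': ExportStatus.STAND_BY,                   # a str subclass equal to 'stand_by'
    'error': None,
    'created': '2025-01-01T12:00:00.000000',           # datetime.now().isoformat(); illustrative
}
```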
@@ -153,6 +213,8 @@ class ExportParams(BaseModel):
     Parameters for the export action.
 
     Attributes:
+        name (str): The name of the action.
+        description (str | None): The description of the action.
         storage (int): The storage ID to save the exported data.
         save_original_file (bool): Whether to save the original file.
         path (str): The path to save the exported data.
@@ -160,6 +222,8 @@ class ExportParams(BaseModel):
         filter (dict): The filter criteria to apply.
     """
 
+    name: Annotated[str, AfterValidator(non_blank)]
+    description: str | None = None
     storage: int
     save_original_file: bool = True
     path: str
@@ -193,6 +257,7 @@ class ExportAction(Action):
     category = PluginCategory.EXPORT
     method = RunMethod.JOB
     params_model = ExportParams
+    run_class = ExportRun
     progress_categories = {
         'dataset_conversion': {
             'proportion': 100,
synapse_sdk/plugins/categories/export/enums.py ADDED
@@ -0,0 +1,7 @@
+from enum import Enum
+
+
+class ExportStatus(str, Enum):
+    SUCCESS = 'success'
+    FAILED = 'failed'
+    STAND_BY = 'stand_by'
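Side note on the `(str, Enum)` pattern: members are real strings, so they compare equal to their values and survive JSON round-trips, which is what lets `status.value` feed straight into the pydantic models above. A quick check:

```python
from enum import Enum


class ExportStatus(str, Enum):
    SUCCESS = 'success'
    FAILED = 'failed'
    STAND_BY = 'stand_by'


assert ExportStatus.SUCCESS == 'success'                  # compares as a string
assert ExportStatus('stand_by') is ExportStatus.STAND_BY  # lookup by value
```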
synapse_sdk/plugins/categories/export/templates/plugin/export.py CHANGED
@@ -3,6 +3,8 @@ from pathlib import Path
 
 import requests
 
+from synapse_sdk.plugins.categories.export.enums import ExportStatus
+
 
 def export(run, export_items, path_root, **params):
     """Executes the export task.
@@ -54,12 +56,12 @@ def export(run, export_items, path_root, **params):
         if save_original_file_flag:
             if no == 1:
                 run.log_message('Saving original file.')
-            save_original_file(final_data, origin_files_output_path, errors_original_file_list)
+            save_original_file(run, final_data, origin_files_output_path, errors_original_file_list)
 
         # Extract data as JSON files
         if no == 1:
             run.log_message('Saving json file.')
-        save_as_json(final_data, json_output_path, errors_json_file_list)
+        save_as_json(run, final_data, json_output_path, errors_json_file_list)
 
     run.end_log()
 
@@ -100,10 +102,11 @@ def get_original_file_pathlib(files):
     return Path(files['meta']['path_original'])
 
 
-def save_original_file(result, base_path, error_file_list):
+def save_original_file(run, result, base_path, error_file_list):
    """Saves the original file.
 
     Args:
+        run : Execution object
         result (dict): API response data containing file information.
         base_path (Path): The directory where the file will be saved.
         error_file_list (list): A list to store error files.
@@ -111,17 +114,25 @@ def save_original_file(result, base_path, error_file_list):
     file_url = result['files']['url']
     file_name = get_original_file_pathlib(result['files']).name
     response = requests.get(file_url)
+    file_info = {'file_name': file_name}
+    error_msg = ''
     try:
         with (base_path / file_name).open('wb') as file:
             file.write(response.content)
+        status = ExportStatus.SUCCESS
     except Exception as e:
-        error_file_list.append([file_name, str(e)])
+        error_msg = str(e)
+        error_file_list.append([file_name, error_msg])
+        status = ExportStatus.FAILED
+
+    run.export_log_original_file(result['id'], file_info, status, error_msg)
 
 
-def save_as_json(result, base_path, error_file_list):
+def save_as_json(run, result, base_path, error_file_list):
     """Saves the data as a JSON file.
 
     Args:
+        run : Execution object
         result (dict): API response data containing file information.
         base_path (Path): The directory where the file will be saved.
         error_file_list (list): A list to store error files.
@@ -129,8 +140,15 @@ def save_as_json(result, base_path, error_file_list):
     # Default save file name: original file name
     file_name = get_original_file_pathlib(result['files']).stem
     json_data = result['data']
+    file_info = {'file_name': file_name}
+    error_msg = ''
     try:
         with (base_path / f'{file_name}.json').open('w', encoding='utf-8') as f:
             json.dump(json_data, f, indent=4, ensure_ascii=False)
+        status = ExportStatus.SUCCESS
     except Exception as e:
+        error_msg = str(e)
         error_file_list.append([f'{file_name}.json', str(e)])
+        status = ExportStatus.FAILED
+
+    run.export_log_json_file(result['id'], file_info, status, error_msg)
synapse_sdk/plugins/categories/neural_net/actions/tune.py ADDED
@@ -0,0 +1,268 @@
+import copy
+import tempfile
+from pathlib import Path
+from typing import Annotated, Optional
+
+from pydantic import AfterValidator, BaseModel, field_validator
+from pydantic_core import PydanticCustomError
+
+from synapse_sdk.clients.exceptions import ClientError
+from synapse_sdk.plugins.categories.decorators import register_action
+from synapse_sdk.plugins.categories.neural_net.actions.train import TrainAction, TrainRun
+from synapse_sdk.plugins.enums import PluginCategory, RunMethod
+from synapse_sdk.utils.file import archive
+from synapse_sdk.utils.module_loading import import_string
+from synapse_sdk.utils.pydantic.validators import non_blank
+
+
+class TuneRun(TrainRun):
+    is_tune = True
+    completed_samples = 0
+    num_samples = 0
+    checkpoint_output = None
+
+
+class TuneConfig(BaseModel):
+    mode: Optional[str] = None
+    metric: Optional[str] = None
+    num_samples: int = 1
+    max_concurrent_trials: Optional[int] = None
+
+
+class TuneParams(BaseModel):
+    name: Annotated[str, AfterValidator(non_blank)]
+    description: str
+    checkpoint: int | None
+    dataset: int
+    tune_config: TuneConfig
+
+    @field_validator('name')
+    @staticmethod
+    def unique_name(value, info):
+        action = info.context['action']
+        client = action.client
+        try:
+            job_exists = client.exists(
+                'list_jobs',
+                params={
+                    'ids_ex': action.job_id,
+                    'category': 'neural_net',
+                    'job__action': 'tune',
+                    'is_active': True,
+                    'params': f'name:{value}',
+                },
+            )
+            assert not job_exists, '존재하는 튜닝 작업 이름입니다.'
+        except ClientError:
+            raise PydanticCustomError('client_error', '')
+        return value
+
+
+@register_action
+class TuneAction(TrainAction):
+    """
+    **Must read** Important notes before using Tune:
+
+    1. Path to the model output (which is the return value of your train function)
+       should be set to the checkpoint_output attribute of the run object **before**
+       starting the training.
+    2. Before exiting the training function, report the results to Tune.
+    3. When using own tune.py, take note of the difference in the order of parameters.
+       tune() function starts with hyperparameter, run, dataset, checkpoint, **kwargs
+       whereas the train() function starts with run, dataset, hyperparameter, checkpoint, **kwargs.
+    ----
+    1)
+    Set the output path for the checkpoint to export best model
+
+    output_path = Path('path/to/your/weights')
+    run.checkpoint_output = str(output_path)
+
+    2)
+    Before exiting the training function, report the results to Tune.
+    The results_dict should contain the metrics you want to report.
+
+    Example: (In train function)
+    results_dict = {
+        "accuracy": accuracy,
+        "loss": loss,
+        # Add other metrics as needed
+    }
+    if hasattr(self.dm_run, 'is_tune') and self.dm_run.is_tune:
+        tune.report(results_dict, checkpoint=tune.Checkpoint.from_directory(self.dm_run.checkpoint_output))
+
+
+    3)
+    tune() function takes hyperparameter, run, dataset, checkpoint, **kwargs in that order
+    whereas train() function takes run, dataset, hyperparameter, checkpoint, **kwargs in that order.
+
+    --------------------------------------------------------------------------------------------------------
+
+    **중요** Tune 사용 전 반드시 읽어야 할 사항들
+
+    1. 본 플러그인의 train 함수에서, 학습을 진행하기 코드 전에
+       결과 모델 파일의 경로(train함수의 리턴 값)을 checkpoint_output 속성에 설정해야 합니다.
+    2. 학습이 종료되기 전에, 결과를 Tune에 보고해야 합니다.
+    3. 플러그인에서 tune.py를 직접 생성해서 사용할 시, 매개변수의 순서가 다릅니다.
+
+    ----
+    1)
+    체크포인트를 설정할 경로를 지정합니다.
+    output_path = Path('path/to/your/weights')
+    run.checkpoint_output = str(output_path)
+
+    2)
+    학습이 종료되기 전에, 결과를 Tune에 보고합니다.
+    results_dict = {
+        "accuracy": accuracy,
+        "loss": loss,
+        # 필요한 다른 메트릭 추가
+    }
+    if hasattr(self.dm_run, 'is_tune') and self.dm_run.is_tune:
+        tune.report(results_dict, checkpoint=tune.Checkpoint.from_directory(self.dm_run.checkpoint_output))
+
+    3)
+    tune() 함수는 hyperparameter, run, dataset, checkpoint, **kwargs 순서이고
+    train() 함수는 run, dataset, hyperparameter, checkpoint, **kwargs 순서입니다.
+    """
+
+    name = 'tune'
+    category = PluginCategory.NEURAL_NET
+    method = RunMethod.JOB
+    run_class = TuneRun
+    params_model = TuneParams
+    progress_categories = {
+        'dataset': {
+            'proportion': 5,
+        },
+        'trials': {
+            'proportion': 90,
+        },
+        'model_upload': {
+            'proportion': 5,
+        },
+    }
+
+    def start(self):
+        from ray import tune
+
+        # download dataset
+        self.run.log_message('Preparing dataset for hyperparameter tuning.')
+        input_dataset = self.get_dataset()
+
+        # retrieve checkpoint
+        checkpoint = None
+        if self.params['checkpoint']:
+            self.run.log_message('Retrieving checkpoint.')
+            checkpoint = self.get_model(self.params['checkpoint'])
+
+        # train dataset
+        self.run.log_message('Starting training for hyperparameter tuning.')
+
+        # Save num_samples to TuneRun for logging
+        self.run.num_samples = self.params['tune_config']['num_samples']
+
+        entrypoint = self.entrypoint
+        if not self._tune_override_exists():
+            # entrypoint must be train entrypoint
+            def _tune(param_space, run, dataset, checkpoint=None, **kwargs):
+                return entrypoint(run, dataset, param_space, checkpoint, **kwargs)
+
+            entrypoint = _tune
+
+        trainable = tune.with_parameters(entrypoint, run=self.run, dataset=input_dataset, checkpoint=checkpoint)
+        tune_config = self.params['tune_config']
+
+        hyperparameter = self.params['hyperparameter']
+        param_space = self.convert_tune_params(hyperparameter)
+        temp_path = tempfile.TemporaryDirectory()
+
+        tuner = tune.Tuner(
+            tune.with_resources(trainable, resources=self.tune_resources),
+            tune_config=tune.TuneConfig(**tune_config),
+            run_config=tune.RunConfig(
+                name=f'synapse_tune_hpo_{self.job_id}',
+                log_to_file=('stdout.log', 'stderr.log'),
+                storage_path=temp_path.name,
+            ),
+            param_space=param_space,
+        )
+        result = tuner.fit()
+
+        best_result = result.get_best_result()
+
+        # upload model_data
+        self.run.log_message('Registering best model data.')
+        self.run.set_progress(0, 1, category='model_upload')
+        self.create_model_from_result(best_result)
+        self.run.set_progress(1, 1, category='model_upload')
+
+        self.run.end_log()
+
+        return {'best_result': best_result.config}
+
+    @property
+    def tune_resources(self):
+        resources = {}
+        for option in ['num_cpus', 'num_gpus']:
+            option_value = self.params.get(option)
+            if option_value:
+                # Remove the 'num_' prefix and trailing s from the option name
+                resources[(lambda s: s[4:-1])(option)] = option_value
+        return resources
+
+    def create_model_from_result(self, result):
+        params = copy.deepcopy(self.params)
+        configuration_fields = ['hyperparameter']
+        configuration = {field: params.pop(field) for field in configuration_fields}
+
+        with tempfile.TemporaryDirectory() as temp_path:
+            archive_path = Path(temp_path, 'archive.zip')
+
+            # Archive tune results
+            # https://docs.ray.io/en/latest/tune/tutorials/tune_get_data_in_and_out.html#getting-data-out-of-tune-using-checkpoints-other-artifacts
+            archive(result.path, archive_path)
+
+            return self.client.create_model({
+                'plugin': self.plugin_release.plugin,
+                'version': self.plugin_release.version,
+                'file': str(archive_path),
+                'configuration': configuration,
+                **params,
+            })
+
+    @staticmethod
+    def convert_tune_params(param_list):
+        """
+        Convert YAML hyperparameter configuration to Ray Tune parameter dictionary.
+
+        Args:
+            param_list (list): List of hyperparameter configurations.
+
+        Returns:
+            dict: Ray Tune parameter dictionary
+        """
+        from ray import tune
+
+        param_space = {}
+
+        for param in param_list:
+            name = param['name']
+            param_type = param['type']
+
+            if param_type == 'loguniform':
+                param_space[name] = tune.loguniform(param['min'], param['max'])
+            elif param_type == 'choice':
+                param_space[name] = tune.choice(param['options'])
+            elif param_type == 'randint':
+                param_space[name] = tune.randint(param['min'], param['max'])
+            # Add more type handlers as needed
+
+        return param_space
+
+    @staticmethod
+    def _tune_override_exists(module_path='plugin.tune') -> bool:
+        try:
+            import_string(module_path)
+            return True
+        except ImportError:
+            return False
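To make the tuning protocol from the docstring concrete, here is a minimal sketch of a plugin `train` entrypoint that cooperates with `TuneAction`; the weights path and the `loss` metric are illustrative, and the signature follows the train-order described above (`run, dataset, hyperparameter, checkpoint, **kwargs`):

```python
from pathlib import Path

from ray import tune


def train(run, dataset, hyperparameter, checkpoint=None, **kwargs):
    # 1) Point checkpoint_output at the weights directory *before* training,
    #    so the trial's model can be exported as a Tune checkpoint.
    output_path = Path('weights')  # illustrative location
    output_path.mkdir(exist_ok=True)
    run.checkpoint_output = str(output_path)

    # ... actual training with `hyperparameter` would happen here ...
    loss = 0.123  # stand-in result

    # 2) Report metrics (and the checkpoint) to Tune before returning.
    if getattr(run, 'is_tune', False):
        tune.report(
            {'loss': loss},
            checkpoint=tune.Checkpoint.from_directory(run.checkpoint_output),
        )

    return str(output_path)
```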
synapse_sdk/plugins/categories/upload/actions/upload.py CHANGED
@@ -1,3 +1,6 @@
+import json
+from datetime import datetime
+from enum import Enum
 from typing import Annotated, Dict, List
 
 from pydantic import AfterValidator, BaseModel, field_validator
@@ -16,8 +19,69 @@ from synapse_sdk.utils.pydantic.validators import non_blank
 from synapse_sdk.utils.storage import get_pathlib
 
 
+class UploadStatus(str, Enum):
+    SUCCESS = 'success'
+    FAILED = 'failed'
+
+
 class UploadRun(Run):
-    pass
+    class DataFileLog(BaseModel):
+        """Data file log model."""
+
+        data_file_info: str | None
+        status: UploadStatus
+        created: str
+
+    class DataUnitLog(BaseModel):
+        """Data unit log model."""
+
+        data_unit_id: int | None
+        status: UploadStatus
+        created: str
+
+    class TaskLog(BaseModel):
+        """Task log model."""
+
+        task_id: int | None
+        status: UploadStatus
+        created: str
+
+    def log_data_file(self, data_file_info: dict, status: UploadStatus):
+        """Upload data_file log.
+
+        Args:
+            data_file_info (dict): The json info of the data file.
+            checksum (str): The checksum of the data file.
+            status (DataUnitStatus): The status of the data unit.
+        """
+        now = datetime.now().isoformat()
+        self.log(
+            'upload_data_file',
+            self.DataFileLog(data_file_info=json.dumps(data_file_info), status=status.value, created=now).model_dump(),
+        )
+
+    def log_data_unit(self, data_unit_id: int, status: UploadStatus):
+        """Upload data_unit log.
+
+        Args:
+            data_unit_id (int): The ID of the data unit.
+            status (DataUnitStatus): The status of the data unit.
+        """
+        now = datetime.now().isoformat()
+        self.log(
+            'upload_data_unit',
+            self.DataUnitLog(data_unit_id=data_unit_id, status=status.value, created=now).model_dump(),
+        )
+
+    def log_task(self, task_id: int, status: UploadStatus):
+        """Upload task log.
+
+        Args:
+            task_id (int): The ID of the task.
+            status (UploadStatus): The status of the task.
+        """
+        now = datetime.now().isoformat()
+        self.log('upload_task', self.TaskLog(task_id=task_id, status=status.value, created=now).model_dump())
 
 
 class UploadParams(BaseModel):
@@ -108,6 +172,7 @@ class UploadAction(Action):
     name = 'upload'
     category = PluginCategory.UPLOAD
     method = RunMethod.JOB
+    run_class = UploadRun
     progress_categories = {
         'analyze_collection': {
             'proportion': 5,
@@ -144,9 +209,7 @@ class UploadAction(Action):
         uploader = self.get_uploader(pathlib_cwd)
 
         # Analyze Collection file specifications to determine the data structure for upload.
-        self.run.set_progress(0, 1, category='analyze_collection')
         file_specification_template = self._analyze_collection()
-        self.run.set_progress(1, 1, category='analyze_collection')
 
         # Setup result dict.
         result = {}
155
218
  organized_files = uploader.handle_upload_files()
156
219
  if not self._validate_organized_files(file_specification_template, organized_files):
157
220
  self.run.log_message('Validate organized files failed.')
221
+ self.run.end_log()
158
222
  return result
159
223
 
160
224
  # Upload files to synapse-backend.
161
225
  organized_files_count = len(organized_files)
162
226
  if not organized_files_count:
163
227
  self.run.log_message('Files not found on the path.', context=Context.WARNING.value)
228
+ self.run.end_log()
164
229
  return result
165
-
166
- self.run.set_progress(0, organized_files_count, category='upload_data_files')
167
- self.run.log_message('Uploading data files...')
168
- result['uploaded_files'] = self._upload_files(organized_files)
169
- self.run.set_progress(organized_files_count, organized_files_count, category='upload_data_files')
170
- self.run.log_message('Upload data files completed.')
230
+ uploaded_files = self._upload_files(organized_files, organized_files_count)
231
+ result['uploaded_files_count'] = len(uploaded_files)
171
232
 
172
233
  # Generate data units for the uploaded data.
173
- upload_result_count = len(result['uploaded_files'])
234
+ upload_result_count = len(uploaded_files)
174
235
  if not upload_result_count:
175
236
  self.run.log_message('No files were uploaded.', context=Context.WARNING.value)
237
+ self.run.end_log()
176
238
  return result
177
-
178
- self.run.set_progress(0, upload_result_count, category='generate_data_units')
179
- generated_data_units = self._generate_data_units(result['uploaded_files'])
180
- result['generated_data_units'] = generated_data_units
181
- self.run.set_progress(upload_result_count, upload_result_count, category='generate_data_units')
239
+ generated_data_units = self._generate_data_units(uploaded_files, upload_result_count)
240
+ result['generated_data_units_count'] = len(generated_data_units)
182
241
 
183
242
  # Setup task with uploaded synapse-backend data units.
184
243
  if not len(generated_data_units):
185
244
  self.run.log_message('No data units were generated.', context=Context.WARNING.value)
245
+ self.run.end_log()
186
246
  return result
187
247
 
188
- self.run.set_progress(0, 1, category='generate_tasks')
189
248
  if self.config['options']['allow_generate_tasks'] and self.params['is_generate_tasks']:
190
- self.run.log_message('Generating tasks with data files...')
191
- self._generate_tasks(generated_data_units)
192
- self.run.log_message('Generating tasks completed')
249
+ generated_tasks = self._generate_tasks(generated_data_units)
250
+ result['generated_tasks_count'] = len(generated_tasks)
193
251
  else:
194
252
  self.run.log_message('Generating tasks process has passed.')
195
253
 
196
- self.run.set_progress(1, 1, category='generate_tasks')
197
-
198
254
  # Generate ground truths for the uploaded data.
199
255
  # TODO: Need to add ground truths generation logic later.
200
- self.run.set_progress(0, 1, category='generate_ground_truths')
201
256
  if self.config['options']['allow_generate_ground_truths'] and self.params['is_generate_ground_truths']:
202
- self.run.log_message('Generating ground truths...')
203
- self._generate_ground_truths()
204
- self.run.log_message('Generating ground truths completed')
257
+ generated_ground_truths = self._generate_ground_truths()
258
+ result['generated_ground_truths_count'] = len(generated_ground_truths)
205
259
  else:
206
260
  self.run.log_message('Generating ground truths process has passed.')
207
- self.run.set_progress(1, 1, category='generate_ground_truths')
208
261
 
262
+ self.run.end_log()
209
263
  return result
210
264
 
211
265
  def _analyze_collection(self) -> Dict:
@@ -214,9 +268,17 @@ class UploadAction(Action):
         Returns:
             Dict: The file specifications of the collection.
         """
+
+        # Initialize progress
+        self.run.set_progress(0, 1, category='analyze_collection')
+
         client = self.run.client
         collection_id = self.params['collection']
         collection = client.get_dataset(collection_id)
+
+        # Finish progress
+        self.run.set_progress(1, 1, category='analyze_collection')
+
         return collection['file_specifications']
 
     def _validate_organized_files(self, file_specification_template: Dict, organized_files: List) -> bool:
@@ -224,24 +286,35 @@ class UploadAction(Action):
         validator = FileSpecificationValidator(file_specification_template, organized_files)
         return validator.validate()
 
-    def _upload_files(self, organized_files) -> List:
+    def _upload_files(self, organized_files, organized_files_count: int) -> List:
         """Upload files to synapse-backend.
 
         Returns:
             Dict: The result of the upload.
         """
+        # Initialize progress
+        self.run.set_progress(0, organized_files_count, category='upload_data_files')
+        self.run.log_message('Uploading data files...')
+
         client = self.run.client
         collection_id = self.params['collection']
         upload_result = []
         organized_files_count = len(organized_files)
         current_progress = 0
         for organized_file in organized_files:
-            upload_result.append(client.upload_data_file(organized_file, collection_id))
+            uploaded_data_file = client.upload_data_file(organized_file, collection_id)
+            self.run.log_data_file(organized_file, UploadStatus.SUCCESS)
+            upload_result.append(uploaded_data_file)
             self.run.set_progress(current_progress, organized_files_count, category='upload_data_files')
             current_progress += 1
+
+        # Finish progress
+        self.run.set_progress(organized_files_count, organized_files_count, category='upload_data_files')
+        self.run.log_message('Upload data files completed.')
+
         return upload_result
 
-    def _generate_data_units(self, uploaded_files: List) -> List:
+    def _generate_data_units(self, uploaded_files: List, upload_result_count: int) -> List:
         """Generate data units for the uploaded data.
 
         TODO: make batch size configurable.
@@ -249,23 +322,36 @@ class UploadAction(Action):
         Returns:
             Dict: The result of the generate data units process.
         """
+        # Initialize progress
+        self.run.set_progress(0, upload_result_count, category='generate_data_units')
+
         client = self.run.client
 
-        generation_result = []
+        generated_data_units = []
         current_progress = 0
         batches = get_batched_list(uploaded_files, 100)
         batches_count = len(batches)
         for batch in batches:
-            generation_result.append(client.create_data_units(batch))
+            created_data_units = client.create_data_units(batch)
+            generated_data_units.append(created_data_units)
             self.run.set_progress(current_progress, batches_count, category='generate_data_units')
             current_progress += 1
-        return generation_result
+            for created_data_unit in created_data_units:
+                self.run.log_data_unit(created_data_unit['id'], UploadStatus.SUCCESS)
+
+        # Finish progress
+        self.run.set_progress(upload_result_count, upload_result_count, category='generate_data_units')
 
-    def _generate_tasks(self, generated_data_units: List):
+        return sum(generated_data_units, [])
+
+    def _generate_tasks(self, generated_data_units: List) -> List:
         """Setup task with uploaded synapse-backend data units.
 
         TODO: make batch size configurable.
         """
+        # Initialize progress
+        self.run.set_progress(0, 1, category='generate_tasks')
+        self.run.log_message('Generating tasks with data files...')
 
         # Prepare batches for processing
         client = self.run.client
@@ -273,21 +359,37 @@ class UploadAction(Action):
         current_progress = 0
 
         # Generate tasks
+        generated_tasks = []
         generated_data_units_count = len(generated_data_units)
-        for data_units in generated_data_units:
+        for data_unit in generated_data_units:
             tasks_data = []
-            for data_unit in data_units:
-                task_data = {'project': project_id, 'data_unit': data_unit['id']}
-                tasks_data.append(task_data)
-
+            task_data = {'project': project_id, 'data_unit': data_unit['id']}
+            tasks_data.append(task_data)
             if tasks_data:
-                client.create_tasks(tasks_data)
+                created_tasks = client.create_tasks(tasks_data)
+                created_task_ids = [created_task['id'] for created_task in created_tasks]
+                generated_tasks.append(created_task_ids)
+                for created_task_id in created_task_ids:
+                    self.run.log_task(created_task_id, UploadStatus.SUCCESS)
 
             self.run.set_progress(current_progress, generated_data_units_count, category='generate_tasks')
             current_progress += 1
 
+        # Finish progress
+        self.run.log_message('Generating tasks completed')
+        self.run.set_progress(1, 1, category='generate_tasks')
+
+        return sum(generated_tasks, [])
+
     def _generate_ground_truths(self):
         """Generate ground truths for the uploaded data.
 
         TODO: Need to add ground truths generation logic later.
         """
+        # Initialize progress
+        self.run.set_progress(0, 1, category='generate_ground_truths')
+        self.run.log_message('Generating ground truths...')
+
+        # Finish progress
+        self.run.log_message('Generating ground truths completed')
+        self.run.set_progress(1, 1, category='generate_ground_truths')
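A note on the new return values: `sum(generated_tasks, [])` flattens the per-batch ID lists by repeated list concatenation (quadratic on large inputs; `itertools.chain` is the linear-time alternative), while `_generate_ground_truths()` still has no `return` statement, so `len(generated_ground_truths)` in `start()` would raise `TypeError` whenever the ground-truth branch runs:

```python
from itertools import chain

batches = [[1, 2], [3], [4, 5]]
assert sum(batches, []) == [1, 2, 3, 4, 5]                    # one level of flattening
assert list(chain.from_iterable(batches)) == [1, 2, 3, 4, 5]  # same result, linear cost
```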
synapse_sdk/plugins/models.py CHANGED
@@ -45,6 +45,11 @@ class PluginRelease:
     def package_manager(self):
         return self.config.get('package_manager', 'pip')
 
+    @cached_property
+    def package_manager_options(self):
+        options = {'pip': {}, 'uv': {'uv_pip_install_options': []}}
+        return options[self.package_manager]
+
     @cached_property
     def checksum(self):
         return hash_text(self.code)
@@ -69,7 +74,10 @@ class PluginRelease:
 
         warm_up.options(
             runtime_env={
-                self.package_manager: ['-r ${RAY_RUNTIME_ENV_CREATE_WORKING_DIR}/requirements.txt'],
+                self.package_manager: {
+                    'packages': ['-r ${RAY_RUNTIME_ENV_CREATE_WORKING_DIR}/requirements.txt'],
+                    **self.package_manager_options,
+                },
                 'working_dir': self.get_url(self.envs['SYNAPSE_PLUGIN_STORAGE']),
             },
             scheduling_strategy=strategy,
synapse_sdk/utils/file.py CHANGED
@@ -150,11 +150,12 @@ def calculate_checksum(file_path, prefix=''):
     return checksum
 
 
-def archive(input_path, output_path):
+def archive(input_path, output_path, append=False):
     input_path = Path(input_path)
     output_path = Path(output_path)
 
-    with zipfile.ZipFile(output_path, mode='w', compression=zipfile.ZIP_DEFLATED) as zipf:
+    mode = 'a' if append and output_path.exists() else 'w'
+    with zipfile.ZipFile(output_path, mode=mode, compression=zipfile.ZIP_DEFLATED) as zipf:
         if input_path.is_file():
             zipf.write(input_path, input_path.name)
         else:
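A quick usage sketch of the new `append` flag (paths are illustrative): with `append=True` and an existing archive, `zipfile` opens in mode `'a'` and adds entries rather than truncating. Note that re-adding an existing arcname stores a duplicate entry rather than replacing it.

```python
from synapse_sdk.utils.file import archive

archive('weights/', 'artifacts.zip')                   # creates the zip (mode 'w')
archive('metrics.json', 'artifacts.zip', append=True)  # adds to it (mode 'a')
```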
synapse_sdk-1.0.0a38.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: synapse-sdk
-Version: 1.0.0a36
+Version: 1.0.0a38
 Summary: synapse sdk
 Author-email: datamaker <developer@datamaker.io>
 License: MIT
synapse_sdk-1.0.0a38.dist-info/RECORD CHANGED
@@ -21,7 +21,7 @@ synapse_sdk/cli/plugin/create.py,sha256=HpYTpohV1NbSrULaVUlc4jWLWznPrx7glgydTM3s
 synapse_sdk/cli/plugin/publish.py,sha256=sIl1wiuSC3lAUpE3rOF4UDKDy2G5EVLlelMjk2aT05g,1221
 synapse_sdk/cli/plugin/run.py,sha256=xz5LRm3zh8Y9DMjw5FFRFVRWSCWtYfZJskfCmrPikaQ,2598
 synapse_sdk/clients/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-synapse_sdk/clients/base.py,sha256=RaQN_41NxFKtVsVKB7I7hdKJAR8ScuyCoK_x1D-U0c0,6399
+synapse_sdk/clients/base.py,sha256=emtffTGGtPOSz6tT_NCL9cGE7ZEOv9pTHyc9BRfoe2s,6462
 synapse_sdk/clients/exceptions.py,sha256=ylv7x10eOp4aA3a48jwonnvqvkiYwzJYXjkVkRTAjwk,220
 synapse_sdk/clients/utils.py,sha256=8pPJTdzHiRPSbZMoQYHAgR2BAMO6u_R_jMV6a2p34iQ,392
 synapse_sdk/clients/agent/__init__.py,sha256=Pz8_iTbIbnb7ywGJ3feqoZVmO2I3mEbwpWsISIxh0BU,1968
@@ -44,11 +44,11 @@ synapse_sdk/clients/validators/collections.py,sha256=LtnwvutsScubOUcZ2reGHLCzseX
 synapse_sdk/plugins/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 synapse_sdk/plugins/enums.py,sha256=ibixwqA3sCNSriG1jAtL54JQc_Zwo3MufwYUqGhVncc,523
 synapse_sdk/plugins/exceptions.py,sha256=Qs7qODp_RRLO9y2otU2T4ryj5LFwIZODvSIXkAh91u0,691
-synapse_sdk/plugins/models.py,sha256=njTQIT-c2d7TsqAN__q1aoYm8hLEPC7ludTj665iN-4,4148
+synapse_sdk/plugins/models.py,sha256=FI_6Hr4q4hGj-GwHjucOfX4HYsUpraGd2yeuy4FjjC0,4438
 synapse_sdk/plugins/upload.py,sha256=VJOotYMayylOH0lNoAGeGHRkLdhP7jnC_A0rFQMvQpQ,3228
 synapse_sdk/plugins/utils.py,sha256=4_K6jIl0WrsXOEhFp94faMOriSsddOhIiaXcawYYUUA,3300
 synapse_sdk/plugins/categories/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-synapse_sdk/plugins/categories/base.py,sha256=cfk7mgidCBnldZ8nCo_Nq_qzP9C1LJYgSXWtYT6hJDw,10046
+synapse_sdk/plugins/categories/base.py,sha256=ATI1VjBWm2rimSkNiiCjfZn7FO4x2oltmh81pJJGL0w,10389
 synapse_sdk/plugins/categories/decorators.py,sha256=Gw6T-UHwpCKrSt596X-g2sZbY_Z1zbbogowClj7Pr5Q,518
 synapse_sdk/plugins/categories/registry.py,sha256=KdQR8SUlLT-3kgYzDNWawS1uJnAhrcw2j4zFaTpilRs,636
 synapse_sdk/plugins/categories/templates.py,sha256=FF5FerhkZMeW1YcKLY5cylC0SkWSYdJODA_Qcm4OGYQ,887
@@ -59,17 +59,19 @@ synapse_sdk/plugins/categories/data_validation/templates/config.yaml,sha256=Hijb
 synapse_sdk/plugins/categories/data_validation/templates/plugin/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 synapse_sdk/plugins/categories/data_validation/templates/plugin/validation.py,sha256=90I5boUpEXvO3mEuKKBs528ls2A4h8Iw4ReOID2h00Y,139
 synapse_sdk/plugins/categories/export/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+synapse_sdk/plugins/categories/export/enums.py,sha256=gtyngvQ1DKkos9iKGcbecwTVQQ6sDwbrBPSGPNb5Am0,127
 synapse_sdk/plugins/categories/export/actions/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-synapse_sdk/plugins/categories/export/actions/export.py,sha256=DN1z2zxjzkgAaalEVWyJGXIVkpb4QeppODm1S7XDVic,7693
+synapse_sdk/plugins/categories/export/actions/export.py,sha256=m-V-PsCmcF1rhiv8Rf9s3xaqJgpjC_kwITv5od5epGQ,9918
 synapse_sdk/plugins/categories/export/templates/config.yaml,sha256=N7YmnFROb3s3M35SA9nmabyzoSb5O2t2TRPicwFNN2o,56
 synapse_sdk/plugins/categories/export/templates/plugin/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-synapse_sdk/plugins/categories/export/templates/plugin/export.py,sha256=nbjvgFVQpPN5Lo1UnPL5p__BYeejMZLMZ4RT_yd7vJU,4561
+synapse_sdk/plugins/categories/export/templates/plugin/export.py,sha256=39XLGo8ui5FscbwZyX3JwmrJqGGvOYrY3FMYDKXwTOQ,5192
 synapse_sdk/plugins/categories/neural_net/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 synapse_sdk/plugins/categories/neural_net/actions/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 synapse_sdk/plugins/categories/neural_net/actions/deployment.py,sha256=oetIwZoee5vxriPX3r1onmxgwojUyaRTlnBIdaQ1zk8,3895
 synapse_sdk/plugins/categories/neural_net/actions/inference.py,sha256=0a655ELqNVjPFZTJDiw4EUdcMCPGveUEKyoYqpwMFBU,1019
 synapse_sdk/plugins/categories/neural_net/actions/test.py,sha256=JY25eg-Fo6WbgtMkGoo_qNqoaZkp3AQNEypJmeGzEog,320
 synapse_sdk/plugins/categories/neural_net/actions/train.py,sha256=kve6iTCg2kUeavMQTR2JFuoYDu-QWZFFlB58ZICQtdM,5406
+synapse_sdk/plugins/categories/neural_net/actions/tune.py,sha256=XJczlLDF8FOJXA-7TXNZa3npWhMsT0wGqQwYW3w5TDo,9475
 synapse_sdk/plugins/categories/neural_net/base/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 synapse_sdk/plugins/categories/neural_net/base/inference.py,sha256=R5DASI6-5vzsjDOYxqeGGMBjnav5qHF4hNJT8zNUR3I,1097
 synapse_sdk/plugins/categories/neural_net/templates/config.yaml,sha256=uZVuXjIfsd_pTaSKptHeHn1TN2FIiLrvvpkClToc6po,596
@@ -97,7 +99,7 @@ synapse_sdk/plugins/categories/smart_tool/templates/plugin/__init__.py,sha256=47
 synapse_sdk/plugins/categories/smart_tool/templates/plugin/auto_label.py,sha256=eevNg0nOcYFR4z_L_R-sCvVOYoLWSAH1jwDkAf3YCjY,320
 synapse_sdk/plugins/categories/upload/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 synapse_sdk/plugins/categories/upload/actions/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-synapse_sdk/plugins/categories/upload/actions/upload.py,sha256=ry5Whogy5opZ7U_G6hQlbej8ufs5BS_VaJLY648vpF4,11347
+synapse_sdk/plugins/categories/upload/actions/upload.py,sha256=9DIH4Aw70LxDpfhrpD0MfncE1m9oj-v52FpaChkVEnA,14755
 synapse_sdk/plugins/categories/upload/templates/config.yaml,sha256=0PhB2uD-9ufavZs7EiF6xj4aBgZuif9mFFGGfzG7HuY,147
 synapse_sdk/plugins/categories/upload/templates/plugin/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 synapse_sdk/plugins/categories/upload/templates/plugin/upload.py,sha256=dnK8gy33GjG5ettayawDJv1gM3xCm1K6lM-PfeeTjQw,1163
@@ -116,7 +118,7 @@ synapse_sdk/shared/enums.py,sha256=WMZPag9deVF7VCXaQkLk7ly_uX1KwbNzRx9TdvgaeFE,1
 synapse_sdk/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 synapse_sdk/utils/dataset.py,sha256=zWTzFmv589izFr62BDuApi3r5FpTsdm-5AmriC0AEdM,1865
 synapse_sdk/utils/debug.py,sha256=F7JlUwYjTFZAMRbBqKm6hxOIz-_IXYA8lBInOS4jbS4,100
-synapse_sdk/utils/file.py,sha256=zP8eOZifGiYP9PyC4ivQwxs-ljbtXRtbWN4yOjZF6tc,6658
+synapse_sdk/utils/file.py,sha256=Qb5FihoX1J0wsF2UAckc0d0c3IMHn0NrX9Vt3cXAwt4,6732
 synapse_sdk/utils/module_loading.py,sha256=chHpU-BZjtYaTBD_q0T7LcKWtqKvYBS4L0lPlKkoMQ8,1020
 synapse_sdk/utils/network.py,sha256=wg-oFM0gKK5REqIUO8d-x9yXJfqbnkSbbF0_qyxpwz4,412
 synapse_sdk/utils/string.py,sha256=rEwuZ9SAaZLcQ8TYiwNKr1h2u4CfnrQx7SUL8NWmChg,216
@@ -130,9 +132,9 @@ synapse_sdk/utils/storage/providers/__init__.py,sha256=x7RGwZryT2FpVxS7fGWryRVpq
 synapse_sdk/utils/storage/providers/gcp.py,sha256=i2BQCu1Kej1If9SuNr2_lEyTcr5M_ncGITZrL0u5wEA,363
 synapse_sdk/utils/storage/providers/s3.py,sha256=W94rQvhGRXti3R4mYP7gmU5pcyCQpGFIBLvxxqLVdRM,2231
 synapse_sdk/utils/storage/providers/sftp.py,sha256=_8s9hf0JXIO21gvm-JVS00FbLsbtvly4c-ETLRax68A,1426
-synapse_sdk-1.0.0a36.dist-info/licenses/LICENSE,sha256=bKzmC5YAg4V1Fhl8OO_tqY8j62hgdncAkN7VrdjmrGk,1101
-synapse_sdk-1.0.0a36.dist-info/METADATA,sha256=q_dbloQbRO4v_kWdEH5BFmftNs0ocAHu5agv_cOvJA8,1160
-synapse_sdk-1.0.0a36.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
-synapse_sdk-1.0.0a36.dist-info/entry_points.txt,sha256=VNptJoGoNJI8yLXfBmhgUefMsmGI0m3-0YoMvrOgbxo,48
-synapse_sdk-1.0.0a36.dist-info/top_level.txt,sha256=ytgJMRK1slVOKUpgcw3LEyHHP7S34J6n_gJzdkcSsw8,12
-synapse_sdk-1.0.0a36.dist-info/RECORD,,
+synapse_sdk-1.0.0a38.dist-info/licenses/LICENSE,sha256=bKzmC5YAg4V1Fhl8OO_tqY8j62hgdncAkN7VrdjmrGk,1101
+synapse_sdk-1.0.0a38.dist-info/METADATA,sha256=GxLFIbbXbVYUjusRE7MAZefelK_XNYz0oGTOOCNuybU,1160
+synapse_sdk-1.0.0a38.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
+synapse_sdk-1.0.0a38.dist-info/entry_points.txt,sha256=VNptJoGoNJI8yLXfBmhgUefMsmGI0m3-0YoMvrOgbxo,48
+synapse_sdk-1.0.0a38.dist-info/top_level.txt,sha256=ytgJMRK1slVOKUpgcw3LEyHHP7S34J6n_gJzdkcSsw8,12
+synapse_sdk-1.0.0a38.dist-info/RECORD,,