synapse-sdk 1.0.0a31__py3-none-any.whl → 1.0.0a33__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of synapse-sdk might be problematic.

Files changed (28)
  1. synapse_sdk/clients/backend/__init__.py +2 -0
  2. synapse_sdk/clients/backend/annotation.py +4 -4
  3. synapse_sdk/clients/backend/dataset.py +57 -5
  4. synapse_sdk/clients/backend/hitl.py +17 -0
  5. synapse_sdk/clients/backend/integration.py +3 -1
  6. synapse_sdk/clients/backend/models.py +44 -0
  7. synapse_sdk/clients/base.py +61 -16
  8. synapse_sdk/plugins/categories/base.py +40 -0
  9. synapse_sdk/plugins/categories/export/actions/export.py +168 -28
  10. synapse_sdk/plugins/categories/export/templates/plugin/export.py +43 -33
  11. synapse_sdk/plugins/categories/smart_tool/templates/config.yaml +2 -0
  12. synapse_sdk/plugins/categories/upload/actions/upload.py +292 -0
  13. synapse_sdk/plugins/categories/upload/templates/config.yaml +6 -0
  14. synapse_sdk/plugins/categories/upload/templates/plugin/__init__.py +0 -0
  15. synapse_sdk/plugins/categories/upload/templates/plugin/upload.py +44 -0
  16. synapse_sdk/plugins/enums.py +3 -1
  17. synapse_sdk/plugins/models.py +16 -0
  18. synapse_sdk/utils/storage/__init__.py +20 -2
  19. {synapse_sdk-1.0.0a31.dist-info → synapse_sdk-1.0.0a33.dist-info}/METADATA +3 -2
  20. {synapse_sdk-1.0.0a31.dist-info → synapse_sdk-1.0.0a33.dist-info}/RECORD +26 -22
  21. {synapse_sdk-1.0.0a31.dist-info → synapse_sdk-1.0.0a33.dist-info}/WHEEL +1 -1
  22. synapse_sdk/plugins/categories/export/actions/utils.py +0 -5
  23. synapse_sdk/plugins/categories/import/actions/import.py +0 -10
  24. /synapse_sdk/plugins/categories/{import → upload}/__init__.py +0 -0
  25. /synapse_sdk/plugins/categories/{import → upload}/actions/__init__.py +0 -0
  26. {synapse_sdk-1.0.0a31.dist-info → synapse_sdk-1.0.0a33.dist-info}/entry_points.txt +0 -0
  27. {synapse_sdk-1.0.0a31.dist-info → synapse_sdk-1.0.0a33.dist-info/licenses}/LICENSE +0 -0
  28. {synapse_sdk-1.0.0a31.dist-info → synapse_sdk-1.0.0a33.dist-info}/top_level.txt +0 -0

synapse_sdk/plugins/categories/export/templates/plugin/export.py

@@ -1,20 +1,19 @@
  import json
+ from pathlib import Path

  import requests

- from synapse_sdk.plugins.categories.export.actions.utils import get_original_file_path

-
- def export(run, input_dataset, path_root, **params):
+ def export(run, export_items, path_root, **params):
      """Executes the export task.

      Args:
          run : Execution object
-         input_dataset (generator):
+         export_items (generator):
              - data (dict): dm_schema_data information.
              - files (dict): File information. Includes file URL, original file path, metadata, etc.
              - id (int): ground_truth ID
-         path_root : Save path
+         path_root : pathlib object, the path to export
          **params: Additional parameters

      Returns:
@@ -24,10 +23,11 @@ def export(run, input_dataset, path_root, **params):
      path_root.mkdir(parents=True, exist_ok=True)
      run.log_message('Starting export process.')

-     # results: Contains all information fetched through the list API.
-     results = params.get('results', [])
+     # results contains all information fetched through the list API.
+     # example:
+     # params.get('results', [])

-     save_original_file = params.get('save_original_file')
+     save_original_file_flag = params.get('save_original_file')
      errors_json_file_list = []
      errors_original_file_list = []

@@ -39,21 +39,28 @@ def export(run, input_dataset, path_root, **params):
      origin_files_output_path = path_root / 'origin_files'
      origin_files_output_path.mkdir(parents=True, exist_ok=True)

-     total = len(results)
-     for no, input_data in enumerate(input_dataset):
+     total = params['count']
+     # progress init
+     run.set_progress(0, total, category='dataset_conversion')
+     for no, export_item in enumerate(export_items, start=1):
          run.set_progress(no, total, category='dataset_conversion')
-         preprocessed_data = before_convert(input_data)
+         if no == 1:
+             run.log_message('Converting dataset.')
+         preprocessed_data = before_convert(export_item)
          converted_data = convert_data(preprocessed_data)
          final_data = after_convert(converted_data)

          # Call if original file extraction is needed
-         if save_original_file:
+         if save_original_file_flag:
+             if no == 1:
+                 run.log_message('Saving original file.')
              save_original_file(final_data, origin_files_output_path, errors_original_file_list)

          # Extract data as JSON files
+         if no == 1:
+             run.log_message('Saving json file.')
          save_as_json(final_data, json_output_path, errors_json_file_list)

-     run.log_message('Saving converted dataset.')
      run.end_log()

      # Save error list files
@@ -62,7 +69,7 @@ def export(run, input_dataset, path_root, **params):
      with (path_root / 'error_file_list.json').open('w', encoding='utf-8') as f:
          json.dump(export_error_file, f, indent=4, ensure_ascii=False)

-     return {'export_path': path_root}
+     return {'export_path': str(path_root)}


  def convert_data(data):
@@ -80,19 +87,29 @@ def after_convert(data):
      return data


- def save_original_file(result, base_path, error_file_list):
-     """Saves the original file.
+ def get_original_file_pathlib(files):
+     """Retrieve the original file path from the given file information.

      Args:
-         result (dict): Result data
-         base_path (Path): Save path
-         error_file_list (list): List of error files
+         files (dict): A dictionary containing file information, including file URL,
+             original file path, metadata, etc.

      Returns:
-         base_path (str): Save path
+         pathlib.Path: The original file path extracted from the metadata.
+     """
+     return Path(files['meta']['path_original'])
+
+
+ def save_original_file(result, base_path, error_file_list):
+     """Saves the original file.
+
+     Args:
+         result (dict): API response data containing file information.
+         base_path (Path): The directory where the file will be saved.
+         error_file_list (list): A list to store error files.
      """
-     file_url = next(iter(result['files'].values()))['url']
-     file_name = get_original_file_path(result['files']).name
+     file_url = result['files']['url']
+     file_name = get_original_file_pathlib(result['files']).name
      response = requests.get(file_url)
      try:
          with (base_path / file_name).open('wb') as file:
@@ -100,27 +117,20 @@ def save_original_file(result, base_path, error_file_list):
      except Exception as e:
          error_file_list.append([file_name, str(e)])

-     return base_path
-

  def save_as_json(result, base_path, error_file_list):
      """Saves the data as a JSON file.

      Args:
-         result (dict): Result data
-         base_path (Path): Save path
-         error_file_list (list): List of error files
-
-     Returns:
-         base_path (str): Save path
+         result (dict): API response data containing file information.
+         base_path (Path): The directory where the file will be saved.
+         error_file_list (list): A list to store error files.
      """
      # Default save file name: original file name
-     file_name = get_original_file_path(result['files']).stem
+     file_name = get_original_file_pathlib(result['files']).stem
      json_data = result['data']
      try:
          with (base_path / f'{file_name}.json').open('w', encoding='utf-8') as f:
              json.dump(json_data, f, indent=4, ensure_ascii=False)
      except Exception as e:
          error_file_list.append([f'{file_name}.json', str(e)])
-
-     return base_path
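
Note: the reworked template now reads the original file URL from files['url'] and derives output names from files['meta']['path_original']. A minimal sketch of the item shape those helpers assume (the concrete values below are invented for illustration):

    # Hypothetical export_item, shaped the way save_original_file / save_as_json above expect it.
    export_item = {
        'id': 42,                                                     # ground_truth ID
        'data': {'annotations': []},                                  # dm_schema_data, written out by save_as_json
        'files': {
            'url': 'https://backend.example.test/media/0001.jpg',     # downloaded by save_original_file
            'meta': {'path_original': 'images/0001.jpg'},             # drives the output file name
        },
    }
    # get_original_file_pathlib(export_item['files']).name -> '0001.jpg'
    # get_original_file_pathlib(export_item['files']).stem -> '0001'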

synapse_sdk/plugins/categories/smart_tool/templates/config.yaml

@@ -6,3 +6,5 @@ actions:
      entrypoint: plugin.auto_label.MyAutoLabel
      model:
        neural_net__plugin__code: sam2
+     ui_schema: |
+       Dumped FormKit Schema for auto_label action

synapse_sdk/plugins/categories/upload/actions/upload.py

@@ -0,0 +1,292 @@
+ from enum import Enum
+ from typing import Annotated, Dict, List
+
+ from pydantic import AfterValidator, BaseModel, field_validator
+ from pydantic_core import PydanticCustomError
+
+ from synapse_sdk.clients.exceptions import ClientError
+ from synapse_sdk.clients.utils import get_batched_list
+ from synapse_sdk.i18n import gettext as _
+ from synapse_sdk.plugins.categories.base import Action
+ from synapse_sdk.plugins.categories.decorators import register_action
+ from synapse_sdk.plugins.enums import PluginCategory, RunMethod
+ from synapse_sdk.plugins.models import Run
+ from synapse_sdk.shared.enums import Context
+ from synapse_sdk.utils.pydantic.validators import non_blank
+ from synapse_sdk.utils.storage import get_pathlib
+
+
+ class UploadRun(Run):
+     pass
+
+
+ class UploadParams(BaseModel):
+     """Upload action parameters.
+
+     Args:
+         name (str): The name of the action.
+         description (str | None): The description of the action.
+         checkpoint (int | None): The checkpoint of the action.
+         path (str): The path of the action.
+         storage (int): The storage of the action.
+         collection (int): The collection of the action.
+         project (int | None): The project of the action.
+         is_generate_tasks (bool): The flag to generate tasks.
+         is_generate_ground_truths (bool): The flag to generate ground truths
+     """
+
+     name: Annotated[str, AfterValidator(non_blank)]
+     description: str | None
+     path: str
+     storage: int
+     collection: int
+     project: int | None
+     is_generate_tasks: bool = False
+     is_generate_ground_truths: bool = False
+
+     @field_validator('storage', mode='before')
+     @classmethod
+     def check_storage_exists(cls, value: str, info) -> str:
+         """Validate synapse-backend storage exists.
+
+         TODO: Need to define validation method naming convention.
+         TODO: Need to make validation method reusable.
+         """
+         action = info.context['action']
+         client = action.client
+         try:
+             client.get_storage(value)
+         except ClientError:
+             raise PydanticCustomError('client_error', _('Error occurred while checking storage exists.'))
+         return value
+
+     @field_validator('collection', mode='before')
+     @classmethod
+     def check_collection_exists(cls, value: str, info) -> str:
+         """Validate synapse-backend collection exists."""
+         action = info.context['action']
+         client = action.client
+         try:
+             client.get_dataset(value)
+         except ClientError:
+             raise PydanticCustomError('client_error', _('Error occurred while checking collection exists.'))
+         return value
+
+     @field_validator('project', mode='before')
+     @classmethod
+     def check_project_exists(cls, value: str, info) -> str:
+         """Validate synapse-backend project exists."""
+         if not value:
+             return value
+
+         action = info.context['action']
+         client = action.client
+         try:
+             client.get_project(value)
+         except ClientError:
+             raise PydanticCustomError('client_error', _('Error occurred while checking project exists.'))
+         return value
+
+
+ @register_action
+ class UploadAction(Action):
+     """Upload action class.
+
+     Attrs:
+         name (str): The name of the action.
+         category (PluginCategory): The category of the action.
+         method (RunMethod): The method to run of the action.
+
+     Progress Categories:
+         analyze_collection: The progress category for the analyze collection process.
+         data_file_upload: The progress category for the upload process.
+         generate_data_units: The progress category for the generate data units process.
+         generate_tasks: The progress category for the generate tasks process.
+         generate_ground_truths: The progress category for the generate ground truths process.
+     """
+
+     name = 'upload'
+     category = PluginCategory.UPLOAD
+     method = RunMethod.JOB
+     progress_categories = {
+         'analyze_collection': {
+             'proportion': 5,
+         },
+         'upload_data_files': {
+             'proportion': 35,
+         },
+         'generate_data_units': {
+             'proportion': 20,
+         },
+         'generate_tasks': {
+             'proportion': 20,
+         },
+         'generate_ground_truths': {
+             'proportion': 20,
+         },
+     }
+
+     def get_uploader(self, path):
+         """Get uploader from entrypoint."""
+         return self.entrypoint(self.run, path)
+
+     def start(self) -> Dict:
+         """Start upload process.
+
+         Returns:
+             Dict: The result of the upload process.
+         """
+         # Setup path object with path and storage.
+         storage = self.client.get_storage(self.params['storage'])
+         pathlib_cwd = get_pathlib(storage, self.params['path'])
+
+         # Initialize uploader.
+         uploader = self.get_uploader(pathlib_cwd)
+
+         # Analyze Collection file specifications to determine the data structure for upload.
+         self.run.set_progress(0, 1, category='analyze_collection')
+         file_specification_skeleton = self._analyze_collection()
+         self.run.set_progress(1, 1, category='analyze_collection')
+
+         # Setup result dict.
+         result = {}
+
+         # Organize data according to Collection file specification structure.
+         organized_files = uploader.handle_upload_files()
+         if not self._validate_organized_files(file_specification_skeleton, organized_files):
+             self.run.log_message('Validate organized files failed.')
+             return result
+
+         # Upload files to synapse-backend.
+         organized_files_count = len(organized_files)
+         if not organized_files_count:
+             self.run.log_message('Files not found on the path.', context=Context.WARNING.value)
+             return result
+
+         self.run.set_progress(0, organized_files_count, category='upload_data_files')
+         self.run.log_message('Uploading data files...')
+         result['uploaded_files'] = self._upload_files(organized_files)
+         self.run.set_progress(organized_files_count, organized_files_count, category='upload_data_files')
+         self.run.log_message('Upload data files completed.')
+
+         # Generate data units for the uploaded data.
+         upload_result_count = len(result['uploaded_files'])
+         if not upload_result_count:
+             self.run.log_message('No files were uploaded.', context=Context.WARNING.value)
+             return result
+
+         self.run.set_progress(0, upload_result_count, category='generate_data_units')
+         generated_data_units = self._generate_data_units(result['uploaded_files'])
+         result['generated_data_units'] = generated_data_units
+         self.run.set_progress(upload_result_count, upload_result_count, category='generate_data_units')
+
+         # Setup task with uploaded synapse-backend data units.
+         if not len(generated_data_units):
+             self.run.log_message('No data units were generated.', context=Context.WARNING.value)
+             return result
+
+         self.run.set_progress(0, 1, category='generate_tasks')
+         if self.config['options']['allow_generate_tasks'] and self.params['is_generate_tasks']:
+             self.run.log_message('Generating tasks with data files...')
+             self._generate_tasks(generated_data_units)
+             self.run.log_message('Generating tasks completed')
+         else:
+             self.run.log_message('Generating tasks process has passed.')
+
+         self.run.set_progress(1, 1, category='generate_tasks')
+
+         # Generate ground truths for the uploaded data.
+         # TODO: Need to add ground truths generation logic later.
+         self.run.set_progress(0, 1, category='generate_ground_truths')
+         if self.config['options']['allow_generate_ground_truths'] and self.params['is_generate_ground_truths']:
+             self.run.log_message('Generating ground truths...')
+             self._generate_ground_truths()
+             self.run.log_message('Generating ground truths completed')
+         else:
+             self.run.log_message('Generating ground truths process has passed.')
+         self.run.set_progress(1, 1, category='generate_ground_truths')
+
+         return result
+
+     def _analyze_collection(self) -> Dict:
+         """Analyze Synapse Collection Specifications.
+
+         Returns:
+             Dict: The file specifications of the collection.
+         """
+         client = self.run.client
+         collection_id = self.params['collection']
+         collection = client.get_dataset(collection_id)
+         return collection['file_specifications']
+
+     def _validate_organized_files(self, file_specification_skeleton: Dict, organized_files: List) -> bool:
+         """Validate organized files from Uploader."""
+         return True
+
+     def _upload_files(self, organized_files) -> List:
+         """Upload files to synapse-backend.
+
+         Returns:
+             Dict: The result of the upload.
+         """
+         client = self.run.client
+         collection_id = self.params['collection']
+         upload_result = []
+         organized_files_count = len(organized_files)
+         current_progress = 0
+         for organized_file in organized_files:
+             upload_result.append(client.upload_data_file(organized_file, collection_id))
+             self.run.set_progress(current_progress, organized_files_count, category='upload_data_files')
+             current_progress += 1
+         return upload_result
+
+     def _generate_data_units(self, uploaded_files: List) -> List:
+         """Generate data units for the uploaded data.
+
+         TODO: make batch size configurable.
+
+         Returns:
+             Dict: The result of the generate data units process.
+         """
+         client = self.run.client
+
+         generation_result = []
+         current_progress = 0
+         batches = get_batched_list(uploaded_files, 100)
+         batches_count = len(batches)
+         for batch in batches:
+             generation_result.append(client.create_data_units(batch))
+             self.run.set_progress(current_progress, batches_count, category='generate_data_units')
+             current_progress += 1
+         return generation_result
+
+     def _generate_tasks(self, generated_data_units: List):
+         """Setup task with uploaded synapse-backend data units.
+
+         TODO: make batch size configurable.
+         """
+
+         # Prepare batches for processing
+         client = self.run.client
+         project_id = self.params['project']
+         current_progress = 0
+
+         # Generate tasks
+         generated_data_units_count = len(generated_data_units)
+         for data_units in generated_data_units:
+             tasks_data = []
+             for data_unit in data_units:
+                 task_data = {'project': project_id, 'data_unit': data_unit['id']}
+                 tasks_data.append(task_data)
+
+             if tasks_data:
+                 client.create_tasks(tasks_data)
+
+             self.run.set_progress(current_progress, generated_data_units_count, category='generate_tasks')
+             current_progress += 1
+
+     def _generate_ground_truths(self):
+         """Generate ground truths for the uploaded data.
+
+         TODO: Need to add ground truths generation logic later.
+         """

synapse_sdk/plugins/categories/upload/templates/config.yaml

@@ -0,0 +1,6 @@
+ actions:
+   upload:
+     entrypoint: plugin.upload.Uploader
+     options:
+       allow_generate_tasks: false
+       allow_generate_ground_truths: false

synapse_sdk/plugins/categories/upload/templates/plugin/upload.py

@@ -0,0 +1,44 @@
+ from pathlib import Path
+ from typing import List
+
+
+ class Uploader:
+     """Plugin upload action class.
+
+     * Organize, upload, setup task, generate ground truths for the uploaded data.
+     """
+
+     def __init__(self, run, path: Path, *args, **kwargs):
+         """Initialize the plugin upload action class.
+
+         Args:
+             run: Plugin run object.
+             path: pathlib object by upload target destination path.
+         """
+         self.run = run
+         self.path = path
+
+     def handle_upload_files(self) -> List:
+         """Handle upload files.
+
+         * Organize data according to collection file specification structure.
+         * Structure files according to the file specification of the target collection.
+
+         Returns:
+             List: List of dictionaries containing 'files' and 'meta'.
+
+         Examples:
+             [
+                 {
+                     "files": {
+                         'image_1': image_1_pathlib_object,
+                         'image_2': image_2_pathlib_object,
+                         'meta_1': meta_1_pathlib_object,
+                     },
+                     "meta": {
+                         "key": "value"
+                     }
+                 }
+             ]
+         """
+         return []
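
The generated template returns an empty list; a plugin author is expected to fill in handle_upload_files. A hedged sketch of one possible implementation, assuming the target collection's file specification exposes a single 'image_1' slot (that slot name is made up for illustration):

    from typing import List

    from plugin.upload import Uploader  # the template class generated above


    class MyUploader(Uploader):
        def handle_upload_files(self) -> List:
            # Walk the upload path and map every image onto the assumed 'image_1' slot.
            organized = []
            for image_path in sorted(self.path.glob('**/*.jpg')):
                organized.append({
                    'files': {'image_1': image_path},
                    'meta': {'source': image_path.name},
                })
            return organized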

synapse_sdk/plugins/enums.py

@@ -2,6 +2,8 @@ from enum import Enum


  class RunMethod(Enum):
+     """Plugin Execution Methods."""
+
      JOB = 'job'
      TASK = 'task'
      RESTAPI = 'restapi'
@@ -10,7 +12,7 @@ class RunMethod(Enum):
  class PluginCategory(Enum):
      NEURAL_NET = 'neural_net'
      EXPORT = 'export'
-     IMPORT = 'import'
+     UPLOAD = 'upload'
      SMART_TOOL = 'smart_tool'
      POST_ANNOTATION = 'post_annotation'
      PRE_ANNOTATION = 'pre_annotation'

synapse_sdk/plugins/models.py

@@ -87,6 +87,15 @@ class PluginRelease:


  class Run:
+     """Run class for manage plugin run istance.
+
+     Attrs:
+         job_id: plugin run job id
+         context: plugin run context
+         client: backend client for communicate with backend
+         logger: logger for log plugin run events
+     """
+
      logger = None
      job_id = None
      context = None
@@ -111,6 +120,13 @@ class Run:
          self.logger = ConsoleLogger(**kwargs)

      def set_progress(self, current, total, category=''):
+         """Set progress for plugin run.
+
+         Args:
+             current: current progress value
+             total: total progress value
+             category: progress category
+         """
          self.logger.set_progress(current, total, category)

      def log(self, event, data, file=None):
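
Run.set_progress reports per-category progress only; how the 'proportion' weights declared on UploadAction are rolled up into an overall figure is not shown in this diff. One plausible aggregation, sketched purely for illustration:

    # Hedged sketch (not SDK code): weight each category's completion by its declared proportion.
    progress_categories = {
        'analyze_collection': {'proportion': 5},
        'upload_data_files': {'proportion': 35},
        'generate_data_units': {'proportion': 20},
        'generate_tasks': {'proportion': 20},
        'generate_ground_truths': {'proportion': 20},
    }
    reported = {'analyze_collection': (1, 1), 'upload_data_files': (70, 200)}  # (current, total)

    overall = sum(
        spec['proportion'] * (reported.get(name, (0, 1))[0] / reported.get(name, (0, 1))[1])
        for name, spec in progress_categories.items()
    )
    print(f'{overall:.1f}% complete')  # 5 + 35 * 0.35 = 17.25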

synapse_sdk/utils/storage/__init__.py

@@ -1,3 +1,4 @@
+ from pathlib import Path
  from urllib.parse import urlparse

  from synapse_sdk.i18n import gettext as _
@@ -5,16 +6,33 @@ from synapse_sdk.utils.storage.registry import STORAGE_PROVIDERS


  def get_storage(connection_param: str | dict):
+     """Get storage class with connection param.
+
+     Args:
+         connection_param (str | dict): The connection param for the Storage provider.
+
+     Returns:
+         BaseStorage: The storage class object with connection param.
+     """
      storage_scheme = None
      if isinstance(connection_param, dict):
          storage_scheme = connection_param['provider']
      else:
          storage_scheme = urlparse(connection_param).scheme

-     assert storage_scheme in STORAGE_PROVIDERS.keys(), _('지원하지 않는 저장소입니다.')
+     assert storage_scheme in STORAGE_PROVIDERS.keys(), _('Storage provider not supported.')
      return STORAGE_PROVIDERS[storage_scheme](connection_param)


- def get_pathlib(storage_config, path_root):
+ def get_pathlib(storage_config: str | dict, path_root: str) -> Path:
+     """Get pathlib object with synapse-backend storage config.
+
+     Args:
+         storage_config (str | dict): The storage config by synapse-backend storage api.
+         path_root (str): The path root.
+
+     Returns:
+         pathlib.Path: The pathlib object.
+     """
      storage_class = get_storage(storage_config)
      return storage_class.get_pathlib(path_root)
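
METADATA pins universal-pathlib and fsspec[gcs,s3,sftp], so the providers behind get_storage / get_pathlib are presumably fsspec-backed. A hedged usage sketch: the 'provider' dict key is what the code above reads, but the 's3' scheme and the 'bucket' field are assumptions about how a provider is registered and configured:

    from synapse_sdk.utils.storage import get_pathlib, get_storage

    # Connection param as a URL string: the scheme picks the provider out of STORAGE_PROVIDERS.
    root = get_pathlib('s3://my-bucket', 'datasets/batch_01')

    # Connection param as a dict: the 'provider' key picks the provider; other keys are
    # hypothetical provider-specific settings.
    storage = get_storage({'provider': 's3', 'bucket': 'my-bucket'})
    root = storage.get_pathlib('datasets/batch_01')

    for path in root.glob('**/*.jpg'):
        print(path)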

{synapse_sdk-1.0.0a31.dist-info → synapse_sdk-1.0.0a33.dist-info}/METADATA

@@ -1,6 +1,6 @@
- Metadata-Version: 2.2
+ Metadata-Version: 2.4
  Name: synapse-sdk
- Version: 1.0.0a31
+ Version: 1.0.0a33
  Summary: synapse sdk
  Author-email: datamaker <developer@datamaker.io>
  License: MIT
@@ -21,6 +21,7 @@ Requires-Dist: universal-pathlib
  Requires-Dist: fsspec[gcs,s3,sftp]
  Provides-Extra: all
  Requires-Dist: ray[all]; extra == "all"
+ Dynamic: license-file

  This is the SDK to develop synapse plugins