synapse-sdk 1.0.0a54__py3-none-any.whl → 1.0.0a56__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

This version of synapse-sdk has been flagged as potentially problematic.

--- a/synapse_sdk/clients/backend/__init__.py
+++ b/synapse_sdk/clients/backend/__init__.py
@@ -1,6 +1,6 @@
 from synapse_sdk.clients.backend.annotation import AnnotationClientMixin
 from synapse_sdk.clients.backend.core import CoreClientMixin
-from synapse_sdk.clients.backend.dataset import DatasetClientMixin
+from synapse_sdk.clients.backend.data_collection import DataCollectionClientMixin
 from synapse_sdk.clients.backend.hitl import HITLClientMixin
 from synapse_sdk.clients.backend.integration import IntegrationClientMixin
 from synapse_sdk.clients.backend.ml import MLClientMixin
@@ -9,7 +9,7 @@ from synapse_sdk.clients.backend.ml import MLClientMixin
 class BackendClient(
     AnnotationClientMixin,
     CoreClientMixin,
-    DatasetClientMixin,
+    DataCollectionClientMixin,
     IntegrationClientMixin,
     MLClientMixin,
     HITLClientMixin,
--- a/synapse_sdk/clients/backend/dataset.py
+++ b/synapse_sdk/clients/backend/data_collection.py
@@ -8,18 +8,18 @@ from synapse_sdk.clients.base import BaseClient
 from synapse_sdk.clients.utils import get_batched_list


-class DatasetClientMixin(BaseClient):
-    def list_dataset(self):
-        path = 'datasets/'
+class DataCollectionClientMixin(BaseClient):
+    def list_data_collection(self):
+        path = 'data_collections/'
         return self._list(path)

-    def get_dataset(self, dataset_id):
-        """Get dataset from synapse-backend.
+    def get_data_collection(self, data_collection_id):
+        """Get data_collection from synapse-backend.

         Args:
-            dataset_id: The dataset id to get.
+            data_collection_id: The data_collection id to get.
         """
-        path = f'datasets/{dataset_id}/?expand=file_specifications'
+        path = f'data_collections/{data_collection_id}/?expand=file_specifications'
         return self._get(path)

     def create_data_file(self, file_path: Path):
@@ -40,19 +40,19 @@ class DatasetClientMixin(BaseClient):
         path = 'data_units/'
         return self._post(path, data=data)

-    def upload_dataset(
+    def upload_data_collection(
         self,
-        dataset_id: int,
-        dataset: Dict,
+        data_collection_id: int,
+        data_collection: Dict,
         project_id: Optional[int] = None,
         batch_size: int = 1000,
         process_pool: int = 10,
     ):
-        """Upload dataset to synapse-backend.
+        """Upload data_collection to synapse-backend.

         Args:
-            dataset_id: The dataset id to upload the data to.
-            dataset: The dataset to upload.
+            data_collection_id: The data_collection id to upload the data to.
+            data_collection: The data_collection to upload.
                 * structure:
                     - files: The files to upload. (key: file name, value: file pathlib object)
                     - meta: The meta data to upload.
@@ -60,14 +60,14 @@ class DatasetClientMixin(BaseClient):
             batch_size: The batch size to upload the data.
             process_pool: The process pool to upload the data.
         """
-        # TODO validate dataset with schema
+        # TODO validate data_collection with schema

-        params = [(data, dataset_id) for data in dataset]
+        params = [(data, data_collection_id) for data in data_collection]

         with Pool(processes=process_pool) as pool:
-            dataset = pool.starmap(self.upload_data_file, tqdm(params))
+            data_collection = pool.starmap(self.upload_data_file, tqdm(params))

-        batches = get_batched_list(dataset, batch_size)
+        batches = get_batched_list(data_collection, batch_size)

         for batch in tqdm(batches):
             data_units = self.create_data_units(batch)
@@ -82,7 +82,7 @@ class DatasetClientMixin(BaseClient):

         self.create_tasks(tasks_data)

-    def upload_data_file(self, data: Dict, dataset_id: int) -> Dict:
+    def upload_data_file(self, data: Dict, data_collection_id: int) -> Dict:
         """Upload files to synapse-backend.

         Args:
@@ -90,13 +90,13 @@ class DatasetClientMixin(BaseClient):
             * structure:
                 - files: The files to upload. (key: file name, value: file pathlib object)
                 - meta: The meta data to upload.
-            dataset_id: The dataset id to upload the data to.
+            data_collection_id: The data_collection id to upload the data to.

         Returns:
             Dict: The result of the upload.
         """
         for name, path in data['files'].items():
             data_file = self.create_data_file(path)
-            data['dataset'] = dataset_id
+            data['data_collection'] = data_collection_id
             data['files'][name] = {'checksum': data_file['checksum'], 'path': str(path)}
         return data
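Note on migrating to the renamed client API: the mixin, its methods, and the backend routes all move from dataset to data_collection. A minimal before/after sketch, assuming an already-configured BackendClient (construction details are not part of this diff) and the documented payload shape of one dict per item with 'files' (name to pathlib object) and 'meta' keys:

    from pathlib import Path
    from synapse_sdk.clients.backend import BackendClient

    client = BackendClient(...)  # placeholder; configure as in your existing code

    # Illustrative payload matching the upload_data_collection docstring above.
    payload = [{'files': {'image_1': Path('img_0001.png')}, 'meta': {'split': 'train'}}]

    # 1.0.0a54
    # client.get_dataset(7)
    # client.upload_dataset(7, payload)

    # 1.0.0a56
    client.get_data_collection(7)  # GET data_collections/7/?expand=file_specifications
    client.upload_data_collection(7, payload, batch_size=1000, process_pool=10)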
--- a/synapse_sdk/plugins/categories/export/actions/export.py
+++ b/synapse_sdk/plugins/categories/export/actions/export.py
@@ -27,6 +27,13 @@ class ExportRun(Run):
     error: str | None = None
     created: str

+    class MetricsRecord(BaseModel):
+        """Metrics record model."""
+
+        stand_by: int
+        failed: int
+        success: int
+
     def log_file(
         self, log_type: str, target_id: int, data_file_info: dict, status: ExportStatus, error: str | None = None
     ):
@@ -51,6 +58,16 @@ class ExportRun(Run):
             ).model_dump(),
         )

+    def log_metrics(self, record: MetricsRecord, category: str):
+        """Log export metrics.
+
+        Args:
+            record (MetricsRecord): The metrics record to log.
+            category (str): The category of the metrics.
+        """
+        record = self.MetricsRecord.model_validate(record)
+        self.set_metrics(value=record.dict(), category=category)
+
     def export_log_json_file(
         self,
         target_id: int,
@@ -263,6 +280,7 @@ class ExportAction(Action):
             'proportion': 100,
         }
     }
+    metrics_categories = {'data_file', 'original_file'}

     def get_filtered_results(self, filters, handler):
        """Get filtered target results."""
--- a/synapse_sdk/plugins/categories/export/templates/plugin/export.py
+++ b/synapse_sdk/plugins/categories/export/templates/plugin/export.py
@@ -42,6 +42,8 @@ def export(run, export_items, path_root, **params):
     origin_files_output_path.mkdir(parents=True, exist_ok=True)

     total = params['count']
+    original_file_metrics_record = run.MetricsRecord(stand_by=total, success=0, failed=0)
+    data_file_metrics_record = run.MetricsRecord(stand_by=total, success=0, failed=0)
     # progress init
     run.set_progress(0, total, category='dataset_conversion')
     for no, export_item in enumerate(export_items, start=1):
@@ -56,12 +58,30 @@ def export(run, export_items, path_root, **params):
         if save_original_file_flag:
             if no == 1:
                 run.log_message('Saving original file.')
-            save_original_file(run, final_data, origin_files_output_path, errors_original_file_list)
+            original_status = save_original_file(run, final_data, origin_files_output_path, errors_original_file_list)
+
+            original_file_metrics_record.stand_by -= 1
+            if original_status == ExportStatus.FAILED:
+                original_file_metrics_record.failed += 1
+                continue
+            else:
+                original_file_metrics_record.success += 1
+
+            run.log_metrics(record=original_file_metrics_record, category='original_file')

         # Extract data as JSON files
         if no == 1:
             run.log_message('Saving json file.')
-        save_as_json(run, final_data, json_output_path, errors_json_file_list)
+        data_status = save_as_json(run, final_data, json_output_path, errors_json_file_list)
+
+        data_file_metrics_record.stand_by -= 1
+        if data_status == ExportStatus.FAILED:
+            data_file_metrics_record.failed += 1
+            continue
+        else:
+            data_file_metrics_record.success += 1
+
+        run.log_metrics(record=data_file_metrics_record, category='data_file')

     run.end_log()

@@ -126,6 +146,7 @@ def save_original_file(run, result, base_path, error_file_list):
         status = ExportStatus.FAILED

     run.export_log_original_file(result['id'], file_info, status, error_msg)
+    return status


 def save_as_json(run, result, base_path, error_file_list):
@@ -152,3 +173,4 @@ def save_as_json(run, result, base_path, error_file_list):
         status = ExportStatus.FAILED

     run.export_log_json_file(result['id'], file_info, status, error_msg)
+    return status
--- /dev/null
+++ b/synapse_sdk/plugins/categories/upload/actions/task_pre_annotation.py
@@ -0,0 +1,101 @@
+from enum import Enum
+from typing import Annotated
+
+from pydantic import AfterValidator, BaseModel, field_validator
+from pydantic_core import PydanticCustomError
+
+from synapse_sdk.clients.exceptions import ClientError
+from synapse_sdk.plugins.categories.base import Action
+from synapse_sdk.plugins.categories.decorators import register_action
+from synapse_sdk.plugins.enums import PluginCategory, RunMethod
+from synapse_sdk.plugins.models import Run
+from synapse_sdk.utils.pydantic.validators import non_blank
+
+
+class TaskDataAnnotationType(str, Enum):
+    FILE = 'file'
+    INFERENCE = 'inference'
+
+
+class TaskPreAnnotationRun(Run):
+    pass
+
+
+class TaskPreAnnotationParams(BaseModel):
+    """TaskPreAnnotation action parameters.
+
+    Args:
+        name (str): The name of the action.
+        description (str | None): The description of the action.
+        project (int): The project ID.
+        data_collection (int): The data collection ID.
+        task_data_annotation_type (TaskDataAnnotationType): The type of task data annotation.
+    """
+
+    name: Annotated[str, AfterValidator(non_blank)]
+    description: str | None
+    project: int
+    data_collection: int
+    task_data_annotation_type: TaskDataAnnotationType
+
+    @field_validator('data_collection', mode='before')
+    @classmethod
+    def check_data_collection_exists(cls, value: str, info) -> str:
+        """Validate synapse-backend collection exists."""
+        action = info.context['action']
+        client = action.client
+        try:
+            client.get_data_collection(value)
+        except ClientError:
+            raise PydanticCustomError('client_error', 'Error occurred while checking data collection exists.')
+        return value
+
+    @field_validator('project', mode='before')
+    @classmethod
+    def check_project_exists(cls, value: str, info) -> str:
+        """Validate synapse-backend project exists."""
+        if not value:
+            return value
+
+        action = info.context['action']
+        client = action.client
+        try:
+            client.get_project(value)
+        except ClientError:
+            raise PydanticCustomError('client_error', 'Error occurred while checking project exists.')
+        return value
+
+
+@register_action
+class TaskPreAnnotationAction(Action):
+    """TaskPreAnnotation action class.
+
+    * Annotate data to tasks.
+    """
+
+    name = 'task_pre_annotation'
+    category = PluginCategory.UPLOAD
+    method = RunMethod.JOB
+    run_class = TaskPreAnnotationRun
+    progress_categories = {
+        'generate_tasks': {
+            'proportion': 10,
+        },
+        'annotate_task_data': {
+            'proportion': 90,
+        },
+    }
+
+    def start(self):
+        """Start task_pre_annotation action.
+
+        * Generate tasks.
+        * Annotate data to tasks.
+        """
+        task_pre_annotation = self.get_task_pre_annotation()
+        task_pre_annotation.handle_annotate_data_from_files()
+        return {}
+
+    def get_task_pre_annotation(self):
+        """Get task pre annotation entrypoint."""
+        return self.entrypoint()
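Note: TaskPreAnnotationParams validates its IDs against the backend during parsing (get_data_collection and get_project are called, and a ClientError is surfaced as a PydanticCustomError). A parameter payload of the shape the model expects might look like the following; the IDs are placeholders and validation still requires them to exist on the backend:

    params = {
        'name': 'pre-annotate-run-1',         # must be non-blank
        'description': None,
        'project': 12,                        # placeholder project ID
        'data_collection': 34,                # placeholder data collection ID
        'task_data_annotation_type': 'file',  # or 'inference'
    }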
--- a/synapse_sdk/plugins/categories/upload/actions/upload.py
+++ b/synapse_sdk/plugins/categories/upload/actions/upload.py
@@ -8,7 +8,6 @@ from pydantic_core import PydanticCustomError

 from synapse_sdk.clients.exceptions import ClientError
 from synapse_sdk.clients.utils import get_batched_list
-from synapse_sdk.clients.validators.collections import FileSpecificationValidator
 from synapse_sdk.i18n import gettext as _
 from synapse_sdk.plugins.categories.base import Action
 from synapse_sdk.plugins.categories.decorators import register_action
@@ -105,8 +104,6 @@ class UploadParams(BaseModel):
     storage: int
     collection: int
     project: int | None
-    is_generate_tasks: bool = False
-    is_generate_ground_truths: bool = False

     @field_validator('storage', mode='before')
     @classmethod
@@ -131,7 +128,7 @@ class UploadParams(BaseModel):
         action = info.context['action']
         client = action.client
         try:
-            client.get_dataset(value)
+            client.get_data_collection(value)
         except ClientError:
             raise PydanticCustomError('client_error', _('Error occurred while checking collection exists.'))
         return value
@@ -165,8 +162,6 @@ class UploadAction(Action):
         analyze_collection: The progress category for the analyze collection process.
         data_file_upload: The progress category for the upload process.
         generate_data_units: The progress category for the generate data units process.
-        generate_tasks: The progress category for the generate tasks process.
-        generate_ground_truths: The progress category for the generate ground truths process.
     """

     name = 'upload'
@@ -175,51 +170,19 @@ class UploadAction(Action):
     run_class = UploadRun
     progress_categories = {
         'analyze_collection': {
-            'proportion': 0,
+            'proportion': 10,
         },
         'upload_data_files': {
-            'proportion': 0,
+            'proportion': 50,
         },
         'generate_data_units': {
-            'proportion': 0,
-        },
-        'generate_tasks': {
-            'proportion': 0,
-        },
-        'generate_ground_truths': {
-            'proportion': 0,
+            'proportion': 40,
         },
     }

-    def __init__(self, *args, **kwargs):
-        """Initialize UploadAction."""
-        super().__init__(*args, **kwargs)
-
-        # Setup progress categories ratio by options.
-        progress_ratios = {
-            'upload_only': (5, 60, 35, 0, 0),
-            'generate_tasks': (5, 45, 25, 25, 0),
-            'generate_ground_truths': (5, 35, 30, 15, 15),
-        }
-        options = kwargs['plugin_config']['actions']['upload']['options']
-        progress_categories = self.progress_categories
-        if options['allow_generate_tasks'] and not kwargs['params']['allow_generate_ground_truths']:
-            ratio_name = 'generate_tasks'
-        elif options['allow_generate_ground_truths'] and kwargs['params']['allow_generate_tasks']:
-            ratio_name = 'generate_ground_truths'
-        else:
-            ratio_name = 'upload_only'
-
-        assert len(progress_categories) == len(progress_ratios[ratio_name]), (
-            'Progress categories and ratios length mismatch.'
-        )
-        for i, category in enumerate(progress_categories):
-            progress_categories[category]['proportion'] = progress_ratios[ratio_name][i]
-        self.progress_categories = progress_categories
-
-    def get_uploader(self, path):
+    def get_uploader(self, path, file_specification, organized_files):
         """Get uploader from entrypoint."""
-        return self.entrypoint(self.run, path)
+        return self.entrypoint(self.run, path, file_specification, organized_files)

     def start(self) -> Dict:
         """Start upload process.
@@ -231,19 +194,23 @@ class UploadAction(Action):
         storage = self.client.get_storage(self.params['storage'])
         pathlib_cwd = get_pathlib(storage, self.params['path'])

-        # Initialize uploader.
-        uploader = self.get_uploader(pathlib_cwd)
-
         # Analyze Collection file specifications to determine the data structure for upload.
         file_specification_template = self._analyze_collection()
+        organized_files = self._organize_files(pathlib_cwd, file_specification_template)
+
+        # Initialize uploader.
+        uploader = self.get_uploader(pathlib_cwd, file_specification_template, organized_files)

         # Setup result dict.
         result = {}

-        # Organize data according to Collection file specification structure.
+        # Get organized files from the uploader (plugin developer's custom implementation)
+        # or use the default organization method if uploader doesn't provide valid files
        organized_files = uploader.handle_upload_files()
-        if not self._validate_organized_files(file_specification_template, organized_files):
-            self.run.log_message('Validate organized files failed.')
+
+        # Validate the organized files
+        if not self._validate_organized_files(organized_files, file_specification_template):
+            self.run.log_message('Validation failed.', context=Context.ERROR.value)
             self.run.end_log()
             return result

@@ -265,26 +232,6 @@ class UploadAction(Action):
         generated_data_units = self._generate_data_units(uploaded_files, upload_result_count)
         result['generated_data_units_count'] = len(generated_data_units)

-        # Setup task with uploaded synapse-backend data units.
-        if not len(generated_data_units):
-            self.run.log_message('No data units were generated.', context=Context.WARNING.value)
-            self.run.end_log()
-            return result
-
-        if self.config['options']['allow_generate_tasks'] and self.params['is_generate_tasks']:
-            generated_tasks = self._generate_tasks(generated_data_units)
-            result['generated_tasks_count'] = len(generated_tasks)
-        else:
-            self.run.log_message('Generating tasks process has passed.')
-
-        # Generate ground truths for the uploaded data.
-        # TODO: Need to add ground truths generation logic later.
-        if self.config['options']['allow_generate_ground_truths'] and self.params['is_generate_ground_truths']:
-            generated_ground_truths = self._generate_ground_truths()
-            result['generated_ground_truths_count'] = len(generated_ground_truths)
-        else:
-            self.run.log_message('Generating ground truths process has passed.')
-
         self.run.end_log()
         return result

@@ -300,18 +247,13 @@ class UploadAction(Action):

         client = self.run.client
         collection_id = self.params['collection']
-        collection = client.get_dataset(collection_id)
+        collection = client.get_data_collection(collection_id)

         # Finish progress
         self.run.set_progress(1, 1, category='analyze_collection')

         return collection['file_specifications']

-    def _validate_organized_files(self, file_specification_template: Dict, organized_files: List) -> bool:
-        """Validate organized files from Uploader."""
-        validator = FileSpecificationValidator(file_specification_template, organized_files)
-        return validator.validate()
-
     def _upload_files(self, organized_files, organized_files_count: int) -> List:
         """Upload files to synapse-backend.

@@ -369,53 +311,3 @@ class UploadAction(Action):
         self.run.set_progress(upload_result_count, upload_result_count, category='generate_data_units')

         return sum(generated_data_units, [])
-
-    def _generate_tasks(self, generated_data_units: List) -> List:
-        """Setup task with uploaded synapse-backend data units.
-
-        TODO: make batch size configurable.
-        """
-        # Initialize progress
-        self.run.set_progress(0, 1, category='generate_tasks')
-        self.run.log_message('Generating tasks with data files...')
-
-        # Prepare batches for processing
-        client = self.run.client
-        project_id = self.params['project']
-        current_progress = 0
-
-        # Generate tasks
-        generated_tasks = []
-        generated_data_units_count = len(generated_data_units)
-        for data_unit in generated_data_units:
-            tasks_data = []
-            task_data = {'project': project_id, 'data_unit': data_unit['id']}
-            tasks_data.append(task_data)
-            if tasks_data:
-                created_tasks = client.create_tasks(tasks_data)
-                created_task_ids = [created_task['id'] for created_task in created_tasks]
-                generated_tasks.append(created_task_ids)
-                for created_task_id in created_task_ids:
-                    self.run.log_task(created_task_id, UploadStatus.SUCCESS)
-
-            self.run.set_progress(current_progress, generated_data_units_count, category='generate_tasks')
-            current_progress += 1
-
-        # Finish progress
-        self.run.log_message('Generating tasks completed')
-        self.run.set_progress(1, 1, category='generate_tasks')
-
-        return sum(generated_tasks, [])
-
-    def _generate_ground_truths(self):
-        """Generate ground truths for the uploaded data.
-
-        TODO: Need to add ground truths generation logic later.
-        """
-        # Initialize progress
-        self.run.set_progress(0, 1, category='generate_ground_truths')
-        self.run.log_message('Generating ground truths...')
-
-        # Finish progress
-        self.run.log_message('Generating ground truths completed')
-        self.run.set_progress(1, 1, category='generate_ground_truths')
--- a/synapse_sdk/plugins/categories/upload/templates/config.yaml
+++ b/synapse_sdk/plugins/categories/upload/templates/config.yaml
@@ -1,9 +1,10 @@
 actions:
   upload:
     entrypoint: plugin.upload.Uploader
-    options:
-      allow_generate_tasks: false # Allow the plugin to generate tasks for the uploaded data
-      allow_generate_ground_truths: false # Allow the plugin to generate ground truths for the uploaded data
     supported_data_type: image # A primary data type of synapse backend collection. (e.g. 'image', 'text', 'video', 'pcd', 'audio')
     ui_schema: |
       Dumped FormKit Schema for upload plugin custom options
+  task_pre_annotation:
+    entrypoint: plugin.upload.TaskPreAnnotation
+    ui_schema: |
+      Dumped FormKit Schema for upload plugin custom options
--- /dev/null
+++ b/synapse_sdk/plugins/categories/upload/templates/plugin/task_pre_annotation.py
@@ -0,0 +1,14 @@
+class TaskPreAnnotation:
+    def __init__(self, run, *args, **kwargs):
+        """Initialize the plugin task pre annotation action class.
+
+        Args:
+            run: Plugin run object.
+        """
+        self.run = run
+
+    def handle_annotate_data_from_files(self):
+        pass
+
+    def handle_annotate_data_with_inference(self):
+        pass
--- a/synapse_sdk/plugins/categories/upload/templates/plugin/upload.py
+++ b/synapse_sdk/plugins/categories/upload/templates/plugin/upload.py
@@ -3,42 +3,38 @@ from typing import List


 class Uploader:
-    """Plugin upload action class.
+    """Plugin upload action interface for organizing files.

-    * Organize, upload, setup task, generate ground truths for the uploaded data.
+    This class provides a minimal interface for plugin developers to implement
+    their own file organization logic.
     """

-    def __init__(self, run, path: Path, *args, **kwargs):
+    def __init__(self, run, path: Path, file_specification: List = None, organized_files: List = None):
         """Initialize the plugin upload action class.

         Args:
-            run: Plugin run object.
-            path: pathlib object by upload target destination path.
+            run: Plugin run object with logging capabilities.
+            path: Path object pointing to the upload target directory.
+            file_specification: List of specifications that define the structure of files to be uploaded.
+                Each specification contains details like file name, type, and requirements.
         """
         self.run = run
         self.path = path
+        self.file_specification = file_specification
+        self.organized_files = organized_files

     def handle_upload_files(self) -> List:
-        """Handle upload files.
+        """Customize the organization of files for upload.

-        * Organize data according to collection file specification structure.
-        * Structure files according to the file specification of the target collection.
+        This method provides a hook for plugin developers to modify the default file organization.
+        You can override this method to filter files, transform data, or add custom metadata
+        based on your specific requirements.
+
+        Args:
+            organized_files (List): The default organized files structure.
+                Each item is a dictionary with 'files' and 'meta' keys.

         Returns:
-            List: List of dictionaries containing 'files' and 'meta'.
-
-        Examples:
-            [
-                {
-                    "files": {
-                        'image_1': image_1_pathlib_object,
-                        'image_2': image_2_pathlib_object,
-                        'meta_1': meta_1_pathlib_object,
-                    },
-                    "meta": {
-                        "key": "value"
-                    }
-                }
-            ]
+            List: The modified list of organized files to be uploaded.
         """
-        return []
+        return self.organized_files
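Note: with the 1.0.0a56 interface, the Uploader template receives the file specification and the pre-organized files at construction time, and handle_upload_files defaults to returning them unchanged. A hedged sketch of a plugin-side override, assuming each organized item is a dict with 'files' (name to pathlib object) and 'meta' keys as documented above; the subclass name and filtering rule are illustrative, not part of the SDK:

    from pathlib import Path
    from typing import List


    class FilteringUploader(Uploader):  # hypothetical subclass for illustration
        def handle_upload_files(self) -> List:
            # Keep only items whose files all exist on disk, tagging them in meta.
            kept = []
            for item in self.organized_files or []:
                if all(Path(p).exists() for p in item['files'].values()):
                    item['meta']['verified'] = True
                    kept.append(item)
            return kept

Returning the filtered list (rather than mutating self.organized_files in place) keeps the hook side-effect free, since UploadAction validates whatever this method returns.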
--- a/synapse_sdk-1.0.0a54.dist-info/METADATA
+++ b/synapse_sdk-1.0.0a56.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: synapse-sdk
-Version: 1.0.0a54
+Version: 1.0.0a56
 Summary: synapse sdk
 Author-email: datamaker <developer@datamaker.io>
 License: MIT
--- a/synapse_sdk-1.0.0a54.dist-info/RECORD
+++ b/synapse_sdk-1.0.0a56.dist-info/RECORD
@@ -28,10 +28,10 @@ synapse_sdk/clients/agent/__init__.py,sha256=Pz8_iTbIbnb7ywGJ3feqoZVmO2I3mEbwpWs
 synapse_sdk/clients/agent/core.py,sha256=x2jgORTjT7pJY67SLuc-5lMG6CD5OWpy8UgGeTf7IhA,270
 synapse_sdk/clients/agent/ray.py,sha256=JrwLyVOUDG2yYsbPrxyUtWbM-FWp9B6Bl_GdDby0rt8,1559
 synapse_sdk/clients/agent/service.py,sha256=s7KuPK_DB1nr2VHrigttV1WyFonaGHNrPvU8loRxHcE,478
-synapse_sdk/clients/backend/__init__.py,sha256=Fiehino2n3voaHTdpJHXSY7K_CDnMkQeokapbgeoTBk,1187
+synapse_sdk/clients/backend/__init__.py,sha256=MC3pndBk-SPyW9L6WnrTozoub9-EK7auXFvPHCaxeFU,1209
 synapse_sdk/clients/backend/annotation.py,sha256=f4jS4qlXH7M7mQ3EuCq-NrjJ_hJNDz8pEFAYqf-e008,996
 synapse_sdk/clients/backend/core.py,sha256=5XAOdo6JZ0drfk-FMPJ96SeTd9oja-VnTwzGXdvK7Bg,1027
-synapse_sdk/clients/backend/dataset.py,sha256=11R5LuTva9jgXatxQAlKy7UEJmwIWzTsLVdFf3MZ9F8,3400
+synapse_sdk/clients/backend/data_collection.py,sha256=kj9TurBAljK_mFF75oaazlqnL0bd6PHbgRfR3KyTUmI,3623
 synapse_sdk/clients/backend/hitl.py,sha256=na2mSXFud92p4zUEuagcDWk2klxO7xn-e86cm0VZEvs,709
 synapse_sdk/clients/backend/integration.py,sha256=9LjkYcBpi7aog-MODSDS4RlmYahypu65qxBj-AcY7xc,2683
 synapse_sdk/clients/backend/ml.py,sha256=JoPH9Ly2E3HJ7S5mdGLtcGq7ruQVVrYfWArogwZLlms,1193
@@ -61,10 +61,10 @@ synapse_sdk/plugins/categories/data_validation/templates/plugin/validation.py,sh
 synapse_sdk/plugins/categories/export/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 synapse_sdk/plugins/categories/export/enums.py,sha256=gtyngvQ1DKkos9iKGcbecwTVQQ6sDwbrBPSGPNb5Am0,127
 synapse_sdk/plugins/categories/export/actions/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-synapse_sdk/plugins/categories/export/actions/export.py,sha256=xqPB_MufeMP3riaKCbGVFGukV8RdXcg6-zUrkw4t1-A,9922
+synapse_sdk/plugins/categories/export/actions/export.py,sha256=2lIjur8EiwTB9sc16FV8ZaPXFxUtGRPx9hreG_DKLQA,10483
 synapse_sdk/plugins/categories/export/templates/config.yaml,sha256=N7YmnFROb3s3M35SA9nmabyzoSb5O2t2TRPicwFNN2o,56
 synapse_sdk/plugins/categories/export/templates/plugin/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-synapse_sdk/plugins/categories/export/templates/plugin/export.py,sha256=JA2Y_A30QyJekSqDq8PeRuFR9k0yjQjOG-Xy6C8zPew,5196
+synapse_sdk/plugins/categories/export/templates/plugin/export.py,sha256=zG8mSn7ZGIj8cttWmb7GEPcGgQRbZ97brJCzkuK7RP8,6106
 synapse_sdk/plugins/categories/neural_net/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 synapse_sdk/plugins/categories/neural_net/actions/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 synapse_sdk/plugins/categories/neural_net/actions/deployment.py,sha256=y2LrS-pwazqRI5O0q1NUy45NQYsBj6ykbrXnDMs_fqE,1987
@@ -100,10 +100,12 @@ synapse_sdk/plugins/categories/smart_tool/templates/plugin/__init__.py,sha256=47
 synapse_sdk/plugins/categories/smart_tool/templates/plugin/auto_label.py,sha256=eevNg0nOcYFR4z_L_R-sCvVOYoLWSAH1jwDkAf3YCjY,320
 synapse_sdk/plugins/categories/upload/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 synapse_sdk/plugins/categories/upload/actions/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-synapse_sdk/plugins/categories/upload/actions/upload.py,sha256=8FIRkEtSPTToSZBYn7B2qaTj9778RDjr84BHnn6ajSM,15951
-synapse_sdk/plugins/categories/upload/templates/config.yaml,sha256=kwHNWHFYbzDi1mEh40KozatPZbZGH44dlP0t0J7ejJw,483
+synapse_sdk/plugins/categories/upload/actions/task_pre_annotation.py,sha256=YkQZ7QECu6-PnSEv2lAbbL3smxeIHxUiu9ruBdA0_0k,3066
+synapse_sdk/plugins/categories/upload/actions/upload.py,sha256=WSzTulI3GAejXKy6DsmxP2zE7fRAX_bYTUaL0Za7Ci8,11287
+synapse_sdk/plugins/categories/upload/templates/config.yaml,sha256=1O0kMfkFMGYwnpBcttrlC9bu4xzU9docw2MBOq_Elmo,417
 synapse_sdk/plugins/categories/upload/templates/plugin/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-synapse_sdk/plugins/categories/upload/templates/plugin/upload.py,sha256=dnK8gy33GjG5ettayawDJv1gM3xCm1K6lM-PfeeTjQw,1163
+synapse_sdk/plugins/categories/upload/templates/plugin/task_pre_annotation.py,sha256=9XkUZu7USjVjDPufM0NlYmkdKfV7Hf_9v5GN1RgZzS0,350
+synapse_sdk/plugins/categories/upload/templates/plugin/upload.py,sha256=IZU4sdSMSLKPCtlNqF7DP2howTdYR6hr74HCUZsGdPk,1559
 synapse_sdk/plugins/templates/cookiecutter.json,sha256=NxOWk9A_v1pO0Ny4IYT9Cj5iiJ16--cIQrGC67QdR0I,396
 synapse_sdk/plugins/templates/hooks/post_gen_project.py,sha256=jqlYkY1O2TxIR-Vh3gnwILYy8k-D39Xx66d2KNQVMCs,147
 synapse_sdk/plugins/templates/hooks/pre_prompt.py,sha256=aOAMM623s0sKFGjTZaotAOYFvsNMxeii4tPyhOAFKVE,539
@@ -134,9 +136,9 @@ synapse_sdk/utils/storage/providers/__init__.py,sha256=x7RGwZryT2FpVxS7fGWryRVpq
 synapse_sdk/utils/storage/providers/gcp.py,sha256=i2BQCu1Kej1If9SuNr2_lEyTcr5M_ncGITZrL0u5wEA,363
 synapse_sdk/utils/storage/providers/s3.py,sha256=W94rQvhGRXti3R4mYP7gmU5pcyCQpGFIBLvxxqLVdRM,2231
 synapse_sdk/utils/storage/providers/sftp.py,sha256=_8s9hf0JXIO21gvm-JVS00FbLsbtvly4c-ETLRax68A,1426
-synapse_sdk-1.0.0a54.dist-info/licenses/LICENSE,sha256=bKzmC5YAg4V1Fhl8OO_tqY8j62hgdncAkN7VrdjmrGk,1101
-synapse_sdk-1.0.0a54.dist-info/METADATA,sha256=JHOOafiDzcZBZjHOAL8AUzlEjhWs1YyLz6VH4jCGK4I,1303
-synapse_sdk-1.0.0a54.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-synapse_sdk-1.0.0a54.dist-info/entry_points.txt,sha256=VNptJoGoNJI8yLXfBmhgUefMsmGI0m3-0YoMvrOgbxo,48
-synapse_sdk-1.0.0a54.dist-info/top_level.txt,sha256=ytgJMRK1slVOKUpgcw3LEyHHP7S34J6n_gJzdkcSsw8,12
-synapse_sdk-1.0.0a54.dist-info/RECORD,,
+synapse_sdk-1.0.0a56.dist-info/licenses/LICENSE,sha256=bKzmC5YAg4V1Fhl8OO_tqY8j62hgdncAkN7VrdjmrGk,1101
+synapse_sdk-1.0.0a56.dist-info/METADATA,sha256=Ms_yriRQzC_lTwXwNUzA_rhSafE6YG69OurqybsaQPs,1303
+synapse_sdk-1.0.0a56.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+synapse_sdk-1.0.0a56.dist-info/entry_points.txt,sha256=VNptJoGoNJI8yLXfBmhgUefMsmGI0m3-0YoMvrOgbxo,48
+synapse_sdk-1.0.0a56.dist-info/top_level.txt,sha256=ytgJMRK1slVOKUpgcw3LEyHHP7S34J6n_gJzdkcSsw8,12
+synapse_sdk-1.0.0a56.dist-info/RECORD,,