synapse-sdk 1.0.0a54__py3-none-any.whl → 1.0.0a55__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of synapse-sdk might be problematic.
- synapse_sdk/clients/backend/dataset.py +7 -7
- synapse_sdk/plugins/categories/upload/actions/upload.py +186 -14
- synapse_sdk/plugins/categories/upload/templates/plugin/upload.py +20 -24
- {synapse_sdk-1.0.0a54.dist-info → synapse_sdk-1.0.0a55.dist-info}/METADATA +1 -1
- {synapse_sdk-1.0.0a54.dist-info → synapse_sdk-1.0.0a55.dist-info}/RECORD +9 -9
- {synapse_sdk-1.0.0a54.dist-info → synapse_sdk-1.0.0a55.dist-info}/WHEEL +0 -0
- {synapse_sdk-1.0.0a54.dist-info → synapse_sdk-1.0.0a55.dist-info}/entry_points.txt +0 -0
- {synapse_sdk-1.0.0a54.dist-info → synapse_sdk-1.0.0a55.dist-info}/licenses/LICENSE +0 -0
- {synapse_sdk-1.0.0a54.dist-info → synapse_sdk-1.0.0a55.dist-info}/top_level.txt +0 -0
synapse_sdk/clients/backend/dataset.py

@@ -10,16 +10,16 @@ from synapse_sdk.clients.utils import get_batched_list
 
 class DatasetClientMixin(BaseClient):
     def list_dataset(self):
-        path = '
+        path = 'data_collections/'
         return self._list(path)
 
-    def get_dataset(self,
+    def get_dataset(self, data_collection_id):
         """Get dataset from synapse-backend.
 
         Args:
-
+            data_collection_id: The data_collection id to get.
         """
-        path = f'
+        path = f'data_collections/{data_collection_id}/?expand=file_specifications'
        return self._get(path)
 
     def create_data_file(self, file_path: Path):
@@ -82,7 +82,7 @@ class DatasetClientMixin(BaseClient):
 
         self.create_tasks(tasks_data)
 
-    def upload_data_file(self, data: Dict,
+    def upload_data_file(self, data: Dict, data_collection_id: int) -> Dict:
         """Upload files to synapse-backend.
 
         Args:
@@ -90,13 +90,13 @@ class DatasetClientMixin(BaseClient):
                 * structure:
                     - files: The files to upload. (key: file name, value: file pathlib object)
                     - meta: The meta data to upload.
-
+            data_collection_id: The dataset id to upload the data to.
 
         Returns:
             Dict: The result of the upload.
         """
         for name, path in data['files'].items():
             data_file = self.create_data_file(path)
-            data['
+            data['data_collection'] = data_collection_id
             data['files'][name] = {'checksum': data_file['checksum'], 'path': str(path)}
         return data
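For orientation, a minimal usage sketch of the renamed client methods above. The way the backend client is constructed is not part of this diff, so `backend_client` below is a stand-in for however the application obtains a client that includes DatasetClientMixin; only the two calls reflect this release:

from pathlib import Path

# `backend_client` is assumed to already exist (its construction is outside this diff).
collection = backend_client.get_dataset(data_collection_id=42)
file_specs = collection['file_specifications']

payload = {
    'files': {'image_1': Path('/data/image_1/0001.jpg')},
    'meta': {'source': 'example'},
}
# upload_data_file() now stamps payload['data_collection'] and replaces each file
# entry with its checksum and storage path before returning the payload.
payload = backend_client.upload_data_file(payload, data_collection_id=42)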
synapse_sdk/plugins/categories/upload/actions/upload.py

@@ -1,6 +1,8 @@
 import json
+import re
 from datetime import datetime
 from enum import Enum
+from pathlib import Path
 from typing import Annotated, Dict, List
 
 from pydantic import AfterValidator, BaseModel, field_validator
@@ -201,7 +203,7 @@ class UploadAction(Action):
             'generate_tasks': (5, 45, 25, 25, 0),
             'generate_ground_truths': (5, 35, 30, 15, 15),
         }
-        options =
+        options = self.config.get('options', {})
         progress_categories = self.progress_categories
         if options['allow_generate_tasks'] and not kwargs['params']['allow_generate_ground_truths']:
             ratio_name = 'generate_tasks'
@@ -217,9 +219,9 @@ class UploadAction(Action):
             progress_categories[category]['proportion'] = progress_ratios[ratio_name][i]
         self.progress_categories = progress_categories
 
-    def get_uploader(self, path):
+    def get_uploader(self, path, file_specification, organized_files):
         """Get uploader from entrypoint."""
-        return self.entrypoint(self.run, path)
+        return self.entrypoint(self.run, path, file_specification, organized_files)
 
     def start(self) -> Dict:
         """Start upload process.
@@ -231,19 +233,23 @@ class UploadAction(Action):
         storage = self.client.get_storage(self.params['storage'])
         pathlib_cwd = get_pathlib(storage, self.params['path'])
 
-        # Initialize uploader.
-        uploader = self.get_uploader(pathlib_cwd)
-
         # Analyze Collection file specifications to determine the data structure for upload.
         file_specification_template = self._analyze_collection()
+        organized_files = self._organize_files(pathlib_cwd, file_specification_template)
+
+        # Initialize uploader.
+        uploader = self.get_uploader(pathlib_cwd, file_specification_template, organized_files)
 
         # Setup result dict.
         result = {}
 
-        #
+        # Get organized files from the uploader (plugin developer's custom implementation)
+        # or use the default organization method if uploader doesn't provide valid files
         organized_files = uploader.handle_upload_files()
-
-
+
+        # Validate the organized files
+        if not self._validate_organized_files(organized_files, file_specification_template):
+            self.run.log_message('Validation failed.', context=Context.ERROR.value)
             self.run.end_log()
             return result
 
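Read without the diff markers, the lines this hunk touches in start() now run in roughly this order (a condensed sketch of the changed lines only, not the full method):

# Default organization is computed before the uploader is constructed.
file_specification_template = self._analyze_collection()
organized_files = self._organize_files(pathlib_cwd, file_specification_template)
uploader = self.get_uploader(pathlib_cwd, file_specification_template, organized_files)

# The uploader hook may replace or refine that default organization.
organized_files = uploader.handle_upload_files()

# The result is validated against the collection's file specifications.
if not self._validate_organized_files(organized_files, file_specification_template):
    self.run.log_message('Validation failed.', context=Context.ERROR.value)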
@@ -307,11 +313,6 @@ class UploadAction(Action):
 
         return collection['file_specifications']
 
-    def _validate_organized_files(self, file_specification_template: Dict, organized_files: List) -> bool:
-        """Validate organized files from Uploader."""
-        validator = FileSpecificationValidator(file_specification_template, organized_files)
-        return validator.validate()
-
     def _upload_files(self, organized_files, organized_files_count: int) -> List:
         """Upload files to synapse-backend.
 
@@ -419,3 +420,174 @@ class UploadAction(Action):
         # Finish progress
         self.run.log_message('Generating ground truths completed')
         self.run.set_progress(1, 1, category='generate_ground_truths')
+
+    def _validate_organized_files(self, organized_files: List, file_specification_template: Dict) -> bool:
+        """Validate organized files from Uploader."""
+        validator = FileSpecificationValidator(file_specification_template, organized_files)
+        return validator.validate()
+
+    def _organize_files(self, directory: Path, file_specification: List) -> List:
+        """Organize files according to the file specification.
+
+        This method handles type-based directory structure where files are organized in
+        directories named after file types (e.g., 'image_1/' directory contains image files
+        like '1.jpg', '2.jpg'). For each dataset ID found in the primary directory, it attempts
+        to find corresponding files in all type directories.
+
+        TODO : Add Logic to handle file specific name patterns and extensions.
+            (e.g. pcd:S_DCH_230725_0156_LR_037.pcd, image_1:S_DCH_230725_0156_FC_037, image_2:S_DCH_230725_0156_LF_037.jpg)
+
+        Args:
+            directory (Path): Root directory containing files to organize.
+            file_specification (List): File specification list.
+
+        Returns:
+            List: List of dictionaries containing organized files.
+        """
+        organized_files = []
+        self.run.log_message(f'Looking for files in {directory}...')
+
+        # Check for type-based directory structure (e.g., image_1/, pcd_1/)
+        type_dirs = {}
+        type_extensions = {}  # Store common extensions for each type directory
+
+        for spec in file_specification:
+            spec_name = spec['name']
+
+            spec_dir = directory / spec_name
+            if spec_dir.exists() and spec_dir.is_dir():
+                type_dirs[spec_name] = spec_dir
+
+                # Analyze file extensions in this directory
+                extensions = {}
+                for file_path in spec_dir.glob('*'):
+                    if file_path.is_file():
+                        ext = file_path.suffix.lower()
+                        extensions[ext] = extensions.get(ext, 0) + 1
+
+                # Find the most common extension
+                if extensions:
+                    common_ext = max(extensions.items(), key=lambda x: x[1])[0]
+                    type_extensions[spec_name] = common_ext
+                    self.run.log_message(f'Found type directory: {spec_name} (common extension: {common_ext})')
+
+        # If type-based directories don't exist, exit early
+        if not type_dirs:
+            self.run.log_message('No type-based directory structure found.', context=Context.INFO.value)
+            return organized_files
+
+        self.run.log_message('Detected type-based directory structure')
+
+        # Build a comprehensive map of all dataset IDs across all type directories
+        dataset_files = {}  # Dictionary: file_name -> {spec_name -> file_path}
+
+        # First pass: collect all dataset IDs from all type directories
+        for spec_name, dir_path in type_dirs.items():
+            for file_path in dir_path.glob('*'):
+                if file_path.is_file():
+                    file_name = file_path.stem
+
+                    # Initialize dataset entry if it doesn't exist
+                    if file_name not in dataset_files:
+                        dataset_files[file_name] = {}
+
+                    # Map this file to its specification
+                    if spec_name not in dataset_files[file_name]:
+                        dataset_files[file_name][spec_name] = file_path
+                    else:
+                        # If multiple files with same file_name for same spec, use most recent
+                        existing_file = dataset_files[file_name][spec_name]
+                        if file_path.stat().st_mtime > existing_file.stat().st_mtime:
+                            dataset_files[file_name][spec_name] = file_path
+                            self.run.log_message(
+                                f"Found newer file for name of {file_name}, spec '{spec_name}': "
+                                f'{file_path.name} (replacing {existing_file.name})'
+                            )
+
+        if not dataset_files:
+            self.run.log_message('No dataset files found.', context=Context.WARNING.value)
+            return organized_files
+
+        self.run.log_message(f'Found {len(dataset_files)} potential datasets by ID')
+
+        # Second pass: organize valid datasets
+        for file_name, files_dict in sorted(dataset_files.items()):
+            self.run.log_message(f'Processing file name: {file_name}')
+
+            # Add file spec details for logging
+            for spec_name, file_path in files_dict.items():
+                self.run.log_message(f"Mapped '{spec_name}' to: {file_path.name}")
+
+            # Check if all required files are present
+            required_specs = [spec['name'] for spec in file_specification if spec.get('is_required', False)]
+            if all(req in files_dict for req in required_specs):
+                # Create metadata for this dataset
+                meta_data = {
+                    'origin_file_stem': file_name,
+                    'created_at': datetime.now().isoformat(),
+                }
+
+                # Add the organized dataset
+                organized_files.append({'files': files_dict, 'meta': meta_data})
+                self.run.log_message(f'Successfully organized dataset for ID {file_name}')
+            else:
+                # Missing required files warning
+                missing = [req for req in required_specs if req not in files_dict]
+                self.run.log_message(
+                    f'Dataset ID {file_name} is missing required files: {", ".join(missing)}',
+                    context=Context.WARNING.value,
+                )
+
+        self.run.log_message(f'Total datasets organized: {len(organized_files)}')
+        return organized_files
+
+    def _map_files_to_specification(self, directory: Path, file_specification: List) -> Dict[str, Path]:
+        """Map files in a directory to the file specification.
+
+        Args:
+            directory (Path): Directory containing files to map.
+            file_specification (List): File specification list.
+
+        Returns:
+            Dict[str, Path]: Dictionary mapping file specification names to file paths.
+        """
+        files_dict = {}
+
+        # Get all files in the directory once
+        all_files = [f for f in directory.iterdir() if f.is_file()]
+
+        # Process each file specification
+        for file_spec in file_specification:
+            file_name = file_spec['name']
+            is_required = file_spec.get('is_required', False)
+
+            # Generate name pattern based on the specification
+            name_parts = re.split(r'_(\d+)$', file_name)
+
+            # Find files matching the pattern
+            matching_files = []
+            if len(name_parts) > 1:
+                base_name = name_parts[0]
+                index = name_parts[1]
+                # Match patterns like "pcd_1.ext", "point_cloud_1.ext", etc.
+                for file in all_files:
+                    if base_name in file.stem and f'_{index}' in file.stem:
+                        matching_files.append(file)
+            else:
+                # Simple match - just find files containing the pattern
+                for file in all_files:
+                    if file_name in file.stem:
+                        matching_files.append(file)
+
+            # Process matching files
+            if matching_files:
+                # Sort by modification time (newest first) if multiple files match
+                if len(matching_files) > 1:
+                    matching_files.sort(key=lambda f: f.stat().st_mtime, reverse=True)
+                    self.run.log_message(
+                        f"Multiple files match '{file_name}'. Using most recent: {matching_files[0].name}"
+                    )
+
+                files_dict[file_name] = matching_files[0]
+                self.run.log_message(f"Mapped '{file_name}' to: {matching_files[0].name}")
+            elif is_required:
+                self.run.log_message(f"Required file '{file_name}' not found.", context=Context.WARNING.value)
+
+        return files_dict
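To make the type-based layout concrete, here is a hypothetical directory tree and the structure _organize_files would build from it. The directory and file names are illustrative, and the specification is reduced to the only keys the method reads ('name' and 'is_required'):

# Hypothetical upload directory, one sub-directory per specification name:
#
#   upload_root/
#       image_1/
#           0001.jpg
#           0002.jpg
#       pcd_1/
#           0001.pcd
#           0002.pcd
#
# With file_specification = [{'name': 'image_1', 'is_required': True},
#                            {'name': 'pcd_1', 'is_required': True}],
# files sharing a stem are grouped into one dataset entry each:
#
#   [
#       {
#           'files': {'image_1': Path('upload_root/image_1/0001.jpg'),
#                     'pcd_1': Path('upload_root/pcd_1/0001.pcd')},
#           'meta': {'origin_file_stem': '0001', 'created_at': '<ISO timestamp>'},
#       },
#       ...
#   ]
#
# The name-pattern split used by _map_files_to_specification behaves like this:
#   re.split(r'_(\d+)$', 'image_1')  ->  ['image', '1', '']  (base name + index match)
#   re.split(r'_(\d+)$', 'pcd')      ->  ['pcd']             (plain substring match)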
synapse_sdk/plugins/categories/upload/templates/plugin/upload.py

@@ -3,42 +3,38 @@ from typing import List
 
 
 class Uploader:
-    """Plugin upload action
+    """Plugin upload action interface for organizing files.
 
-
+    This class provides a minimal interface for plugin developers to implement
+    their own file organization logic.
     """
 
-    def __init__(self, run, path: Path,
+    def __init__(self, run, path: Path, file_specification: List = None, organized_files: List = None):
         """Initialize the plugin upload action class.
 
         Args:
-            run: Plugin run object.
-            path:
+            run: Plugin run object with logging capabilities.
+            path: Path object pointing to the upload target directory.
+            file_specification: List of specifications that define the structure of files to be uploaded.
+                Each specification contains details like file name, type, and requirements.
         """
         self.run = run
         self.path = path
+        self.file_specification = file_specification
+        self.organized_files = organized_files
 
     def handle_upload_files(self) -> List:
-        """
+        """Customize the organization of files for upload.
 
-
-
+        This method provides a hook for plugin developers to modify the default file organization.
+        You can override this method to filter files, transform data, or add custom metadata
+        based on your specific requirements.
+
+        Args:
+            organized_files (List): The default organized files structure.
+                Each item is a dictionary with 'files' and 'meta' keys.
 
         Returns:
-            List:
-
-        Examples:
-            [
-                {
-                    "files": {
-                        'image_1': image_1_pathlib_object,
-                        'image_2': image_2_pathlib_object,
-                        'meta_1': meta_1_pathlib_object,
-                    },
-                    "meta": {
-                        "key": "value"
-                    }
-                }
-            ]
+            List: The modified list of organized files to be uploaded.
         """
-        return
+        return self.organized_files
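For plugin authors, a minimal sketch of overriding the template's hook. The import path is illustrative (the template is rendered into each generated plugin, so the real module location varies), and the customization shown is only an example:

from typing import List

from plugin.upload import Uploader  # hypothetical path to the rendered template class


class MyUploader(Uploader):
    def handle_upload_files(self) -> List:
        # Start from the default organization computed by the upload action.
        organized = self.organized_files or []

        # Example customization: tag each dataset's metadata before upload.
        for item in organized:
            item['meta']['organized_by'] = 'my-plugin'
        return organized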
{synapse_sdk-1.0.0a54.dist-info → synapse_sdk-1.0.0a55.dist-info}/RECORD

@@ -31,7 +31,7 @@ synapse_sdk/clients/agent/service.py,sha256=s7KuPK_DB1nr2VHrigttV1WyFonaGHNrPvU8
 synapse_sdk/clients/backend/__init__.py,sha256=Fiehino2n3voaHTdpJHXSY7K_CDnMkQeokapbgeoTBk,1187
 synapse_sdk/clients/backend/annotation.py,sha256=f4jS4qlXH7M7mQ3EuCq-NrjJ_hJNDz8pEFAYqf-e008,996
 synapse_sdk/clients/backend/core.py,sha256=5XAOdo6JZ0drfk-FMPJ96SeTd9oja-VnTwzGXdvK7Bg,1027
-synapse_sdk/clients/backend/dataset.py,sha256=
+synapse_sdk/clients/backend/dataset.py,sha256=eQ0O43Ck91z5Om7mb-vW_P5NIaX1OZKirjGs-WQHdM4,3480
 synapse_sdk/clients/backend/hitl.py,sha256=na2mSXFud92p4zUEuagcDWk2klxO7xn-e86cm0VZEvs,709
 synapse_sdk/clients/backend/integration.py,sha256=9LjkYcBpi7aog-MODSDS4RlmYahypu65qxBj-AcY7xc,2683
 synapse_sdk/clients/backend/ml.py,sha256=JoPH9Ly2E3HJ7S5mdGLtcGq7ruQVVrYfWArogwZLlms,1193
@@ -100,10 +100,10 @@ synapse_sdk/plugins/categories/smart_tool/templates/plugin/__init__.py,sha256=47
 synapse_sdk/plugins/categories/smart_tool/templates/plugin/auto_label.py,sha256=eevNg0nOcYFR4z_L_R-sCvVOYoLWSAH1jwDkAf3YCjY,320
 synapse_sdk/plugins/categories/upload/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 synapse_sdk/plugins/categories/upload/actions/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-synapse_sdk/plugins/categories/upload/actions/upload.py,sha256=
+synapse_sdk/plugins/categories/upload/actions/upload.py,sha256=L9OqHWNyzO5qXi9-afkgRI1hfL7ysJjY0z0a5kujJrQ,24202
 synapse_sdk/plugins/categories/upload/templates/config.yaml,sha256=kwHNWHFYbzDi1mEh40KozatPZbZGH44dlP0t0J7ejJw,483
 synapse_sdk/plugins/categories/upload/templates/plugin/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-synapse_sdk/plugins/categories/upload/templates/plugin/upload.py,sha256=
+synapse_sdk/plugins/categories/upload/templates/plugin/upload.py,sha256=IZU4sdSMSLKPCtlNqF7DP2howTdYR6hr74HCUZsGdPk,1559
 synapse_sdk/plugins/templates/cookiecutter.json,sha256=NxOWk9A_v1pO0Ny4IYT9Cj5iiJ16--cIQrGC67QdR0I,396
 synapse_sdk/plugins/templates/hooks/post_gen_project.py,sha256=jqlYkY1O2TxIR-Vh3gnwILYy8k-D39Xx66d2KNQVMCs,147
 synapse_sdk/plugins/templates/hooks/pre_prompt.py,sha256=aOAMM623s0sKFGjTZaotAOYFvsNMxeii4tPyhOAFKVE,539
@@ -134,9 +134,9 @@ synapse_sdk/utils/storage/providers/__init__.py,sha256=x7RGwZryT2FpVxS7fGWryRVpq
 synapse_sdk/utils/storage/providers/gcp.py,sha256=i2BQCu1Kej1If9SuNr2_lEyTcr5M_ncGITZrL0u5wEA,363
 synapse_sdk/utils/storage/providers/s3.py,sha256=W94rQvhGRXti3R4mYP7gmU5pcyCQpGFIBLvxxqLVdRM,2231
 synapse_sdk/utils/storage/providers/sftp.py,sha256=_8s9hf0JXIO21gvm-JVS00FbLsbtvly4c-ETLRax68A,1426
-synapse_sdk-1.0.
-synapse_sdk-1.0.
-synapse_sdk-1.0.
-synapse_sdk-1.0.
-synapse_sdk-1.0.
-synapse_sdk-1.0.
+synapse_sdk-1.0.0a55.dist-info/licenses/LICENSE,sha256=bKzmC5YAg4V1Fhl8OO_tqY8j62hgdncAkN7VrdjmrGk,1101
+synapse_sdk-1.0.0a55.dist-info/METADATA,sha256=d2fJLQn1LuXa0NeLokHzsjeE8zgIyOJrOH3bRBUoIw0,1303
+synapse_sdk-1.0.0a55.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+synapse_sdk-1.0.0a55.dist-info/entry_points.txt,sha256=VNptJoGoNJI8yLXfBmhgUefMsmGI0m3-0YoMvrOgbxo,48
+synapse_sdk-1.0.0a55.dist-info/top_level.txt,sha256=ytgJMRK1slVOKUpgcw3LEyHHP7S34J6n_gJzdkcSsw8,12
+synapse_sdk-1.0.0a55.dist-info/RECORD,,
The remaining dist-info files (WHEEL, entry_points.txt, licenses/LICENSE, and top_level.txt) are carried over without content changes; only the dist-info directory name moves from 1.0.0a54 to 1.0.0a55.