synapse-sdk 1.0.0b17__py3-none-any.whl → 1.0.0b19__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.



Files changed (25)
  1. synapse_sdk/clients/backend/data_collection.py +2 -2
  2. synapse_sdk/devtools/docs/docs/contributing.md +1 -1
  3. synapse_sdk/devtools/docs/docs/features/index.md +4 -4
  4. synapse_sdk/devtools/docs/docs/plugins/export-plugins.md +786 -0
  5. synapse_sdk/devtools/docs/docs/{features/plugins/index.md → plugins/plugins.md} +352 -21
  6. synapse_sdk/devtools/docs/docusaurus.config.ts +8 -0
  7. synapse_sdk/devtools/docs/i18n/ko/docusaurus-plugin-content-docs/current/plugins/export-plugins.md +788 -0
  8. synapse_sdk/devtools/docs/i18n/ko/docusaurus-plugin-content-docs/current/plugins/plugins.md +71 -0
  9. synapse_sdk/devtools/docs/package-lock.json +1366 -37
  10. synapse_sdk/devtools/docs/package.json +2 -1
  11. synapse_sdk/devtools/docs/sidebars.ts +8 -1
  12. synapse_sdk/plugins/categories/export/actions/export.py +2 -1
  13. synapse_sdk/plugins/categories/export/templates/config.yaml +1 -1
  14. synapse_sdk/plugins/categories/export/templates/plugin/__init__.py +376 -0
  15. synapse_sdk/plugins/categories/export/templates/plugin/export.py +56 -190
  16. synapse_sdk/plugins/categories/upload/actions/upload.py +181 -22
  17. synapse_sdk/plugins/categories/upload/templates/config.yaml +24 -2
  18. synapse_sdk/plugins/categories/upload/templates/plugin/upload.py +9 -2
  19. {synapse_sdk-1.0.0b17.dist-info → synapse_sdk-1.0.0b19.dist-info}/METADATA +1 -1
  20. {synapse_sdk-1.0.0b17.dist-info → synapse_sdk-1.0.0b19.dist-info}/RECORD +24 -22
  21. synapse_sdk/devtools/docs/i18n/ko/docusaurus-plugin-content-docs/current/features/plugins/index.md +0 -30
  22. {synapse_sdk-1.0.0b17.dist-info → synapse_sdk-1.0.0b19.dist-info}/WHEEL +0 -0
  23. {synapse_sdk-1.0.0b17.dist-info → synapse_sdk-1.0.0b19.dist-info}/entry_points.txt +0 -0
  24. {synapse_sdk-1.0.0b17.dist-info → synapse_sdk-1.0.0b19.dist-info}/licenses/LICENSE +0 -0
  25. {synapse_sdk-1.0.0b17.dist-info → synapse_sdk-1.0.0b19.dist-info}/top_level.txt +0 -0
synapse_sdk/devtools/docs/docs/{features/plugins/index.md → plugins/plugins.md}

@@ -1,7 +1,7 @@
  ---
  id: plugins
  title: Plugin System
- sidebar_position: 2
+ sidebar_position: 1
  ---

  # Plugin System
@@ -30,6 +30,7 @@ The SDK organizes plugins into specific categories, each designed for different
  ML model training, inference, and deployment operations.

  **Available Actions:**
+
  - `deployment` - Deploy models to production environments
  - `gradio` - Create interactive web interfaces for models
  - `inference` - Run model predictions on data
@@ -38,6 +39,7 @@ ML model training, inference, and deployment operations.
  - `tune` - Hyperparameter optimization and model tuning

  **Use Cases:**
+
  - Training computer vision models
  - Deploying models as web services
  - Running batch inference on datasets
@@ -45,24 +47,38 @@ ML model training, inference, and deployment operations.

  ### 2. Export (`export`)

- Data export and transformation operations.
+ Data export and transformation operations for exporting annotated data, ground truth datasets, assignments, and tasks from the Synapse platform.

  **Available Actions:**
- - `export` - Export data in various formats and destinations
+
+ - `export` - Export data from various sources (assignments, ground truth, tasks) with customizable processing

  **Use Cases:**
- - Converting datasets to different formats
- - Exporting processed data to cloud storage
+
+ - Exporting annotated datasets for training
+ - Converting ground truth data to custom formats
  - Creating data packages for distribution
+ - Batch processing of assignment results
+ - Transforming annotation data for external tools
+
+ **Supported Export Targets:**
+
+ - `assignment` - Export assignment data with annotations
+ - `ground_truth` - Export ground truth dataset versions
+ - `task` - Export task data with associated annotations
+
+ For detailed information about export plugins, BaseExporter class architecture, implementation examples, and best practices, see the [Export Plugins](./export-plugins) documentation.

  ### 3. Upload (`upload`)

  File and data upload functionality with support for various storage backends.

  **Available Actions:**
+
  - `upload` - Upload files to storage providers

  **Use Cases:**
+
  - Uploading datasets to cloud storage
  - Backing up processed data
  - Sharing data between team members
@@ -72,9 +88,11 @@ File and data upload functionality with support for various storage backends.
  Intelligent automation tools powered by AI.

  **Available Actions:**
+
  - `auto_label` - Automated data labeling and annotation

  **Use Cases:**
+
  - Pre-labeling datasets with AI models
  - Quality assurance for manual annotations
  - Accelerating annotation workflows
@@ -84,10 +102,12 @@ Intelligent automation tools powered by AI.
  Data preparation and processing before annotation.

  **Available Actions:**
+
  - `pre_annotation` - Prepare data for annotation workflows
  - `to_task` - Convert data to annotation tasks

  **Use Cases:**
+
  - Data preprocessing and filtering
  - Creating annotation tasks from raw data
  - Setting up annotation workflows
@@ -97,9 +117,11 @@ Data preparation and processing before annotation.
  Data processing and validation after annotation.

  **Available Actions:**
+
  - `post_annotation` - Process completed annotations

  **Use Cases:**
+
  - Validating annotation quality
  - Post-processing annotated data
  - Generating training datasets from annotations
@@ -109,9 +131,11 @@ Data processing and validation after annotation.
  Data quality checks and validation operations.

  **Available Actions:**
+
  - `validation` - Perform data quality and integrity checks

  **Use Cases:**
+
  - Validating dataset integrity
  - Checking annotation consistency
  - Quality assurance workflows
@@ -151,11 +175,13 @@ Plugins support three different execution methods depending on the use case:
  #### Plugin Models

  **PluginRelease Class** (`synapse_sdk/plugins/models.py:14`)
+
  - Manages plugin metadata and configuration
  - Handles versioning and checksums
  - Provides runtime environment setup

  **Run Class** (`synapse_sdk/plugins/models.py:98`)
+
  - Manages plugin execution instances
  - Provides logging and progress tracking
  - Handles backend communication
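To make the bullets above concrete: at execution time an action talks to its `Run` instance for logging, progress, and metrics. Below is a minimal, hypothetical sketch that reuses only the `self.run` calls appearing in the examples later in this document; the import path follows the `base.py:19` reference above and should be treated as an assumption.

```python
# Hypothetical sketch of an action driving the Run interface described above.
# Only log_message / set_progress / set_metrics are taken from this document;
# the import path and SyncAction itself are assumptions.
from synapse_sdk.plugins.categories.base import Action

class SyncAction(Action):
    name = 'sync'

    def start(self):
        self.run.log_message('Sync started', 'INFO')
        items = ['a', 'b', 'c']
        for i, _item in enumerate(items, start=1):
            # Report progress under a named category, as in the training example
            self.run.set_progress(i, len(items), 'sync')
        self.run.set_metrics({'synced': len(items)}, 'sync')
        return {'status': 'success'}
```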
@@ -163,6 +189,7 @@ Plugins support three different execution methods depending on the use case:
  #### Action Base Class

  **Action Class** (`synapse_sdk/plugins/categories/base.py:19`)
+
  - Unified interface for all plugin actions
  - Parameter validation with Pydantic models
  - Built-in logging and error handling
@@ -171,6 +198,7 @@ Plugins support three different execution methods depending on the use case:
  #### Template System

  **Cookiecutter Templates** (`synapse_sdk/plugins/templates/`)
+
  - Standardized plugin scaffolding
  - Category-specific templates
  - Automated project setup with proper structure
@@ -202,7 +230,7 @@ category: "neural_net"
  description: "A custom ML plugin"

  # Package management
- package_manager: "pip" # or "uv"
+ package_manager: "pip" # or "uv"

  # Action definitions
  actions:
@@ -225,6 +253,7 @@ synapse plugin create
  ```

  This will prompt for:
+
  - Plugin code (unique identifier)
  - Plugin name and description
  - Category selection
@@ -247,30 +276,263 @@ class TrainParams(BaseModel):
  class TrainAction(BaseTrainAction):
      name = "train"
      params_model = TrainParams
-
+
      def start(self):
          # Access validated parameters
          dataset_path = self.params['dataset_path']
          epochs = self.params['epochs']
-
+
          # Log progress
          self.run.log_message("Starting training...")
-
+
          # Your training logic here
          for epoch in range(epochs):
              # Update progress
              self.run.set_progress(epoch + 1, epochs, "training")
-
+
              # Training step
              loss = train_epoch(dataset_path)
-
+
              # Log metrics
              self.run.set_metrics({"loss": loss}, "training")
-
+
          self.run.log_message("Training completed!")
          return {"status": "success", "final_loss": loss}
  ```

+ #### Creating Export Plugins
+
+ Export plugins now use the BaseExporter class-based approach for better organization and reusability. Here's how to create a custom export plugin:
+
+ **Step 1: Generate Export Plugin Template**
+
+ ```bash
+ synapse plugin create
+ # Select 'export' as category
+ # Plugin will be created with export template
+ ```
+
+ **Step 2: Customize Export Parameters**
+
+ The `ExportParams` model defines the required parameters:
+
+ ```python
+ from synapse_sdk.plugins.categories.export.actions.export import ExportParams
+ from pydantic import BaseModel
+ from typing import Literal
+
+ class CustomExportParams(ExportParams):
+     # Add custom parameters
+     output_format: Literal['json', 'csv', 'xml'] = 'json'
+     include_metadata: bool = True
+     compression: bool = False
+ ```
+
+ **Step 3: Implement Data Transformation**
+
+ Implement the required methods in your `Exporter` class in `plugin/export.py`:
+
+ ```python
+ from datetime import datetime
+ from synapse_sdk.plugins.categories.export.templates.plugin import BaseExporter
+
+ class Exporter(BaseExporter):
+     """Custom export plugin with COCO format conversion."""
+
+     def convert_data(self, data):
+         """Convert annotation data to your desired format."""
+         # Example: Convert to COCO format
+         if data.get('data_type') == 'image_detection':
+             return self.convert_to_coco_format(data)
+         elif data.get('data_type') == 'image_classification':
+             return self.convert_to_classification_format(data)
+         return data
+
+     def before_convert(self, export_item):
+         """Preprocess data before conversion."""
+         # Add validation, filtering, or preprocessing
+         if not export_item.get('data'):
+             return None  # Skip empty items
+
+         # Add custom metadata
+         export_item['processed_at'] = datetime.now().isoformat()
+         return export_item
+
+     def after_convert(self, converted_data):
+         """Post-process converted data."""
+         # Add final touches, validation, or formatting
+         if 'annotations' in converted_data:
+             converted_data['annotation_count'] = len(converted_data['annotations'])
+         return converted_data
+
+     def convert_to_coco_format(self, data):
+         """Example: Convert to COCO detection format."""
+         coco_data = {
+             "images": [],
+             "annotations": [],
+             "categories": []
+         }
+
+         # Transform annotation data to COCO format
+         for annotation in data.get('annotations', []):
+             coco_annotation = {
+                 "id": annotation['id'],
+                 "image_id": annotation['image_id'],
+                 "category_id": annotation['category_id'],
+                 "bbox": annotation['bbox'],
+                 "area": annotation.get('area', 0),
+                 "iscrowd": 0
+             }
+             coco_data["annotations"].append(coco_annotation)
+
+         return coco_data
+ ```
+
+ **Step 4: Configure Export Targets**
+
+ The export action supports different data sources:
+
+ ```python
+ # Filter examples for different targets
+ filters = {
+     # For ground truth export
+     "ground_truth": {
+         "ground_truth_dataset_version": 123,
+         "expand": ["data"]
+     },
+
+     # For assignment export
+     "assignment": {
+         "project": 456,
+         "status": "completed",
+         "expand": ["data"]
+     },
+
+     # For task export
+     "task": {
+         "project": 456,
+         "assignment": 789,
+         "expand": ["data_unit", "assignment"]
+     }
+ }
+ ```
+
+ **Step 5: Handle File Operations**
+
+ Customize file saving and organization by overriding BaseExporter methods:
+
+ ```python
+ import json
+ from pathlib import Path
+ from synapse_sdk.plugins.categories.export.enums import ExportStatus
+
+ class Exporter(BaseExporter):
+     """Custom export plugin with multiple format support."""
+
+     def save_as_json(self, result, base_path, error_file_list):
+         """Custom JSON saving with different formats."""
+         file_name = Path(self.get_original_file_name(result['files'])).stem
+
+         # Choose output format based on params
+         if self.params.get('output_format') == 'csv':
+             return self.save_as_csv(result, base_path, error_file_list)
+         elif self.params.get('output_format') == 'xml':
+             return self.save_as_xml(result, base_path, error_file_list)
+
+         # Default JSON handling
+         json_data = result['data']
+         file_info = {'file_name': f'{file_name}.json'}
+
+         try:
+             with (base_path / f'{file_name}.json').open('w', encoding='utf-8') as f:
+                 json.dump(json_data, f, indent=4, ensure_ascii=False)
+             status = ExportStatus.SUCCESS
+         except Exception as e:
+             error_file_list.append([f'{file_name}.json', str(e)])
+             status = ExportStatus.FAILED
+
+         self.run.export_log_json_file(result['id'], file_info, status)
+         return status
+
+     def setup_output_directories(self, unique_export_path, save_original_file_flag):
+         """Custom directory structure."""
+         # Create format-specific directories
+         output_paths = super().setup_output_directories(unique_export_path, save_original_file_flag)
+
+         # Add custom directories based on output format
+         format_dir = unique_export_path / self.params.get('output_format', 'json')
+         format_dir.mkdir(parents=True, exist_ok=True)
+         output_paths['format_output_path'] = format_dir
+
+         return output_paths
+ ```
+
+ **Step 6: Usage Examples**
+
+ Running export plugins with different configurations:
+
+ ```bash
+ # Basic export of ground truth data
+ synapse plugin run export '{
+   "name": "my_export",
+   "storage": 1,
+   "target": "ground_truth",
+   "filter": {"ground_truth_dataset_version": 123},
+   "path": "exports/ground_truth",
+   "save_original_file": true
+ }' --plugin my-export-plugin
+
+ # Export assignments with custom parameters
+ synapse plugin run export '{
+   "name": "assignment_export",
+   "storage": 1,
+   "target": "assignment",
+   "filter": {"project": 456, "status": "completed"},
+   "path": "exports/assignments",
+   "save_original_file": false,
+   "extra_params": {
+     "output_format": "coco",
+     "include_metadata": true
+   }
+ }' --plugin custom-coco-export
+ ```
+
+ **Common Export Patterns:**
+
+ ```python
+ # Pattern 1: Format-specific conversion
+ class Exporter(BaseExporter):
+     def convert_data(self, data):
+         """Convert to YOLO format."""
+         if data.get('task_type') == 'object_detection':
+             return self.convert_to_yolo_format(data)
+         return data
+
+ # Pattern 2: Conditional file organization
+ class Exporter(BaseExporter):
+     def setup_output_directories(self, unique_export_path, save_original_file_flag):
+         # Call parent method
+         output_paths = super().setup_output_directories(unique_export_path, save_original_file_flag)
+
+         # Create separate folders by category
+         for category in ['train', 'val', 'test']:
+             category_path = unique_export_path / category
+             category_path.mkdir(parents=True, exist_ok=True)
+             output_paths[f'{category}_path'] = category_path
+
+         return output_paths
+
+ # Pattern 3: Batch processing with validation
+ class Exporter(BaseExporter):
+     def before_convert(self, export_item):
+         # Validate required fields
+         required_fields = ['data', 'files', 'id']
+         for field in required_fields:
+             if field not in export_item:
+                 raise ValueError(f"Missing required field: {field}")
+         return export_item
+ ```
+
  ### 3. Configure Actions

  Define actions in `config.yaml`:
@@ -281,6 +543,12 @@ actions:
      entrypoint: "plugin.train.TrainAction"
      method: "job"
      description: "Train a neural network model"
+
+   # Export plugin configuration
+   export:
+     entrypoint: "plugin.export.Exporter"
+     method: "job"
+     description: "Export and transform annotation data"
  ```

  ### 4. Package and Publish
@@ -373,10 +641,10 @@ synapse plugin run action-name --job-id production-job
  class MyAction(Action):
      progress_categories = {
          "preprocessing": "Data preprocessing",
-         "training": "Model training",
+         "training": "Model training",
          "validation": "Model validation"
      }
-
+
      def start(self):
          # Update different progress categories
          self.run.set_progress(50, 100, "preprocessing")
@@ -400,13 +668,13 @@ def start(self):
  ```python
  def get_runtime_env(self):
      env = super().get_runtime_env()
-
+
      # Add custom environment variables
      env['env_vars']['CUSTOM_VAR'] = 'value'
-
+
      # Add additional packages
      env['pip']['packages'].append('custom-package==1.0.0')
-
+
      return env
  ```

@@ -420,7 +688,7 @@ class TrainParams(BaseModel):
      model_type: Literal["cnn", "transformer", "resnet"]
      dataset_path: str
      batch_size: int = 32
-
+
      @validator('batch_size')
      def validate_batch_size(cls, v):
          if v <= 0 or v > 512:
@@ -451,7 +719,70 @@ class TrainParams(BaseModel):
  - **Parameter Validation**: Test edge cases and error conditions
  - **Performance Tests**: Validate execution time and resource usage

- ### 4. Security
+ ### 4. Export Plugin Best Practices
+
+ #### Data Processing
+
+ - **Memory Efficiency**: Use generators for processing large datasets
+ - **Error Recovery**: Implement graceful error handling for individual items
+ - **Progress Reporting**: Update progress regularly for long-running exports
+ - **Data Validation**: Validate data structure before conversion
+
+ ```python
+ class Exporter(BaseExporter):
+     def export(self, export_items=None, results=None, **kwargs):
+         """Override the main export method for custom processing."""
+         # Use itertools.tee to count items without consuming the generator
+         items_to_process = export_items if export_items is not None else self.export_items
+         export_items_count, export_items_process = tee(items_to_process)
+         total = sum(1 for _ in export_items_count)
+
+         # Custom processing with error handling
+         for no, export_item in enumerate(export_items_process, start=1):
+             try:
+                 # Use the built-in data conversion pipeline
+                 processed_item = self.process_data_conversion(export_item)
+                 self.run.set_progress(no, total, category='dataset_conversion')
+             except Exception as e:
+                 self.run.log_message(f"Error processing item {no}: {str(e)}", "ERROR")
+                 continue
+
+         # Call parent's export method for standard processing
+         # or implement your own complete workflow
+         return super().export(export_items, results, **kwargs)
+ ```
+
+ #### File Management
+
+ - **Unique Paths**: Prevent file collisions with timestamp or counter suffixes
+ - **Directory Structure**: Organize output files logically
+ - **Error Logging**: Track failed files for debugging
+ - **Cleanup**: Remove temporary files on completion
+
+ ```python
+ class Exporter(BaseExporter):
+     def setup_output_directories(self, unique_export_path, save_original_file_flag):
+         """Create unique export directory structure."""
+         # BaseExporter already handles unique path creation via _create_unique_export_path
+         # This method sets up the internal directory structure
+         output_paths = super().setup_output_directories(unique_export_path, save_original_file_flag)
+
+         # Add custom subdirectories as needed
+         custom_dir = unique_export_path / 'custom_output'
+         custom_dir.mkdir(parents=True, exist_ok=True)
+         output_paths['custom_output_path'] = custom_dir
+
+         return output_paths
+ ```
+
+ #### Format Conversion
+
+ - **Flexible Templates**: Design templates that work with multiple data types
+ - **Schema Validation**: Validate output against expected schemas
+ - **Metadata Preservation**: Maintain important metadata during conversion
+ - **Version Compatibility**: Handle different data schema versions
+
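One way to act on the **Schema Validation** and **Metadata Preservation** bullets above is through the `after_convert` hook documented in Step 3. A minimal sketch follows, assuming the COCO-style top-level keys from that example; the required-key set and `schema_version` field are illustrative, not part of BaseExporter.

```python
from synapse_sdk.plugins.categories.export.templates.plugin import BaseExporter

class Exporter(BaseExporter):
    # Assumed output schema, matching the COCO-style example in Step 3
    REQUIRED_KEYS = {'images', 'annotations', 'categories'}

    def after_convert(self, converted_data):
        # Schema validation: fail fast if conversion dropped a top-level key
        missing = self.REQUIRED_KEYS - set(converted_data)
        if missing:
            raise ValueError(f'Converted data missing keys: {sorted(missing)}')

        # Metadata preservation: record a schema version for downstream tools
        converted_data.setdefault('schema_version', '1.0')
        return converted_data
```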
+ ### 5. Security

  - **Input Validation**: Validate all parameters and inputs
  - **File Access**: Restrict file system access appropriately
@@ -465,7 +796,7 @@ class TrainParams(BaseModel):
  ```python
  # In your action
  self.run.log_message("Processing started", "INFO")
- self.run.log_message("Warning: low memory", "WARNING")
+ self.run.log_message("Warning: low memory", "WARNING")
  self.run.log_message("Error occurred", "ERROR")

  # With structured data
@@ -506,4 +837,4 @@ self.run.set_metrics({
  }, "performance")
  ```

- The plugin system provides a powerful foundation for building scalable, distributed ML workflows. By following the established patterns and best practices, you can create robust plugins that integrate seamlessly with the Synapse ecosystem.
+ The plugin system provides a powerful foundation for building scalable, distributed ML workflows. By following the established patterns and best practices, you can create robust plugins that integrate seamlessly with the Synapse ecosystem.
synapse_sdk/devtools/docs/docusaurus.config.ts

@@ -57,6 +57,11 @@ const config: Config = {
      ],
    ],

+   themes: ['@docusaurus/theme-mermaid'],
+   markdown: {
+     mermaid: true,
+   },
+


    themeConfig: {
@@ -134,6 +139,9 @@ const config: Config = {
        darkTheme: prismThemes.dracula,
        additionalLanguages: ['python', 'bash'],
      },
+     mermaid: {
+       theme: {light: 'neutral', dark: 'dark'},
+     },
    } satisfies Preset.ThemeConfig,
  };