synapse-sdk 1.0.0b18__py3-none-any.whl → 1.0.0b20__py3-none-any.whl

This diff shows the changes between publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.

Potentially problematic release: this version of synapse-sdk might be problematic.

Files changed (28):
  1. synapse_sdk/devtools/docs/docs/contributing.md +1 -1
  2. synapse_sdk/devtools/docs/docs/features/index.md +4 -4
  3. synapse_sdk/devtools/docs/docs/plugins/export-plugins.md +786 -0
  4. synapse_sdk/devtools/docs/docs/{features/plugins/index.md → plugins/plugins.md} +357 -21
  5. synapse_sdk/devtools/docs/docusaurus.config.ts +8 -0
  6. synapse_sdk/devtools/docs/i18n/ko/docusaurus-plugin-content-docs/current/plugins/export-plugins.md +788 -0
  7. synapse_sdk/devtools/docs/i18n/ko/docusaurus-plugin-content-docs/current/plugins/plugins.md +71 -0
  8. synapse_sdk/devtools/docs/package-lock.json +1366 -37
  9. synapse_sdk/devtools/docs/package.json +2 -1
  10. synapse_sdk/devtools/docs/sidebars.ts +8 -1
  11. synapse_sdk/plugins/categories/base.py +28 -2
  12. synapse_sdk/plugins/categories/export/actions/export.py +2 -1
  13. synapse_sdk/plugins/categories/export/templates/config.yaml +1 -1
  14. synapse_sdk/plugins/categories/export/templates/plugin/__init__.py +375 -0
  15. synapse_sdk/plugins/categories/export/templates/plugin/export.py +56 -190
  16. synapse_sdk/plugins/categories/upload/actions/upload.py +181 -22
  17. synapse_sdk/plugins/categories/upload/templates/config.yaml +24 -2
  18. synapse_sdk/plugins/categories/upload/templates/plugin/upload.py +8 -2
  19. synapse_sdk/plugins/models.py +28 -2
  20. synapse_sdk/plugins/templates/plugin-config-schema.json +7 -0
  21. synapse_sdk/plugins/templates/schema.json +7 -0
  22. {synapse_sdk-1.0.0b18.dist-info → synapse_sdk-1.0.0b20.dist-info}/METADATA +1 -1
  23. {synapse_sdk-1.0.0b18.dist-info → synapse_sdk-1.0.0b20.dist-info}/RECORD +27 -25
  24. synapse_sdk/devtools/docs/i18n/ko/docusaurus-plugin-content-docs/current/features/plugins/index.md +0 -30
  25. {synapse_sdk-1.0.0b18.dist-info → synapse_sdk-1.0.0b20.dist-info}/WHEEL +0 -0
  26. {synapse_sdk-1.0.0b18.dist-info → synapse_sdk-1.0.0b20.dist-info}/entry_points.txt +0 -0
  27. {synapse_sdk-1.0.0b18.dist-info → synapse_sdk-1.0.0b20.dist-info}/licenses/LICENSE +0 -0
  28. {synapse_sdk-1.0.0b18.dist-info → synapse_sdk-1.0.0b20.dist-info}/top_level.txt +0 -0
@@ -1,7 +1,7 @@
 ---
 id: plugins
 title: Plugin System
-sidebar_position: 2
+sidebar_position: 1
 ---
 
 # Plugin System
@@ -30,6 +30,7 @@ The SDK organizes plugins into specific categories, each designed for different
 ML model training, inference, and deployment operations.
 
 **Available Actions:**
+
 - `deployment` - Deploy models to production environments
 - `gradio` - Create interactive web interfaces for models
 - `inference` - Run model predictions on data
@@ -38,6 +39,7 @@ ML model training, inference, and deployment operations.
 - `tune` - Hyperparameter optimization and model tuning
 
 **Use Cases:**
+
 - Training computer vision models
 - Deploying models as web services
 - Running batch inference on datasets
@@ -45,24 +47,38 @@ ML model training, inference, and deployment operations.
 
 ### 2. Export (`export`)
 
-Data export and transformation operations.
+Data export and transformation operations for exporting annotated data, ground truth datasets, assignments, and tasks from the Synapse platform.
 
 **Available Actions:**
-- `export` - Export data in various formats and destinations
+
+- `export` - Export data from various sources (assignments, ground truth, tasks) with customizable processing
 
 **Use Cases:**
-- Converting datasets to different formats
-- Exporting processed data to cloud storage
+
+- Exporting annotated datasets for training
+- Converting ground truth data to custom formats
 - Creating data packages for distribution
+- Batch processing of assignment results
+- Transforming annotation data for external tools
+
+**Supported Export Targets:**
+
+- `assignment` - Export assignment data with annotations
+- `ground_truth` - Export ground truth dataset versions
+- `task` - Export task data with associated annotations
+
+For detailed information about export plugins, BaseExporter class architecture, implementation examples, and best practices, see the [Export Plugins](./export-plugins) documentation.
 
 ### 3. Upload (`upload`)
 
 File and data upload functionality with support for various storage backends.
 
 **Available Actions:**
+
 - `upload` - Upload files to storage providers
 
 **Use Cases:**
+
 - Uploading datasets to cloud storage
 - Backing up processed data
 - Sharing data between team members
@@ -72,9 +88,11 @@ File and data upload functionality with support for various storage backends.
 Intelligent automation tools powered by AI.
 
 **Available Actions:**
+
 - `auto_label` - Automated data labeling and annotation
 
 **Use Cases:**
+
 - Pre-labeling datasets with AI models
 - Quality assurance for manual annotations
 - Accelerating annotation workflows
@@ -84,10 +102,12 @@ Intelligent automation tools powered by AI.
 Data preparation and processing before annotation.
 
 **Available Actions:**
+
 - `pre_annotation` - Prepare data for annotation workflows
 - `to_task` - Convert data to annotation tasks
 
 **Use Cases:**
+
 - Data preprocessing and filtering
 - Creating annotation tasks from raw data
 - Setting up annotation workflows
@@ -97,9 +117,11 @@ Data preparation and processing before annotation.
 Data processing and validation after annotation.
 
 **Available Actions:**
+
 - `post_annotation` - Process completed annotations
 
 **Use Cases:**
+
 - Validating annotation quality
 - Post-processing annotated data
 - Generating training datasets from annotations
@@ -109,9 +131,11 @@ Data processing and validation after annotation.
 Data quality checks and validation operations.
 
 **Available Actions:**
+
 - `validation` - Perform data quality and integrity checks
 
 **Use Cases:**
+
 - Validating dataset integrity
 - Checking annotation consistency
 - Quality assurance workflows
@@ -151,11 +175,13 @@ Plugins support three different execution methods depending on the use case:
 #### Plugin Models
 
 **PluginRelease Class** (`synapse_sdk/plugins/models.py:14`)
+
 - Manages plugin metadata and configuration
 - Handles versioning and checksums
 - Provides runtime environment setup
 
 **Run Class** (`synapse_sdk/plugins/models.py:98`)
+
 - Manages plugin execution instances
 - Provides logging and progress tracking
 - Handles backend communication
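The Run facilities listed in this hunk are easiest to see in use. Below is a schematic sketch using only the calls that appear elsewhere in the diffed docs (`log_message`, `set_progress`, `set_metrics`); the surrounding action body and numbers are illustrative, not SDK code:

```python
# Schematic action body exercising the Run facilities described above.
# The loop and item count are illustrative; only the self.run.* calls
# are taken from the documentation in this diff.
def start(self):
    self.run.log_message("Processing started", "INFO")   # logging
    total_items = 100
    for i in range(total_items):
        self.run.set_progress(i + 1, total_items, "processing")  # progress tracking
    self.run.set_metrics({"items_processed": total_items}, "processing")  # metrics
    return {"status": "success"}
```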
@@ -163,6 +189,7 @@ Plugins support three different execution methods depending on the use case:
 #### Action Base Class
 
 **Action Class** (`synapse_sdk/plugins/categories/base.py:19`)
+
 - Unified interface for all plugin actions
 - Parameter validation with Pydantic models
 - Built-in logging and error handling
@@ -171,6 +198,7 @@ Plugins support three different execution methods depending on the use case:
 #### Template System
 
 **Cookiecutter Templates** (`synapse_sdk/plugins/templates/`)
+
 - Standardized plugin scaffolding
 - Category-specific templates
 - Automated project setup with proper structure
@@ -202,7 +230,12 @@ category: "neural_net"
 description: "A custom ML plugin"
 
 # Package management
-package_manager: "pip" # or "uv"
+package_manager: "pip" # or "uv"
+
+# Package manager options (optional)
+# For pip defaults to ['--disable-pip-version-check', '--no-cache-dir']
+# For uv defaults to ['--no-cache']
+package_manager_options: ["--disable-pip-version-check", "--no-cache-dir", "--quiet"]
 
 # Action definitions
 actions:
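One plausible reading of the `package_manager_options` addition above, given that the example repeats the documented defaults and appends `--quiet`, is that a configured list replaces the per-manager defaults rather than extending them. A minimal sketch of that override semantics; the helper and its name are illustrative, not SDK code:

```python
# Illustrative only: how package_manager_options could override the
# per-manager defaults quoted in the config comments above.
DEFAULT_OPTIONS = {
    'pip': ['--disable-pip-version-check', '--no-cache-dir'],
    'uv': ['--no-cache'],
}

def resolve_options(package_manager, configured=None):
    """Use the configured options verbatim when present, else the defaults."""
    if configured is not None:
        return configured
    return DEFAULT_OPTIONS.get(package_manager, [])

# resolve_options('pip') -> ['--disable-pip-version-check', '--no-cache-dir']
# resolve_options('pip', ['--disable-pip-version-check', '--no-cache-dir', '--quiet'])
```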
@@ -225,6 +258,7 @@ synapse plugin create
 ```
 
 This will prompt for:
+
 - Plugin code (unique identifier)
 - Plugin name and description
 - Category selection
@@ -247,30 +281,263 @@ class TrainParams(BaseModel):
 class TrainAction(BaseTrainAction):
     name = "train"
     params_model = TrainParams
-
+
     def start(self):
         # Access validated parameters
         dataset_path = self.params['dataset_path']
         epochs = self.params['epochs']
-
+
         # Log progress
         self.run.log_message("Starting training...")
-
+
         # Your training logic here
         for epoch in range(epochs):
             # Update progress
             self.run.set_progress(epoch + 1, epochs, "training")
-
+
             # Training step
             loss = train_epoch(dataset_path)
-
+
             # Log metrics
             self.run.set_metrics({"loss": loss}, "training")
-
+
         self.run.log_message("Training completed!")
         return {"status": "success", "final_loss": loss}
 ```
 
+#### Creating Export Plugins
+
+Export plugins now use the BaseExporter class-based approach for better organization and reusability. Here's how to create a custom export plugin:
+
+**Step 1: Generate Export Plugin Template**
+
+```bash
+synapse plugin create
+# Select 'export' as category
+# Plugin will be created with export template
+```
+
+**Step 2: Customize Export Parameters**
+
+The `ExportParams` model defines the required parameters:
+
+```python
+from synapse_sdk.plugins.categories.export.actions.export import ExportParams
+from pydantic import BaseModel
+from typing import Literal
+
+class CustomExportParams(ExportParams):
+    # Add custom parameters
+    output_format: Literal['json', 'csv', 'xml'] = 'json'
+    include_metadata: bool = True
+    compression: bool = False
+```
+
+**Step 3: Implement Data Transformation**
+
+Implement the required methods in your `Exporter` class in `plugin/export.py`:
+
+```python
+from datetime import datetime
+from synapse_sdk.plugins.categories.export.templates.plugin import BaseExporter
+
+class Exporter(BaseExporter):
+    """Custom export plugin with COCO format conversion."""
+
+    def convert_data(self, data):
+        """Convert annotation data to your desired format."""
+        # Example: Convert to COCO format
+        if data.get('data_type') == 'image_detection':
+            return self.convert_to_coco_format(data)
+        elif data.get('data_type') == 'image_classification':
+            return self.convert_to_classification_format(data)
+        return data
+
+    def before_convert(self, export_item):
+        """Preprocess data before conversion."""
+        # Add validation, filtering, or preprocessing
+        if not export_item.get('data'):
+            return None  # Skip empty items
+
+        # Add custom metadata
+        export_item['processed_at'] = datetime.now().isoformat()
+        return export_item
+
+    def after_convert(self, converted_data):
+        """Post-process converted data."""
+        # Add final touches, validation, or formatting
+        if 'annotations' in converted_data:
+            converted_data['annotation_count'] = len(converted_data['annotations'])
+        return converted_data
+
+    def convert_to_coco_format(self, data):
+        """Example: Convert to COCO detection format."""
+        coco_data = {
+            "images": [],
+            "annotations": [],
+            "categories": []
+        }
+
+        # Transform annotation data to COCO format
+        for annotation in data.get('annotations', []):
+            coco_annotation = {
+                "id": annotation['id'],
+                "image_id": annotation['image_id'],
+                "category_id": annotation['category_id'],
+                "bbox": annotation['bbox'],
+                "area": annotation.get('area', 0),
+                "iscrowd": 0
+            }
+            coco_data["annotations"].append(coco_annotation)
+
+        return coco_data
+```
+
+**Step 4: Configure Export Targets**
+
+The export action supports different data sources:
+
+```python
+# Filter examples for different targets
+filters = {
+    # For ground truth export
+    "ground_truth": {
+        "ground_truth_dataset_version": 123,
+        "expand": ["data"]
+    },
+
+    # For assignment export
+    "assignment": {
+        "project": 456,
+        "status": "completed",
+        "expand": ["data"]
+    },
+
+    # For task export
+    "task": {
+        "project": 456,
+        "assignment": 789,
+        "expand": ["data_unit", "assignment"]
+    }
+}
+```
+
+**Step 5: Handle File Operations**
+
+Customize file saving and organization by overriding BaseExporter methods:
+
+```python
+import json
+from pathlib import Path
+from synapse_sdk.plugins.categories.export.enums import ExportStatus
+
+class Exporter(BaseExporter):
+    """Custom export plugin with multiple format support."""
+
+    def save_as_json(self, result, base_path, error_file_list):
+        """Custom JSON saving with different formats."""
+        file_name = Path(self.get_original_file_name(result['files'])).stem
+
+        # Choose output format based on params
+        if self.params.get('output_format') == 'csv':
+            return self.save_as_csv(result, base_path, error_file_list)
+        elif self.params.get('output_format') == 'xml':
+            return self.save_as_xml(result, base_path, error_file_list)
+
+        # Default JSON handling
+        json_data = result['data']
+        file_info = {'file_name': f'{file_name}.json'}
+
+        try:
+            with (base_path / f'{file_name}.json').open('w', encoding='utf-8') as f:
+                json.dump(json_data, f, indent=4, ensure_ascii=False)
+            status = ExportStatus.SUCCESS
+        except Exception as e:
+            error_file_list.append([f'{file_name}.json', str(e)])
+            status = ExportStatus.FAILED
+
+        self.run.export_log_json_file(result['id'], file_info, status)
+        return status
+
+    def setup_output_directories(self, unique_export_path, save_original_file_flag):
+        """Custom directory structure."""
+        # Create format-specific directories
+        output_paths = super().setup_output_directories(unique_export_path, save_original_file_flag)
+
+        # Add custom directories based on output format
+        format_dir = unique_export_path / self.params.get('output_format', 'json')
+        format_dir.mkdir(parents=True, exist_ok=True)
+        output_paths['format_output_path'] = format_dir
+
+        return output_paths
+```
+
+**Step 6: Usage Examples**
+
+Running export plugins with different configurations:
+
+```bash
+# Basic export of ground truth data
+synapse plugin run export '{
+  "name": "my_export",
+  "storage": 1,
+  "target": "ground_truth",
+  "filter": {"ground_truth_dataset_version": 123},
+  "path": "exports/ground_truth",
+  "save_original_file": true
+}' --plugin my-export-plugin
+
+# Export assignments with custom parameters
+synapse plugin run export '{
+  "name": "assignment_export",
+  "storage": 1,
+  "target": "assignment",
+  "filter": {"project": 456, "status": "completed"},
+  "path": "exports/assignments",
+  "save_original_file": false,
+  "extra_params": {
+    "output_format": "coco",
+    "include_metadata": true
+  }
+}' --plugin custom-coco-export
+```
+
+**Common Export Patterns:**
+
+```python
+# Pattern 1: Format-specific conversion
+class Exporter(BaseExporter):
+    def convert_data(self, data):
+        """Convert to YOLO format."""
+        if data.get('task_type') == 'object_detection':
+            return self.convert_to_yolo_format(data)
+        return data
+
+# Pattern 2: Conditional file organization
+class Exporter(BaseExporter):
+    def setup_output_directories(self, unique_export_path, save_original_file_flag):
+        # Call parent method
+        output_paths = super().setup_output_directories(unique_export_path, save_original_file_flag)
+
+        # Create separate folders by category
+        for category in ['train', 'val', 'test']:
+            category_path = unique_export_path / category
+            category_path.mkdir(parents=True, exist_ok=True)
+            output_paths[f'{category}_path'] = category_path
+
+        return output_paths
+
+# Pattern 3: Batch processing with validation
+class Exporter(BaseExporter):
+    def before_convert(self, export_item):
+        # Validate required fields
+        required_fields = ['data', 'files', 'id']
+        for field in required_fields:
+            if field not in export_item:
+                raise ValueError(f"Missing required field: {field}")
+        return export_item
+```
+
 ### 3. Configure Actions
 
 Define actions in `config.yaml`:
@@ -281,6 +548,12 @@ actions:
     entrypoint: "plugin.train.TrainAction"
     method: "job"
     description: "Train a neural network model"
+
+  # Export plugin configuration
+  export:
+    entrypoint: "plugin.export.Exporter"
+    method: "job"
+    description: "Export and transform annotation data"
 ```
 
 ### 4. Package and Publish
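The `entrypoint` values in the hunk above are dotted import paths. A minimal sketch of how such a string can be resolved to a class at runtime; the helper is illustrative and may differ from the SDK's actual loader:

```python
# Illustrative resolver for entrypoint strings such as "plugin.export.Exporter";
# the SDK's real loading mechanism is not shown in this diff.
from importlib import import_module

def load_entrypoint(dotted_path):
    """Split "package.module.ClassName" and import the named attribute."""
    module_path, _, attr = dotted_path.rpartition('.')
    return getattr(import_module(module_path), attr)

# exporter_cls = load_entrypoint("plugin.export.Exporter")
```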
@@ -373,10 +646,10 @@ synapse plugin run action-name --job-id production-job
 class MyAction(Action):
     progress_categories = {
         "preprocessing": "Data preprocessing",
-        "training": "Model training",
+        "training": "Model training",
         "validation": "Model validation"
     }
-
+
     def start(self):
         # Update different progress categories
         self.run.set_progress(50, 100, "preprocessing")
@@ -400,13 +673,13 @@ def start(self):
 ```python
 def get_runtime_env(self):
     env = super().get_runtime_env()
-
+
     # Add custom environment variables
     env['env_vars']['CUSTOM_VAR'] = 'value'
-
+
     # Add additional packages
     env['pip']['packages'].append('custom-package==1.0.0')
-
+
     return env
 ```
 
@@ -420,7 +693,7 @@ class TrainParams(BaseModel):
     model_type: Literal["cnn", "transformer", "resnet"]
     dataset_path: str
     batch_size: int = 32
-
+
     @validator('batch_size')
     def validate_batch_size(cls, v):
         if v <= 0 or v > 512:
@@ -451,7 +724,70 @@ class TrainParams(BaseModel):
 - **Parameter Validation**: Test edge cases and error conditions
 - **Performance Tests**: Validate execution time and resource usage
 
-### 4. Security
+### 4. Export Plugin Best Practices
+
+#### Data Processing
+
+- **Memory Efficiency**: Use generators for processing large datasets
+- **Error Recovery**: Implement graceful error handling for individual items
+- **Progress Reporting**: Update progress regularly for long-running exports
+- **Data Validation**: Validate data structure before conversion
+
+```python
+from itertools import tee
+
+class Exporter(BaseExporter):
+    def export(self, export_items=None, results=None, **kwargs):
+        """Override the main export method for custom processing."""
+        # Use tee to count items without consuming generator
+        items_to_process = export_items if export_items is not None else self.export_items
+        export_items_count, export_items_process = tee(items_to_process)
+        total = sum(1 for _ in export_items_count)
+
+        # Custom processing with error handling
+        for no, export_item in enumerate(export_items_process, start=1):
+            try:
+                # Use the built-in data conversion pipeline
+                processed_item = self.process_data_conversion(export_item)
+                self.run.set_progress(no, total, category='dataset_conversion')
+            except Exception as e:
+                self.run.log_message(f"Error processing item {no}: {str(e)}", "ERROR")
+                continue
+
+        # Call parent's export method for standard processing
+        # or implement your own complete workflow
+        return super().export(export_items, results, **kwargs)
+```
+
+#### File Management
+
+- **Unique Paths**: Prevent file collisions with timestamp or counter suffixes
+- **Directory Structure**: Organize output files logically
+- **Error Logging**: Track failed files for debugging
+- **Cleanup**: Remove temporary files on completion
+
+```python
+class Exporter(BaseExporter):
+    def setup_output_directories(self, unique_export_path, save_original_file_flag):
+        """Create unique export directory structure."""
+        # BaseExporter already handles unique path creation via _create_unique_export_path
+        # This method sets up the internal directory structure
+        output_paths = super().setup_output_directories(unique_export_path, save_original_file_flag)
+
+        # Add custom subdirectories as needed
+        custom_dir = unique_export_path / 'custom_output'
+        custom_dir.mkdir(parents=True, exist_ok=True)
+        output_paths['custom_output_path'] = custom_dir
+
+        return output_paths
+```
+
+#### Format Conversion
+
+- **Flexible Templates**: Design templates that work with multiple data types
+- **Schema Validation**: Validate output against expected schemas
+- **Metadata Preservation**: Maintain important metadata during conversion
+- **Version Compatibility**: Handle different data schema versions
+
+### 5. Security
 
 - **Input Validation**: Validate all parameters and inputs
 - **File Access**: Restrict file system access appropriately
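The Schema Validation point under Format Conversion above can be made concrete with Pydantic, which these docs already use for parameter models. A minimal sketch assuming the COCO-style payload from the earlier examples; the models and helper are illustrative, not SDK code:

```python
# Illustrative schema check for converted export data, assuming the
# COCO-style structure shown earlier in this page; not SDK code.
from pydantic import BaseModel, ValidationError

class CocoAnnotation(BaseModel):
    id: int
    image_id: int
    category_id: int
    bbox: list  # [x, y, width, height]

class CocoExport(BaseModel):
    images: list
    annotations: list[CocoAnnotation]
    categories: list

def validate_converted(converted_data):
    """Return True when the converted payload matches the expected schema."""
    try:
        CocoExport(**converted_data)
        return True
    except (ValidationError, TypeError):
        return False
```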
@@ -465,7 +801,7 @@ class TrainParams(BaseModel):
 ```python
 # In your action
 self.run.log_message("Processing started", "INFO")
-self.run.log_message("Warning: low memory", "WARNING")
+self.run.log_message("Warning: low memory", "WARNING")
 self.run.log_message("Error occurred", "ERROR")
 
 # With structured data
@@ -506,4 +842,4 @@ self.run.set_metrics({
 }, "performance")
 ```
 
-The plugin system provides a powerful foundation for building scalable, distributed ML workflows. By following the established patterns and best practices, you can create robust plugins that integrate seamlessly with the Synapse ecosystem.
+The plugin system provides a powerful foundation for building scalable, distributed ML workflows. By following the established patterns and best practices, you can create robust plugins that integrate seamlessly with the Synapse ecosystem.
@@ -57,6 +57,11 @@ const config: Config = {
     ],
   ],
 
+  themes: ['@docusaurus/theme-mermaid'],
+  markdown: {
+    mermaid: true,
+  },
+
 
 
   themeConfig: {
@@ -134,6 +139,9 @@ const config: Config = {
       darkTheme: prismThemes.dracula,
       additionalLanguages: ['python', 'bash'],
     },
+    mermaid: {
+      theme: {light: 'neutral', dark: 'dark'},
+    },
   } satisfies Preset.ThemeConfig,
 };