datamint 2.3.5__tar.gz → 2.4.1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of datamint might be problematic. Click here for more details.

Files changed (68) hide show
  1. datamint-2.4.1/PKG-INFO +320 -0
  2. datamint-2.4.1/README.md +267 -0
  3. {datamint-2.3.5 → datamint-2.4.1}/datamint/api/base_api.py +42 -8
  4. {datamint-2.3.5 → datamint-2.4.1}/datamint/api/client.py +2 -0
  5. {datamint-2.3.5 → datamint-2.4.1}/datamint/api/endpoints/resources_api.py +37 -13
  6. {datamint-2.3.5 → datamint-2.4.1}/datamint/apihandler/base_api_handler.py +0 -1
  7. {datamint-2.3.5 → datamint-2.4.1}/datamint/apihandler/dto/annotation_dto.py +2 -0
  8. {datamint-2.3.5 → datamint-2.4.1}/datamint/dataset/base_dataset.py +4 -0
  9. datamint-2.4.1/datamint/lightning/__init__.py +1 -0
  10. datamint-2.4.1/datamint/lightning/datamintdatamodule.py +103 -0
  11. datamint-2.4.1/datamint/mlflow/__init__.py +46 -0
  12. datamint-2.4.1/datamint/mlflow/artifact/__init__.py +1 -0
  13. datamint-2.4.1/datamint/mlflow/artifact/datamint_artifacts_repo.py +8 -0
  14. datamint-2.4.1/datamint/mlflow/env_utils.py +109 -0
  15. datamint-2.4.1/datamint/mlflow/env_vars.py +5 -0
  16. datamint-2.4.1/datamint/mlflow/lightning/callbacks/__init__.py +1 -0
  17. datamint-2.4.1/datamint/mlflow/lightning/callbacks/modelcheckpoint.py +338 -0
  18. datamint-2.4.1/datamint/mlflow/models/__init__.py +94 -0
  19. datamint-2.4.1/datamint/mlflow/tracking/datamint_store.py +46 -0
  20. datamint-2.4.1/datamint/mlflow/tracking/default_experiment.py +27 -0
  21. datamint-2.4.1/datamint/mlflow/tracking/fluent.py +78 -0
  22. {datamint-2.3.5 → datamint-2.4.1}/pyproject.toml +16 -2
  23. datamint-2.3.5/PKG-INFO +0 -125
  24. datamint-2.3.5/README.md +0 -73
  25. {datamint-2.3.5 → datamint-2.4.1}/datamint/__init__.py +0 -0
  26. {datamint-2.3.5 → datamint-2.4.1}/datamint/api/__init__.py +0 -0
  27. {datamint-2.3.5 → datamint-2.4.1}/datamint/api/dto/__init__.py +0 -0
  28. {datamint-2.3.5 → datamint-2.4.1}/datamint/api/endpoints/__init__.py +0 -0
  29. {datamint-2.3.5 → datamint-2.4.1}/datamint/api/endpoints/annotations_api.py +0 -0
  30. {datamint-2.3.5 → datamint-2.4.1}/datamint/api/endpoints/annotationsets_api.py +0 -0
  31. {datamint-2.3.5 → datamint-2.4.1}/datamint/api/endpoints/channels_api.py +0 -0
  32. {datamint-2.3.5 → datamint-2.4.1}/datamint/api/endpoints/datasetsinfo_api.py +0 -0
  33. {datamint-2.3.5 → datamint-2.4.1}/datamint/api/endpoints/models_api.py +0 -0
  34. {datamint-2.3.5 → datamint-2.4.1}/datamint/api/endpoints/projects_api.py +0 -0
  35. {datamint-2.3.5 → datamint-2.4.1}/datamint/api/endpoints/users_api.py +0 -0
  36. {datamint-2.3.5 → datamint-2.4.1}/datamint/api/entity_base_api.py +0 -0
  37. {datamint-2.3.5 → datamint-2.4.1}/datamint/apihandler/annotation_api_handler.py +0 -0
  38. {datamint-2.3.5 → datamint-2.4.1}/datamint/apihandler/api_handler.py +0 -0
  39. {datamint-2.3.5 → datamint-2.4.1}/datamint/apihandler/dto/__init__.py +0 -0
  40. {datamint-2.3.5 → datamint-2.4.1}/datamint/apihandler/exp_api_handler.py +0 -0
  41. {datamint-2.3.5 → datamint-2.4.1}/datamint/apihandler/root_api_handler.py +0 -0
  42. {datamint-2.3.5 → datamint-2.4.1}/datamint/client_cmd_tools/__init__.py +0 -0
  43. {datamint-2.3.5 → datamint-2.4.1}/datamint/client_cmd_tools/datamint_config.py +0 -0
  44. {datamint-2.3.5 → datamint-2.4.1}/datamint/client_cmd_tools/datamint_upload.py +0 -0
  45. {datamint-2.3.5 → datamint-2.4.1}/datamint/configs.py +0 -0
  46. {datamint-2.3.5 → datamint-2.4.1}/datamint/dataset/__init__.py +0 -0
  47. {datamint-2.3.5 → datamint-2.4.1}/datamint/dataset/annotation.py +0 -0
  48. {datamint-2.3.5 → datamint-2.4.1}/datamint/dataset/dataset.py +0 -0
  49. {datamint-2.3.5 → datamint-2.4.1}/datamint/entities/__init__.py +0 -0
  50. {datamint-2.3.5 → datamint-2.4.1}/datamint/entities/annotation.py +0 -0
  51. {datamint-2.3.5 → datamint-2.4.1}/datamint/entities/base_entity.py +0 -0
  52. {datamint-2.3.5 → datamint-2.4.1}/datamint/entities/cache_manager.py +0 -0
  53. {datamint-2.3.5 → datamint-2.4.1}/datamint/entities/channel.py +0 -0
  54. {datamint-2.3.5 → datamint-2.4.1}/datamint/entities/datasetinfo.py +0 -0
  55. {datamint-2.3.5 → datamint-2.4.1}/datamint/entities/project.py +0 -0
  56. {datamint-2.3.5 → datamint-2.4.1}/datamint/entities/resource.py +0 -0
  57. {datamint-2.3.5 → datamint-2.4.1}/datamint/entities/user.py +0 -0
  58. {datamint-2.3.5 → datamint-2.4.1}/datamint/examples/__init__.py +0 -0
  59. {datamint-2.3.5 → datamint-2.4.1}/datamint/examples/example_projects.py +0 -0
  60. {datamint-2.3.5 → datamint-2.4.1}/datamint/exceptions.py +0 -0
  61. {datamint-2.3.5 → datamint-2.4.1}/datamint/experiment/__init__.py +0 -0
  62. {datamint-2.3.5 → datamint-2.4.1}/datamint/experiment/_patcher.py +0 -0
  63. {datamint-2.3.5 → datamint-2.4.1}/datamint/experiment/experiment.py +0 -0
  64. {datamint-2.3.5 → datamint-2.4.1}/datamint/logging.yaml +0 -0
  65. {datamint-2.3.5 → datamint-2.4.1}/datamint/types.py +0 -0
  66. {datamint-2.3.5 → datamint-2.4.1}/datamint/utils/logging_utils.py +0 -0
  67. {datamint-2.3.5 → datamint-2.4.1}/datamint/utils/torchmetrics.py +0 -0
  68. {datamint-2.3.5 → datamint-2.4.1}/datamint/utils/visualization.py +0 -0
@@ -0,0 +1,320 @@
1
+ Metadata-Version: 2.4
2
+ Name: datamint
3
+ Version: 2.4.1
4
+ Summary: A library for interacting with the Datamint API, designed for efficient data management, processing and Deep Learning workflows.
5
+ Requires-Python: >=3.10
6
+ Classifier: Programming Language :: Python :: 3
7
+ Classifier: Programming Language :: Python :: 3.10
8
+ Classifier: Programming Language :: Python :: 3.11
9
+ Classifier: Programming Language :: Python :: 3.12
10
+ Classifier: Programming Language :: Python :: 3.13
11
+ Classifier: Programming Language :: Python :: 3.14
12
+ Provides-Extra: dev
13
+ Provides-Extra: docs
14
+ Requires-Dist: Deprecated (>=1.2.0)
15
+ Requires-Dist: aiohttp (>=3.0.0,<4.0.0)
16
+ Requires-Dist: aioresponses (>=0.7.8,<0.8.0) ; extra == "dev"
17
+ Requires-Dist: albumentations (>=2.0.0)
18
+ Requires-Dist: backports-strenum ; python_version < "3.11"
19
+ Requires-Dist: datamintapi (==0.0.*)
20
+ Requires-Dist: httpx
21
+ Requires-Dist: humanize (>=4.0.0,<5.0.0)
22
+ Requires-Dist: lazy-loader (>=0.3.0)
23
+ Requires-Dist: lightning (>=2.0.0,!=2.5.1,!=2.5.1.post0)
24
+ Requires-Dist: matplotlib
25
+ Requires-Dist: medimgkit (>=0.7.3)
26
+ Requires-Dist: mlflow (>=2.0.0,<3.0.0)
27
+ Requires-Dist: nest-asyncio (>=1.0.0,<2.0.0)
28
+ Requires-Dist: nibabel (>=4.0.0)
29
+ Requires-Dist: numpy
30
+ Requires-Dist: opencv-python (>=4.0.0)
31
+ Requires-Dist: pandas (>=2.0.0)
32
+ Requires-Dist: platformdirs (>=4.0.0,<5.0.0)
33
+ Requires-Dist: pydantic (>=2.6.4)
34
+ Requires-Dist: pydicom (>=3.0.0,<4.0.0)
35
+ Requires-Dist: pylibjpeg (>=2.0.0,<3.0.0)
36
+ Requires-Dist: pylibjpeg-libjpeg (>=2.0.0,<3.0.0)
37
+ Requires-Dist: pytest (>=7.0.0,<8.0.0) ; extra == "dev"
38
+ Requires-Dist: pytest-cov (>=4.0.0,<5.0.0) ; extra == "dev"
39
+ Requires-Dist: pyyaml (>=5.0.0)
40
+ Requires-Dist: requests (>=2.0.0,<3.0.0)
41
+ Requires-Dist: responses (>=0.20.0,<0.21.0) ; extra == "dev"
42
+ Requires-Dist: rich (>=10.0.0)
43
+ Requires-Dist: setuptools (>=57.0) ; extra == "docs"
44
+ Requires-Dist: sphinx (>=5.0) ; extra == "docs"
45
+ Requires-Dist: sphinx-tabs (>=3.0.0) ; extra == "docs"
46
+ Requires-Dist: sphinx_rtd_theme (>=2.0.0) ; extra == "docs"
47
+ Requires-Dist: torch (>=1.2.0,!=2.3.0)
48
+ Requires-Dist: torchvision (>=0.18.0)
49
+ Requires-Dist: tqdm (>=4.0.0,<5.0.0)
50
+ Requires-Dist: typing_extensions (>=4.0.0)
51
+ Description-Content-Type: text/markdown
52
+
53
+ # Datamint Python API
54
+
55
+ ![Build Status](https://github.com/SonanceAI/datamint-python-api/actions/workflows/run_test.yaml/badge.svg)
56
+ [![Python 3.10+](https://img.shields.io/badge/python-3.10+-blue.svg)](https://www.python.org/downloads/)
57
+
58
+ A comprehensive Python SDK for interacting with the Datamint platform, providing seamless integration for medical imaging workflows, dataset management, and machine learning experiments.
59
+
60
+ ## 📋 Table of Contents
61
+
62
+ - [Features](#-features)
63
+ - [Installation](#-installation)
64
+ - [Quick Setup](#-quick-setup)
65
+ - [Documentation](#-documentation)
66
+ - [Key Components](#-key-components)
67
+ - [Command Line Tools](#️-command-line-tools)
68
+ - [Examples](#-examples)
69
+ - [Support](#-support)
70
+
71
+ ## 🚀 Features
72
+
73
+ - **Dataset Management**: Download, upload, and manage medical imaging datasets
74
+ - **Annotation Tools**: Create, upload, and manage annotations (segmentations, labels, measurements)
75
+ - **Experiment Tracking**: Integrated MLflow support for experiment management
76
+ - **PyTorch Lightning Integration**: Streamlined ML workflows with Lightning DataModules and callbacks
77
+ - **DICOM Support**: Native handling of DICOM files with anonymization capabilities
78
+ - **Multi-format Support**: PNG, JPEG, NIfTI, and other medical imaging formats
79
+
80
+ See the full documentation at https://sonanceai.github.io/datamint-python-api/
81
+
82
+ ## 📦 Installation
83
+
84
+ > [!NOTE]
85
+ > We recommend using a virtual environment to avoid package conflicts.
86
+
87
+ ### From PyPI
88
+
89
+ To be released soon
90
+
91
+ ### From Source
92
+
93
+ ```bash
94
+ pip install git+https://github.com/SonanceAI/datamint-python-api
95
+ ```
96
+
97
+ ### Virtual Environment Setup
98
+
99
+ <details>
100
+ <summary>Click to expand virtual environment setup instructions</summary>
101
+
102
+ We recommend that you install Datamint in a dedicated virtual environment, to avoid conflicting with your system packages.
103
+ For instance, create the enviroment once with `python3 -m venv datamint-env` and then activate it whenever you need it with:
104
+
105
+ 1. **Create the environment** (one-time setup):
106
+ ```bash
107
+ python3 -m venv datamint-env
108
+ ```
109
+
110
+ 2. **Activate the environment** (run whenever you need it):
111
+
112
+ | Platform | Command |
113
+ |----------|---------|
114
+ | Linux/macOS | `source datamint-env/bin/activate` |
115
+ | Windows CMD | `datamint-env\Scripts\activate.bat` |
116
+ | Windows PowerShell | `datamint-env\Scripts\Activate.ps1` |
117
+
118
+ 3. **Install the package**:
119
+ ```bash
120
+ pip install git+https://github.com/SonanceAI/datamint-python-api
121
+ ```
122
+
123
+ </details>
124
+
125
+ ## Setup API key
126
+
127
+ To use the Datamint API, you need to setup your API key (ask your administrator if you don't have one). Use one of the following methods to setup your API key:
128
+
129
+ ### Method 1: Command-line tool (recommended)
130
+
131
+ Run ``datamint-config`` in the terminal and follow the instructions. See [command_line_tools](https://sonanceai.github.io/datamint-python-api/command_line_tools.html) for more details.
132
+
133
+ ### Method 2: Environment variable
134
+
135
+ Specify the API key as an environment variable.
136
+
137
+ **Bash:**
138
+ ```bash
139
+ export DATAMINT_API_KEY="my_api_key"
140
+ # run your commands (e.g., `datamint-upload`, `python script.py`)
141
+ ```
142
+
143
+ **Python:**
144
+ ```python
145
+ import os
146
+ os.environ["DATAMINT_API_KEY"] = "my_api_key"
147
+ ```
148
+
149
+ ## 📚 Documentation
150
+
151
+ | Resource | Description |
152
+ |----------|-------------|
153
+ | [🚀 Getting Started](docs/source/getting_started.rst) | Step-by-step setup and basic usage |
154
+ | [📖 API Reference](docs/source/client_api.rst) | Complete API documentation |
155
+ | [🔥 PyTorch Integration](docs/source/pytorch_integration.rst) | ML workflow integration |
156
+ | [💡 Examples](examples/) | Practical usage examples |
157
+
158
+ ## 🔗 Key Components
159
+
160
+ ### Dataset Management
161
+
162
+ ```python
163
+ from datamint import Dataset
164
+
165
+ # Load dataset with annotations
166
+ dataset = Dataset(
167
+ project_name="medical-segmentation",
168
+ )
169
+
170
+ # Access data
171
+ for sample in dataset:
172
+ image = sample['image'] # torch.Tensor
173
+ mask = sample['segmentation'] # torch.Tensor (if available)
174
+ metadata = sample['metainfo'] # dict
175
+ ```
176
+
177
+
178
+ ### PyTorch Lightning Integration
179
+
180
+ ```python
181
+ import lightning as L
182
+ from datamint.lightning import DatamintDataModule
183
+ from datamint.mlflow.lightning.callbacks import MLFlowModelCheckpoint
184
+
185
+ # Data module
186
+ datamodule = DatamintDataModule(
187
+ project_name="your-project",
188
+ batch_size=16,
189
+ train_split=0.8
190
+ )
191
+
192
+ # ML tracking callback
193
+ checkpoint_callback = MLFlowModelCheckpoint(
194
+ monitor="val_loss",
195
+ save_top_k=1,
196
+ register_model_name="best-model"
197
+ )
198
+
199
+ # Trainer with MLflow logging
200
+ trainer = L.Trainer(
201
+ max_epochs=100,
202
+ callbacks=[checkpoint_callback],
203
+ logger=L.pytorch.loggers.MLFlowLogger(
204
+ experiment_name="medical-segmentation"
205
+ )
206
+ )
207
+ ```
208
+
209
+
210
+ ### Annotation Management
211
+
212
+
213
+ ```python
214
+ # Upload segmentation masks
215
+ api.upload_segmentations(
216
+ resource_id="resource-123",
217
+ file_path="segmentation.nii.gz",
218
+ name="liver_segmentation",
219
+ frame_index=0
220
+ )
221
+
222
+ # Add categorical annotations
223
+ api.add_image_category_annotation(
224
+ resource_id="resource-123",
225
+ identifier="diagnosis",
226
+ value="positive"
227
+ )
228
+
229
+ # Add geometric annotations
230
+ api.add_line_annotation(
231
+ point1=(10, 20),
232
+ point2=(50, 80),
233
+ resource_id="resource-123",
234
+ identifier="measurement",
235
+ frame_index=5
236
+ )
237
+ ```
238
+
239
+
240
+ ## 🛠️ Command Line Tools
241
+
242
+ ### Upload Resources
243
+
244
+ **Upload DICOM files with anonymization:**
245
+ ```bash
246
+ datamint-upload \
247
+ --path /path/to/dicoms \
248
+ --recursive \
249
+ --channel "training-data" \
250
+ --anonymize \
251
+ --publish
252
+ ```
253
+
254
+ **Upload with segmentation masks:**
255
+ ```bash
256
+ datamint-upload \
257
+ --path /path/to/images \
258
+ --segmentation_path /path/to/masks \
259
+ --segmentation_names segmentation_config.yaml
260
+ ```
261
+
262
+ ### Configuration Management
263
+
264
+ ```bash
265
+ # Interactive setup
266
+ datamint-config
267
+
268
+ # Set API key
269
+ datamint-config --api-key "your-key"
270
+ ```
271
+
272
+ ## 🔍 Examples
273
+
274
+ ### Medical Image Segmentation Pipeline
275
+
276
+ ```python
277
+ import torch
278
+ import lightning as L
279
+ from datamint.lightning import DatamintDataModule
280
+ from datamint.mlflow.lightning.callbacks import MLFlowModelCheckpoint
281
+
282
+ class SegmentationModel(L.LightningModule):
283
+ def __init__(self):
284
+ super().__init__()
285
+ # Model definition...
286
+
287
+ def training_step(self, batch, batch_idx):
288
+ # Training logic...
289
+ pass
290
+
291
+ # Setup data
292
+ datamodule = DatamintDataModule(
293
+ project_name="liver-segmentation",
294
+ batch_size=8,
295
+ train_split=0.8
296
+ )
297
+
298
+ # Setup model with MLflow tracking
299
+ model = SegmentationModel()
300
+ checkpoint_cb = MLFlowModelCheckpoint(
301
+ monitor="val_dice",
302
+ mode="max",
303
+ register_model_name="liver-segmentation-model"
304
+ )
305
+
306
+ # Train
307
+ trainer = L.Trainer(
308
+ max_epochs=50,
309
+ callbacks=[checkpoint_cb],
310
+ logger=L.pytorch.loggers.MLFlowLogger()
311
+ )
312
+ trainer.fit(model, datamodule)
313
+ ```
314
+
315
+ ## 🆘 Support
316
+
317
+ [Full Documentation](https://datamint-python-api.readthedocs.io/)
318
+ [GitHub Issues](https://github.com/SonanceAI/datamint-python-api/issues)
319
+
320
+
@@ -0,0 +1,267 @@
1
+ # Datamint Python API
2
+
3
+ ![Build Status](https://github.com/SonanceAI/datamint-python-api/actions/workflows/run_test.yaml/badge.svg)
4
+ [![Python 3.10+](https://img.shields.io/badge/python-3.10+-blue.svg)](https://www.python.org/downloads/)
5
+
6
+ A comprehensive Python SDK for interacting with the Datamint platform, providing seamless integration for medical imaging workflows, dataset management, and machine learning experiments.
7
+
8
+ ## 📋 Table of Contents
9
+
10
+ - [Features](#-features)
11
+ - [Installation](#-installation)
12
+ - [Quick Setup](#-quick-setup)
13
+ - [Documentation](#-documentation)
14
+ - [Key Components](#-key-components)
15
+ - [Command Line Tools](#️-command-line-tools)
16
+ - [Examples](#-examples)
17
+ - [Support](#-support)
18
+
19
+ ## 🚀 Features
20
+
21
+ - **Dataset Management**: Download, upload, and manage medical imaging datasets
22
+ - **Annotation Tools**: Create, upload, and manage annotations (segmentations, labels, measurements)
23
+ - **Experiment Tracking**: Integrated MLflow support for experiment management
24
+ - **PyTorch Lightning Integration**: Streamlined ML workflows with Lightning DataModules and callbacks
25
+ - **DICOM Support**: Native handling of DICOM files with anonymization capabilities
26
+ - **Multi-format Support**: PNG, JPEG, NIfTI, and other medical imaging formats
27
+
28
+ See the full documentation at https://sonanceai.github.io/datamint-python-api/
29
+
30
+ ## 📦 Installation
31
+
32
+ > [!NOTE]
33
+ > We recommend using a virtual environment to avoid package conflicts.
34
+
35
+ ### From PyPI
36
+
37
+ To be released soon
38
+
39
+ ### From Source
40
+
41
+ ```bash
42
+ pip install git+https://github.com/SonanceAI/datamint-python-api
43
+ ```
44
+
45
+ ### Virtual Environment Setup
46
+
47
+ <details>
48
+ <summary>Click to expand virtual environment setup instructions</summary>
49
+
50
+ We recommend that you install Datamint in a dedicated virtual environment, to avoid conflicting with your system packages.
51
+ For instance, create the enviroment once with `python3 -m venv datamint-env` and then activate it whenever you need it with:
52
+
53
+ 1. **Create the environment** (one-time setup):
54
+ ```bash
55
+ python3 -m venv datamint-env
56
+ ```
57
+
58
+ 2. **Activate the environment** (run whenever you need it):
59
+
60
+ | Platform | Command |
61
+ |----------|---------|
62
+ | Linux/macOS | `source datamint-env/bin/activate` |
63
+ | Windows CMD | `datamint-env\Scripts\activate.bat` |
64
+ | Windows PowerShell | `datamint-env\Scripts\Activate.ps1` |
65
+
66
+ 3. **Install the package**:
67
+ ```bash
68
+ pip install git+https://github.com/SonanceAI/datamint-python-api
69
+ ```
70
+
71
+ </details>
72
+
73
+ ## Setup API key
74
+
75
+ To use the Datamint API, you need to setup your API key (ask your administrator if you don't have one). Use one of the following methods to setup your API key:
76
+
77
+ ### Method 1: Command-line tool (recommended)
78
+
79
+ Run ``datamint-config`` in the terminal and follow the instructions. See [command_line_tools](https://sonanceai.github.io/datamint-python-api/command_line_tools.html) for more details.
80
+
81
+ ### Method 2: Environment variable
82
+
83
+ Specify the API key as an environment variable.
84
+
85
+ **Bash:**
86
+ ```bash
87
+ export DATAMINT_API_KEY="my_api_key"
88
+ # run your commands (e.g., `datamint-upload`, `python script.py`)
89
+ ```
90
+
91
+ **Python:**
92
+ ```python
93
+ import os
94
+ os.environ["DATAMINT_API_KEY"] = "my_api_key"
95
+ ```
96
+
97
+ ## 📚 Documentation
98
+
99
+ | Resource | Description |
100
+ |----------|-------------|
101
+ | [🚀 Getting Started](docs/source/getting_started.rst) | Step-by-step setup and basic usage |
102
+ | [📖 API Reference](docs/source/client_api.rst) | Complete API documentation |
103
+ | [🔥 PyTorch Integration](docs/source/pytorch_integration.rst) | ML workflow integration |
104
+ | [💡 Examples](examples/) | Practical usage examples |
105
+
106
+ ## 🔗 Key Components
107
+
108
+ ### Dataset Management
109
+
110
+ ```python
111
+ from datamint import Dataset
112
+
113
+ # Load dataset with annotations
114
+ dataset = Dataset(
115
+ project_name="medical-segmentation",
116
+ )
117
+
118
+ # Access data
119
+ for sample in dataset:
120
+ image = sample['image'] # torch.Tensor
121
+ mask = sample['segmentation'] # torch.Tensor (if available)
122
+ metadata = sample['metainfo'] # dict
123
+ ```
124
+
125
+
126
+ ### PyTorch Lightning Integration
127
+
128
+ ```python
129
+ import lightning as L
130
+ from datamint.lightning import DatamintDataModule
131
+ from datamint.mlflow.lightning.callbacks import MLFlowModelCheckpoint
132
+
133
+ # Data module
134
+ datamodule = DatamintDataModule(
135
+ project_name="your-project",
136
+ batch_size=16,
137
+ train_split=0.8
138
+ )
139
+
140
+ # ML tracking callback
141
+ checkpoint_callback = MLFlowModelCheckpoint(
142
+ monitor="val_loss",
143
+ save_top_k=1,
144
+ register_model_name="best-model"
145
+ )
146
+
147
+ # Trainer with MLflow logging
148
+ trainer = L.Trainer(
149
+ max_epochs=100,
150
+ callbacks=[checkpoint_callback],
151
+ logger=L.pytorch.loggers.MLFlowLogger(
152
+ experiment_name="medical-segmentation"
153
+ )
154
+ )
155
+ ```
156
+
157
+
158
+ ### Annotation Management
159
+
160
+
161
+ ```python
162
+ # Upload segmentation masks
163
+ api.upload_segmentations(
164
+ resource_id="resource-123",
165
+ file_path="segmentation.nii.gz",
166
+ name="liver_segmentation",
167
+ frame_index=0
168
+ )
169
+
170
+ # Add categorical annotations
171
+ api.add_image_category_annotation(
172
+ resource_id="resource-123",
173
+ identifier="diagnosis",
174
+ value="positive"
175
+ )
176
+
177
+ # Add geometric annotations
178
+ api.add_line_annotation(
179
+ point1=(10, 20),
180
+ point2=(50, 80),
181
+ resource_id="resource-123",
182
+ identifier="measurement",
183
+ frame_index=5
184
+ )
185
+ ```
186
+
187
+
188
+ ## 🛠️ Command Line Tools
189
+
190
+ ### Upload Resources
191
+
192
+ **Upload DICOM files with anonymization:**
193
+ ```bash
194
+ datamint-upload \
195
+ --path /path/to/dicoms \
196
+ --recursive \
197
+ --channel "training-data" \
198
+ --anonymize \
199
+ --publish
200
+ ```
201
+
202
+ **Upload with segmentation masks:**
203
+ ```bash
204
+ datamint-upload \
205
+ --path /path/to/images \
206
+ --segmentation_path /path/to/masks \
207
+ --segmentation_names segmentation_config.yaml
208
+ ```
209
+
210
+ ### Configuration Management
211
+
212
+ ```bash
213
+ # Interactive setup
214
+ datamint-config
215
+
216
+ # Set API key
217
+ datamint-config --api-key "your-key"
218
+ ```
219
+
220
+ ## 🔍 Examples
221
+
222
+ ### Medical Image Segmentation Pipeline
223
+
224
+ ```python
225
+ import torch
226
+ import lightning as L
227
+ from datamint.lightning import DatamintDataModule
228
+ from datamint.mlflow.lightning.callbacks import MLFlowModelCheckpoint
229
+
230
+ class SegmentationModel(L.LightningModule):
231
+ def __init__(self):
232
+ super().__init__()
233
+ # Model definition...
234
+
235
+ def training_step(self, batch, batch_idx):
236
+ # Training logic...
237
+ pass
238
+
239
+ # Setup data
240
+ datamodule = DatamintDataModule(
241
+ project_name="liver-segmentation",
242
+ batch_size=8,
243
+ train_split=0.8
244
+ )
245
+
246
+ # Setup model with MLflow tracking
247
+ model = SegmentationModel()
248
+ checkpoint_cb = MLFlowModelCheckpoint(
249
+ monitor="val_dice",
250
+ mode="max",
251
+ register_model_name="liver-segmentation-model"
252
+ )
253
+
254
+ # Train
255
+ trainer = L.Trainer(
256
+ max_epochs=50,
257
+ callbacks=[checkpoint_cb],
258
+ logger=L.pytorch.loggers.MLFlowLogger()
259
+ )
260
+ trainer.fit(model, datamodule)
261
+ ```
262
+
263
+ ## 🆘 Support
264
+
265
+ [Full Documentation](https://datamint-python-api.readthedocs.io/)
266
+ [GitHub Issues](https://github.com/SonanceAI/datamint-python-api/issues)
267
+
@@ -61,22 +61,56 @@ class BaseApi:
61
61
  client: Optional HTTP client instance. If None, a new one will be created.
62
62
  """
63
63
  self.config = config
64
- self.client = client or self._create_client()
64
+ self._owns_client = client is None # Track if we created the client
65
+ self.client = client or BaseApi._create_client(config)
65
66
  self.semaphore = asyncio.Semaphore(20)
66
67
  self._api_instance: 'Api | None' = None # Injected by Api class
67
68
 
68
- def _create_client(self) -> httpx.Client:
69
- """Create and configure HTTP client with authentication and timeouts."""
70
- headers = None
71
- if self.config.api_key:
72
- headers = {"apikey": self.config.api_key}
69
+ @staticmethod
70
+ def _create_client(config: ApiConfig) -> httpx.Client:
71
+ """Create and configure HTTP client with authentication and timeouts.
72
+
73
+ The client is designed to be long-lived and reused across multiple requests.
74
+ It maintains connection pooling for improved performance.
75
+ Default limits: max_keepalive_connections=20, max_connections=100
76
+ """
77
+ headers = {"apikey": config.api_key} if config.api_key else None
73
78
 
74
79
  return httpx.Client(
75
- base_url=self.config.server_url,
80
+ base_url=config.server_url,
76
81
  headers=headers,
77
- timeout=self.config.timeout
82
+ timeout=config.timeout,
83
+ limits=httpx.Limits(
84
+ max_keepalive_connections=5, # Reduced from default 20
85
+ max_connections=20, # Reduced from default 100
86
+ keepalive_expiry=8
87
+ )
78
88
  )
79
89
 
90
+ def close(self) -> None:
91
+ """Close the HTTP client and release resources.
92
+
93
+ Should be called when the API instance is no longer needed.
94
+ Only closes the client if it was created by this instance.
95
+ """
96
+ if self._owns_client and self.client is not None:
97
+ self.client.close()
98
+
99
+ def __enter__(self):
100
+ """Context manager entry."""
101
+ return self
102
+
103
+ def __exit__(self, exc_type, exc_val, exc_tb):
104
+ """Context manager exit - ensures client is closed."""
105
+ self.close()
106
+
107
+ def __del__(self):
108
+ """Destructor - ensures client is closed when instance is garbage collected."""
109
+ try:
110
+ self.close()
111
+ except Exception:
112
+ pass # Ignore errors during cleanup
113
+
80
114
  def _stream_request(self, method: str, endpoint: str, **kwargs):
81
115
  """Make streaming HTTP request with error handling.
82
116
 
@@ -68,6 +68,8 @@ class Api:
68
68
  f" Please check your api_key and/or other configurations. {e}")
69
69
 
70
70
  def _get_endpoint(self, name: str):
71
+ if self._client is None:
72
+ self._client = BaseApi._create_client(self.config)
71
73
  if name not in self._endpoints:
72
74
  api_class = self._API_MAP[name]
73
75
  endpoint = api_class(self.config, self._client)