endoreg-db: endoreg_db-0.8.3.2-py3-none-any.whl → endoreg_db-0.8.3.3-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of endoreg-db might be problematic.

endoreg_db/management/commands/create_model_meta_from_huggingface.py (added)
@@ -0,0 +1,116 @@
+ """
+ Django management command to create ModelMeta from Hugging Face model.
+ """
+
+ from pathlib import Path
+
+ from django.core.files.base import ContentFile
+ from django.core.management.base import BaseCommand
+ from huggingface_hub import hf_hub_download
+
+ from endoreg_db.models import AiModel, LabelSet, ModelMeta
+
+
+ class Command(BaseCommand):
+     help = "Create ModelMeta by downloading model from Hugging Face"
+
+     def add_arguments(self, parser):
+         parser.add_argument(
+             "--model_id",
+             type=str,
+             default="wg-lux/colo_segmentation_RegNetX800MF_base",
+             help="Hugging Face model ID",
+         )
+         parser.add_argument(
+             "--model_name",
+             type=str,
+             default="image_multilabel_classification_colonoscopy_default",
+             help="Name for the AI model",
+         )
+         parser.add_argument(
+             "--labelset_name",
+             type=str,
+             default="multilabel_classification_colonoscopy_default",
+             help="Name of the labelset",
+         )
+         parser.add_argument(
+             "--meta_version",
+             type=str,
+             default="1",
+             help="Version for the model meta",
+         )
+
+     def handle(self, *args, **options):
+         model_id = options["model_id"]
+         model_name = options["model_name"]
+         labelset_name = options["labelset_name"]
+         version = options["meta_version"]
+
+         self.stdout.write(f"Downloading model {model_id} from Hugging Face...")
+
+         try:
+             # Download the model weights
+             weights_path = hf_hub_download(
+                 repo_id=model_id,
+                 filename="colo_segmentation_RegNetX800MF_base.ckpt",
+                 local_dir="/tmp",
+             )
+             self.stdout.write(f"Downloaded weights to: {weights_path}")
+
+             # Get or create AI model
+             ai_model, created = AiModel.objects.get_or_create(
+                 name=model_name, defaults={"description": f"Model from {model_id}"}
+             )
+             if created:
+                 self.stdout.write(f"Created AI model: {ai_model.name}")
+
+             # Get labelset
+             try:
+                 labelset = LabelSet.objects.get(name=labelset_name)
+             except LabelSet.DoesNotExist:
+                 self.stdout.write(
+                     self.style.ERROR(f"LabelSet '{labelset_name}' not found")
+                 )
+                 return
+
+             # Create ModelMeta
+             model_meta, created = ModelMeta.objects.get_or_create(
+                 name=model_name,
+                 model=ai_model,
+                 version=version,
+                 defaults={
+                     "labelset": labelset,
+                     "activation": "sigmoid",
+                     "mean": "0.45211223,0.27139644,0.19264949",
+                     "std": "0.31418097,0.21088019,0.16059452",
+                     "size_x": 716,
+                     "size_y": 716,
+                     "axes": "2,0,1",
+                     "batchsize": 16,
+                     "num_workers": 0,
+                     "description": f"Downloaded from {model_id}",
+                 },
+             )
+
+             # Save the weights file to the model
+             with open(weights_path, "rb") as f:
+                 model_meta.weights.save(
+                     f"{model_name}_v{version}_colo_segmentation_RegNetX800MF_base.ckpt",
+                     ContentFile(f.read()),
+                 )
+
+             # Set as active meta
+             ai_model.active_meta = model_meta
+             ai_model.save()
+
+             self.stdout.write(
+                 self.style.SUCCESS(
+                     f"Successfully {'created' if created else 'updated'} ModelMeta: {model_meta}"
+                 )
+             )
+
+         except Exception as e:
+             self.stdout.write(self.style.ERROR(f"Error creating ModelMeta: {e}"))
+             import traceback
+
+             traceback.print_exc()
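
For orientation, a minimal, hedged sketch of how this new command could be driven programmatically once the wheel is installed. It relies only on names visible in the diff (the command module name from the RECORD entry and the option names from add_arguments); the settings module is a placeholder, and the target LabelSet must already exist, e.g. after load_ai_model_label_data has been run.

# Hypothetical driver script -- not part of the wheel.
import os

import django

os.environ.setdefault("DJANGO_SETTINGS_MODULE", "myproject.settings")  # placeholder settings module
django.setup()

from django.core.management import call_command

# Option names mirror the add_arguments() definitions above; the values shown are the defaults.
call_command(
    "create_model_meta_from_huggingface",
    model_id="wg-lux/colo_segmentation_RegNetX800MF_base",
    model_name="image_multilabel_classification_colonoscopy_default",
    labelset_name="multilabel_classification_colonoscopy_default",
    meta_version="1",
)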
endoreg_db/management/commands/setup_endoreg_db.py (added)
@@ -0,0 +1,196 @@
+ """
+ Django management command to perform complete setup for EndoReg DB when used as an embedded app.
+ This command ensures all necessary data and configurations are initialized.
+ """
+
+ import os
+ from pathlib import Path
+
+ from django.core.management import call_command
+ from django.core.management.base import BaseCommand
+
+
+ class Command(BaseCommand):
+     help = """
+     Complete setup for EndoReg DB when used as an embedded app.
+     This command performs all necessary initialization steps:
+     1. Loads base database data
+     2. Sets up AI models and labels
+     3. Creates cache table
+     4. Initializes model metadata
+     """
+
+     def add_arguments(self, parser):
+         parser.add_argument(
+             "--skip-ai-setup",
+             action="store_true",
+             help="Skip AI model setup (for cases where AI features are not needed)",
+         )
+         parser.add_argument(
+             "--force-recreate",
+             action="store_true",
+             help="Force recreation of AI model metadata even if it exists",
+         )
+
+     def handle(self, *args, **options):
+         skip_ai = options.get("skip_ai_setup", False)
+         force_recreate = options.get("force_recreate", False)
+
+         self.stdout.write(self.style.SUCCESS("🚀 Starting EndoReg DB embedded app setup..."))
+
+         # Step 1: Load base database data
+         self.stdout.write("\n📊 Step 1: Loading base database data...")
+         try:
+             call_command("load_base_db_data")
+             self.stdout.write(self.style.SUCCESS("✅ Base database data loaded successfully"))
+         except Exception as e:
+             self.stdout.write(self.style.ERROR(f"❌ Failed to load base data: {e}"))
+             return
+
+         # Step 2: Create cache table (only if using database caching)
+         self.stdout.write("\n💾 Step 2: Setting up caching...")
+         from django.conf import settings
+
+         cache_backend = settings.CACHES.get("default", {}).get("BACKEND", "")
+         if "db" in cache_backend or "database" in cache_backend:
+             self.stdout.write("Using database caching - creating cache table...")
+             try:
+                 call_command("createcachetable")
+                 self.stdout.write(self.style.SUCCESS("✅ Cache table created successfully"))
+             except Exception as e:
+                 self.stdout.write(self.style.ERROR(f"❌ Failed to create cache table: {e}"))
+                 return
+         else:
+             self.stdout.write("Using in-memory caching - skipping cache table creation")
+
+         if skip_ai:
+             self.stdout.write(self.style.WARNING("\n⚠️ Skipping AI setup as requested"))
+         else:
+             # Step 3: Load AI model data
+             self.stdout.write("\n🤖 Step 3: Loading AI model data...")
+             try:
+                 call_command("load_ai_model_data")
+                 self.stdout.write(self.style.SUCCESS("✅ AI model data loaded successfully"))
+             except Exception as e:
+                 self.stdout.write(self.style.ERROR(f"❌ Failed to load AI model data: {e}"))
+                 return
+
+             # Step 4: Load AI model label data
+             self.stdout.write("\n🏷️ Step 4: Loading AI model label data...")
+             try:
+                 call_command("load_ai_model_label_data")
+                 self.stdout.write(self.style.SUCCESS("✅ AI model label data loaded successfully"))
+             except Exception as e:
+                 self.stdout.write(self.style.ERROR(f"❌ Failed to load AI model label data: {e}"))
+                 return
+
+             # Step 5: Create model metadata
+             self.stdout.write("\n📋 Step 5: Creating AI model metadata...")
+             try:
+                 # Check if model metadata already exists
+                 from endoreg_db.models import AiModel
+
+                 default_model_name = "image_multilabel_classification_colonoscopy_default"
+                 ai_model = AiModel.objects.filter(name=default_model_name).first()
+
+                 if not ai_model:
+                     self.stdout.write(self.style.ERROR(f"❌ AI model '{default_model_name}' not found"))
+                     return
+
+                 existing_meta = ai_model.metadata_versions.first()
+                 if existing_meta and not force_recreate:
+                     self.stdout.write(self.style.SUCCESS("✅ Model metadata already exists (use --force-recreate to recreate)"))
+                 else:
+                     # Try to create model metadata
+                     model_path = self._find_model_weights_file()
+                     if model_path:
+                         call_command(
+                             "create_multilabel_model_meta",
+                             model_name=default_model_name,
+                             model_meta_version=1,
+                             image_classification_labelset_name="multilabel_classification_colonoscopy_default",
+                             model_path=str(model_path),
+                         )
+                         self.stdout.write(self.style.SUCCESS("✅ AI model metadata created successfully"))
+                     else:
+                         self.stdout.write(self.style.WARNING("⚠️ Model weights file not found. AI features may not work properly."))
+
+             except Exception as e:
+                 self.stdout.write(self.style.ERROR(f"❌ Failed to create AI model metadata: {e}"))
+                 return
+
+         # Step 6: Verification
+         self.stdout.write("\n🔍 Step 6: Verifying setup...")
+         try:
+             self._verify_setup()
+             self.stdout.write(self.style.SUCCESS("✅ Setup verification completed successfully"))
+         except Exception as e:
+             self.stdout.write(self.style.ERROR(f"❌ Setup verification failed: {e}"))
+             return
+
+         self.stdout.write(self.style.SUCCESS("\n🎉 EndoReg DB embedded app setup completed successfully!"))
+         self.stdout.write("\nNext steps:")
+         self.stdout.write("1. Run migrations: python manage.py migrate")
+         self.stdout.write("2. Create superuser: python manage.py createsuperuser")
+         self.stdout.write("3. Start development server: python manage.py runserver")
+
+     def _find_model_weights_file(self):
+         """Find the model weights file in various possible locations."""
+         # Check common locations for model weights
+         possible_paths = [
+             # Test assets (for development)
+             Path("tests/assets/colo_segmentation_RegNetX800MF_6.ckpt"),
+             # Project root assets
+             Path("assets/colo_segmentation_RegNetX800MF_6.ckpt"),
+             # Storage directory
+             Path("data/storage/model_weights/colo_segmentation_RegNetX800MF_6.ckpt"),
+             # Absolute paths based on environment
+             Path(os.getenv("STORAGE_DIR", "storage")) / "model_weights" / "colo_segmentation_RegNetX800MF_6.ckpt",
+         ]
+
+         for path in possible_paths:
+             if path.exists():
+                 self.stdout.write(f"Found model weights at: {path}")
+                 return path
+
+         self.stdout.write("Model weights file not found in standard locations")
+         return None
+
+     def _verify_setup(self):
+         """Verify that the setup was successful."""
+         from django.conf import settings
+         from django.db import connection
+
+         # Check that required tables exist
+         required_tables = [
+             "endoreg_db_aimodel",
+             "endoreg_db_modelmeta",
+         ]
+
+         # Only check for cache table if using database caching
+         cache_backend = settings.CACHES.get("default", {}).get("BACKEND", "")
+         if "db" in cache_backend or "database" in cache_backend:
+             required_tables.append("django_cache_table")
+
+         cursor = connection.cursor()
+         cursor.execute("SELECT name FROM sqlite_master WHERE type='table';")
+         existing_tables = [row[0] for row in cursor.fetchall()]
+
+         missing_tables = [table for table in required_tables if table not in existing_tables]
+         if missing_tables:
+             raise Exception(f"Missing required tables: {missing_tables}")
+
+         # Check that AI models exist (if AI setup was performed)
+         from endoreg_db.models import AiModel
+
+         if AiModel.objects.exists():
+             ai_model_count = AiModel.objects.count()
+             self.stdout.write(f"Found {ai_model_count} AI model(s)")
+
+         # Check for model metadata
+         from endoreg_db.models import ModelMeta
+
+         meta_count = ModelMeta.objects.count()
+         self.stdout.write(f"Found {meta_count} model metadata record(s)")
+
+         self.stdout.write("Setup verification passed")
endoreg_db/models/metadata/model_meta_logic.py (modified)
@@ -1,21 +1,21 @@
  import shutil
+ from logging import getLogger
  from pathlib import Path
- from typing import Optional, TYPE_CHECKING, Any, Type
- from huggingface_hub import hf_hub_download
+ from typing import TYPE_CHECKING, Any, Optional, Type
+
  from django.db import transaction
+ from huggingface_hub import hf_hub_download

  # Assuming ModelMeta, AiModel, LabelSet are importable from the correct locations
  # Adjust imports based on your project structure if necessary
  from ..administration.ai.ai_model import AiModel
  from ..label.label_set import LabelSet
- from ..utils import WEIGHTS_DIR, STORAGE_DIR
-
- from logging import getLogger
+ from ..utils import STORAGE_DIR, WEIGHTS_DIR

  logger = getLogger("ai_model")

  if TYPE_CHECKING:
-     from .model_meta import ModelMeta # Import ModelMeta for type hinting
+     from .model_meta import ModelMeta  # Import ModelMeta for type hinting


  def get_latest_version_number_logic(
@@ -29,13 +29,13 @@ def get_latest_version_number_logic(
      """
      versions_qs = cls.objects.filter(
          name=meta_name, model__name=model_name
-     ).values_list('version', flat=True)
+     ).values_list("version", flat=True)

      max_v = 0
      found_numeric_version = False

      for v_str in versions_qs:
-         if v_str is None: # Skip None versions
+         if v_str is None:  # Skip None versions
              continue
          try:
              v_int = int(v_str)
@@ -47,13 +47,13 @@ def get_latest_version_number_logic(
                  f"Warning: Could not parse version string '{v_str}' as an integer for "
                  f"meta_name='{meta_name}', model_name='{model_name}' while determining the max version."
              )
- 
+
      return max_v if found_numeric_version else 0


  @transaction.atomic
  def create_from_file_logic(
-     cls: Type["ModelMeta"], # cls is ModelMeta
+     cls: Type["ModelMeta"],  # cls is ModelMeta
      meta_name: str,
      model_name: str,
      labelset_name: str,
@@ -94,11 +94,14 @@ def create_from_file_logic(
          )
      elif existing and bump_if_exists:
          target_version = str(latest_version_num + 1)
-         logger.info(f"Bumping version for {meta_name}/{model_name} to {target_version}")
+         logger.info(
+             f"Bumping version for {meta_name}/{model_name} to {target_version}"
+         )
      else:
          target_version = str(latest_version_num + 1)
-         logger.info(f"Setting next version for {meta_name}/{model_name} to {target_version}")
- 
+         logger.info(
+             f"Setting next version for {meta_name}/{model_name} to {target_version}"
+         )

      # --- Prepare Weights File ---
      source_weights_path = Path(weights_file).resolve()
@@ -108,7 +111,10 @@ def create_from_file_logic(
      # Construct destination path within MEDIA_ROOT/WEIGHTS_DIR
      weights_filename = source_weights_path.name
      # Relative path for the FileField upload_to
-     relative_dest_path = Path(WEIGHTS_DIR.relative_to(STORAGE_DIR)) / f"{meta_name}_v{target_version}_{weights_filename}"
+     relative_dest_path = (
+         Path(WEIGHTS_DIR.relative_to(STORAGE_DIR))
+         / f"{meta_name}_v{target_version}_{weights_filename}"
+     )
      # Full path for shutil.copy
      full_dest_path = STORAGE_DIR / relative_dest_path

@@ -125,8 +131,8 @@ def create_from_file_logic(
      # --- Create/Update ModelMeta Instance ---
      defaults = {
          "labelset": label_set,
-         "weights": relative_dest_path.as_posix(), # Store relative path for FileField
-         **kwargs, # Pass through other fields like activation, mean, std, etc.
+         "weights": relative_dest_path.as_posix(),  # Store relative path for FileField
+         **kwargs,  # Pass through other fields like activation, mean, std, etc.
      }

      # Remove None values from defaults to avoid overriding model defaults unnecessarily
@@ -152,35 +158,39 @@ def create_from_file_logic(

      return model_meta

+
  # --- Add other logic functions referenced by ModelMeta here ---
  # (get_latest_version_number_logic, get_activation_function_logic, etc.)
  # Placeholder for get_activation_function_logic
  def get_activation_function_logic(activation_name: str):
-     import torch.nn as nn # Import locally as it's specific to this function
+     import torch.nn as nn  # Import locally as it's specific to this function
+
      if activation_name.lower() == "sigmoid":
          return nn.Sigmoid()
      elif activation_name.lower() == "softmax":
          # Note: Softmax usually requires specifying the dimension
-         return nn.Softmax(dim=1) # Assuming dim=1 (channels) is common
+         return nn.Softmax(dim=1)  # Assuming dim=1 (channels) is common
      elif activation_name.lower() == "none":
          return nn.Identity()
      else:
          # Consider adding more activations or raising an error
          raise ValueError(f"Unsupported activation function: {activation_name}")

+
  # Placeholder for get_inference_dataset_config_logic
  def get_inference_dataset_config_logic(model_meta: "ModelMeta") -> dict:
      # This would typically extract relevant fields from model_meta
      # for configuring a dataset during inference
      return {
-         "mean": [float(x) for x in model_meta.mean.split(',')],
-         "std": [float(x) for x in model_meta.std.split(',')],
-         "size_y": model_meta.size_y, # Add size_y key
-         "size_x": model_meta.size_x, # Add size_x key
-         "axes": [int(x) for x in model_meta.axes.split(',')],
+         "mean": [float(x) for x in model_meta.mean.split(",")],
+         "std": [float(x) for x in model_meta.std.split(",")],
+         "size_y": model_meta.size_y,  # Add size_y key
+         "size_x": model_meta.size_x,  # Add size_x key
+         "axes": [int(x) for x in model_meta.axes.split(",")],
          # Add other relevant config like normalization type, etc.
      }

+
  # Placeholder for get_config_dict_logic
  def get_config_dict_logic(model_meta: "ModelMeta") -> dict:
      # Returns a dictionary representation of the model's configuration
@@ -202,6 +212,7 @@ def get_config_dict_logic(model_meta: "ModelMeta") -> dict:
          # Add any other relevant fields
      }

+
  # Placeholder for get_model_meta_by_name_version_logic
  def get_model_meta_by_name_version_logic(
      cls: Type["ModelMeta"],
@@ -227,17 +238,24 @@ def get_model_meta_by_name_version_logic(
              ) from exc
      else:
          # Get latest version
-         latest = cls.objects.filter(name=meta_name, model=ai_model).order_by("-date_created").first()
+         latest = (
+             cls.objects.filter(name=meta_name, model=ai_model)
+             .order_by("-date_created")
+             .first()
+         )
          if latest:
              return latest
          else:
              raise cls.DoesNotExist(
                  f"No ModelMeta found for '{meta_name}' and model '{model_name}'."
              )
- 
- from huggingface_hub import model_info
+
+
  import re

+ from huggingface_hub import model_info
+
+
  def infer_default_model_meta_from_hf(model_id: str) -> dict[str, Any]:
      """
      Infers default model metadata (activation, normalization, input size)
@@ -248,7 +266,9 @@ def infer_default_model_meta_from_hf(model_id: str) -> dict[str, Any]:
      """

      if not (info := model_info(model_id)):
-         logger.info(f"Could not retrieve model info for {model_id}, using ColoReg segmentation defaults.")
+         logger.info(
+             f"Could not retrieve model info for {model_id}, using ColoReg segmentation defaults."
+         )
          return {
              "name": "wg-lux/colo_segmentation_RegNetX800MF_base",
              "activation": "sigmoid",
@@ -295,18 +315,29 @@ def infer_default_model_meta_from_hf(model_id: str) -> dict[str, Any]:
          "size_y": size_y,
          "description": f"Inferred defaults for {model_id}",
      }
- 
- def setup_default_from_huggingface_logic(cls, model_id: str, labelset_name: str | None = None):
+
+
+ def setup_default_from_huggingface_logic(
+     cls, model_id: str, labelset_name: str | None = None
+ ):
      """
      Downloads model weights from Hugging Face and auto-fills ModelMeta fields.
      """
      meta = infer_default_model_meta_from_hf(model_id)

      # Download weights
-     weights_path = hf_hub_download(repo_id=model_id, filename="pytorch_model.bin", local_dir=WEIGHTS_DIR)
+     weights_path = hf_hub_download(
+         repo_id=model_id,
+         filename="colo_segmentation_RegNetX800MF_base.ckpt",
+         local_dir=WEIGHTS_DIR,
+     )

      ai_model, _ = AiModel.objects.get_or_create(name=meta["name"])
-     labelset = LabelSet.objects.first() if not labelset_name else LabelSet.objects.get(name=labelset_name)
+     labelset = (
+         LabelSet.objects.first()
+         if not labelset_name
+         else LabelSet.objects.get(name=labelset_name)
+     )

      return create_from_file_logic(
          cls,
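
For completeness, a hedged sketch of exercising the refactored helper directly. The module path comes from the RECORD entry for model_meta_logic.py; whether ModelMeta also exposes this as a classmethod is not visible in this diff, so the sketch calls the module-level function and passes the model class explicitly.

# Hypothetical usage of the refactored logic helper -- assumes Django is already configured.
from endoreg_db.models import ModelMeta
from endoreg_db.models.metadata.model_meta_logic import setup_default_from_huggingface_logic

# Downloads colo_segmentation_RegNetX800MF_base.ckpt into WEIGHTS_DIR (per the change above)
# and creates/updates the ModelMeta via create_from_file_logic.
meta = setup_default_from_huggingface_logic(
    ModelMeta,
    model_id="wg-lux/colo_segmentation_RegNetX800MF_base",
    labelset_name="multilabel_classification_colonoscopy_default",
)
print(meta)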
endoreg_db-0.8.3.2.dist-info/METADATA → endoreg_db-0.8.3.3.dist-info/METADATA
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: endoreg-db
- Version: 0.8.3.2
+ Version: 0.8.3.3
  Summary: EndoReg Db Django App
  Project-URL: Homepage, https://info.coloreg.de
  Project-URL: Repository, https://github.com/wg-lux/endoreg-db
endoreg_db-0.8.3.2.dist-info/RECORD → endoreg_db-0.8.3.3.dist-info/RECORD
@@ -248,6 +248,7 @@ endoreg_db/management/__init__.py,sha256=3dsK9Mizq1veuWTcvSOyWMFT9VI8wtyk-P2K9Ri
  endoreg_db/management/commands/__init__.py,sha256=Ch0jwQfNpOSr4O5KKMfYJ93dsesk1Afb-JtbRVyFXZs,21
  endoreg_db/management/commands/anonymize_video.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  endoreg_db/management/commands/check_auth.py,sha256=TPiYeCZ5QcqIvR33xhbqXunO2nrcNAmHb_izoMTqgpg,5390
+ endoreg_db/management/commands/create_model_meta_from_huggingface.py,sha256=RUuoBjTzdchuMY6qcwBENN7FTyTygPTZQBZYWwhugDc,3925
  endoreg_db/management/commands/create_multilabel_model_meta.py,sha256=qeoyqcF2CWcnhniVRrlYbmJmwNwyZb-VQ0pjkr6arJU,7566
  endoreg_db/management/commands/fix_missing_patient_data.py,sha256=5TPUTOQwI2fVh3Zd88o4ne0R8N_V98k0GZsI1gW0kGM,7766
  endoreg_db/management/commands/fix_video_paths.py,sha256=7LLwc38oX3B_tYWbLJA43Li_KBO3m5Lyw0CF6YqN5rU,7145
@@ -289,6 +290,7 @@ endoreg_db/management/commands/load_unit_data.py,sha256=tcux-iL-ByT2ApgmHEkLllZS
  endoreg_db/management/commands/load_user_groups.py,sha256=D7SK2FvZEHoE4TIXNGCjDw5_12MH9bpGZvoS7eEv0Os,1031
  endoreg_db/management/commands/register_ai_model.py,sha256=KixTfuQR6TUfRmzB5GOos16BFOz7NL4TzLzBkgtPPgE,2510
  endoreg_db/management/commands/reset_celery_schedule.py,sha256=U-m_FNRTw6LAwJoT9RUE4qrhmQXm7AyFToPcHYyJpIE,386
+ endoreg_db/management/commands/setup_endoreg_db.py,sha256=_mJkNB2IZNcgDQkOExUTkmmjp9qMwEiZH2KEJcyCi_Y,8635
  endoreg_db/management/commands/start_filewatcher.py,sha256=3jESBqRiYPa9f35--zd70qQaYnyT0tzRO_b_HJuyteQ,4093
  endoreg_db/management/commands/storage_management.py,sha256=NpToX59ndwTFNmnSoeppmiPdMvpjSHH7mAdIe4SvUoI,22396
  endoreg_db/management/commands/summarize_db_content.py,sha256=pOIz3qbY4Ktmh0zV_DKFx971VD0pPx027gCD7a47EL0,10766
@@ -462,7 +464,7 @@ endoreg_db/models/medical/risk/risk_type.py,sha256=kEugcaWSTEWH_Vxq4dcF80Iv1L4_K
  endoreg_db/models/metadata/__init__.py,sha256=8I6oLj3YTmeaPGJpL0AWG5gLwp38QzrEggxSkTisv7c,474
  endoreg_db/models/metadata/frame_ocr_result.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  endoreg_db/models/metadata/model_meta.py,sha256=F_r-PTLeNi4J-4EaGCQkGIguhdl7Bwba7_i56ZAjc-4,7589
- endoreg_db/models/metadata/model_meta_logic.py,sha256=27mqScxUTJXNUVc6CqAs5dXjspEsh0TWPmlxdJVulGc,12015
+ endoreg_db/models/metadata/model_meta_logic.py,sha256=6w1YX8hVq40UXbVN1fvDO9OljwekBZaDVHEjVZecoV8,12252
  endoreg_db/models/metadata/pdf_meta.py,sha256=BTmpSgqxmPKi0apcNjyrZAS4AFKCPXVdBd6VBeyyv6E,3174
  endoreg_db/models/metadata/sensitive_meta.py,sha256=ekLHrW-b5uYcjfkRd0EW5ncx5ef8Bu-K6msDkpWCAbk,13034
  endoreg_db/models/metadata/sensitive_meta_logic.py,sha256=Oh7ssZQEPfKGfRMF5nXKJpOIxXx-Xibd3rpOu-bQilk,29988
@@ -784,7 +786,7 @@ endoreg_db/views/video/video_meta.py,sha256=C1wBMTtQb_yzEUrhFGAy2UHEWMk_CbU75WXX
  endoreg_db/views/video/video_processing_history.py,sha256=mhFuS8RG5GV8E-lTtuD0qrq-bIpnUFp8vy9aERfC-J8,770
  endoreg_db/views/video/video_remove_frames.py,sha256=2FmvNrSPM0fUXiBxINN6vBUUDCqDlBkNcGR3WsLDgKo,1696
  endoreg_db/views/video/video_stream.py,sha256=kLyuf0ORTmsLeYUQkTQ6iRYqlIQozWhMMR3Lhfe_trk,12148
- endoreg_db-0.8.3.2.dist-info/METADATA,sha256=Hdg0xL9WKegEgoyGOY0vgwAX1UVB87Ph86WNsYgcSms,14758
- endoreg_db-0.8.3.2.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
- endoreg_db-0.8.3.2.dist-info/licenses/LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
- endoreg_db-0.8.3.2.dist-info/RECORD,,
+ endoreg_db-0.8.3.3.dist-info/METADATA,sha256=anKqQ1fidx7S7ca0cWHU1UHEDNI67ujUV-RO4IGgr1g,14758
+ endoreg_db-0.8.3.3.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+ endoreg_db-0.8.3.3.dist-info/licenses/LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
+ endoreg_db-0.8.3.3.dist-info/RECORD,,