ollamadiffuser-1.2.3-py3-none-any.whl → ollamadiffuser-2.0.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (36)
  1. ollamadiffuser/__init__.py +1 -1
  2. ollamadiffuser/api/server.py +312 -312
  3. ollamadiffuser/cli/config_commands.py +119 -0
  4. ollamadiffuser/cli/lora_commands.py +169 -0
  5. ollamadiffuser/cli/main.py +85 -1233
  6. ollamadiffuser/cli/model_commands.py +664 -0
  7. ollamadiffuser/cli/recommend_command.py +205 -0
  8. ollamadiffuser/cli/registry_commands.py +197 -0
  9. ollamadiffuser/core/config/model_registry.py +562 -11
  10. ollamadiffuser/core/config/settings.py +24 -2
  11. ollamadiffuser/core/inference/__init__.py +5 -0
  12. ollamadiffuser/core/inference/base.py +182 -0
  13. ollamadiffuser/core/inference/engine.py +204 -1405
  14. ollamadiffuser/core/inference/strategies/__init__.py +1 -0
  15. ollamadiffuser/core/inference/strategies/controlnet_strategy.py +170 -0
  16. ollamadiffuser/core/inference/strategies/flux_strategy.py +136 -0
  17. ollamadiffuser/core/inference/strategies/generic_strategy.py +164 -0
  18. ollamadiffuser/core/inference/strategies/gguf_strategy.py +113 -0
  19. ollamadiffuser/core/inference/strategies/hidream_strategy.py +104 -0
  20. ollamadiffuser/core/inference/strategies/sd15_strategy.py +134 -0
  21. ollamadiffuser/core/inference/strategies/sd3_strategy.py +80 -0
  22. ollamadiffuser/core/inference/strategies/sdxl_strategy.py +131 -0
  23. ollamadiffuser/core/inference/strategies/video_strategy.py +108 -0
  24. ollamadiffuser/mcp/__init__.py +0 -0
  25. ollamadiffuser/mcp/server.py +184 -0
  26. ollamadiffuser/ui/templates/index.html +62 -1
  27. ollamadiffuser/ui/web.py +116 -54
  28. {ollamadiffuser-1.2.3.dist-info → ollamadiffuser-2.0.0.dist-info}/METADATA +321 -108
  29. ollamadiffuser-2.0.0.dist-info/RECORD +61 -0
  30. {ollamadiffuser-1.2.3.dist-info → ollamadiffuser-2.0.0.dist-info}/WHEEL +1 -1
  31. {ollamadiffuser-1.2.3.dist-info → ollamadiffuser-2.0.0.dist-info}/entry_points.txt +1 -0
  32. ollamadiffuser/core/models/registry.py +0 -384
  33. ollamadiffuser/ui/samples/.DS_Store +0 -0
  34. ollamadiffuser-1.2.3.dist-info/RECORD +0 -45
  35. {ollamadiffuser-1.2.3.dist-info → ollamadiffuser-2.0.0.dist-info}/licenses/LICENSE +0 -0
  36. {ollamadiffuser-1.2.3.dist-info → ollamadiffuser-2.0.0.dist-info}/top_level.txt +0 -0
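The file list above reflects the main structural change in 2.0.0: the monolithic CLI (cli/main.py, -1233 lines) and inference engine (core/inference/engine.py, -1405 lines) are split into per-topic command modules and per-model-family strategy modules under core/inference/strategies/, backed by a new core/inference/base.py. A minimal sketch of what such a strategy split typically looks like follows; the class and method names are hypothetical and are not taken from the package source.

# Hypothetical sketch of a strategy-style split like the one implied by
# base.py + strategies/*.py; names here are illustrative, not the package's API.
from abc import ABC, abstractmethod
from typing import Any


class InferenceStrategy(ABC):
    """One subclass per model family (SD 1.5, SDXL, SD3, FLUX, GGUF, video, ...)."""

    @abstractmethod
    def load(self, model_path: str, **kwargs: Any) -> None:
        ...

    @abstractmethod
    def generate(self, prompt: str, **params: Any) -> Any:
        ...


class SDXLStrategy(InferenceStrategy):
    def load(self, model_path: str, **kwargs: Any) -> None:
        # e.g. construct an SDXL pipeline from model_path here
        ...

    def generate(self, prompt: str, **params: Any) -> Any:
        # run the pipeline and return the generated image(s)
        ...


def pick_strategy(model_type: str) -> InferenceStrategy:
    # A thin engine can dispatch on a model_type field from the registry.
    strategies = {"sdxl": SDXLStrategy}
    return strategies[model_type]()

A split along these lines keeps engine.py as a small dispatcher (its RECORD size drops from ~74 KB in 1.2.3 to ~8 KB in 2.0.0) while each model family's quirks live in their own module.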
@@ -0,0 +1,61 @@
+ ollamadiffuser/__init__.py,sha256=4QEEVdgJd6EtYyjBBJHPhhTmUxP0v0Dxj6M5hZmXJCg,1127
+ ollamadiffuser/__main__.py,sha256=tNWMvEHq4ddtKLp7DrhIoOdnFw3F8RNrETC_u5xpkFI,141
+ ollamadiffuser/api/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ ollamadiffuser/api/server.py,sha256=MqEKjckz8x8pU9dhZlLXET5XWt7ERfVcAfvdVfDLFWw,15464
+ ollamadiffuser/cli/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ ollamadiffuser/cli/commands.py,sha256=Pe0vyfGiffwd10QlVxBCTtNnMqHi8nJ3oNn_k8nAi5k,8903
+ ollamadiffuser/cli/config_commands.py,sha256=7qWwBFGQsL0UpJlLwqOSXmAAmoZmJmXcxyIyGZ1mw3w,4066
+ ollamadiffuser/cli/lora_commands.py,sha256=3d6VdFV1BL9tdA6slBCFAtlJeSBNQ3gmL4Oa2GNZ-4E,5767
+ ollamadiffuser/cli/main.py,sha256=ZvH9eDgKBX5TnIQKU__tFIRqDw-5sorp2qOrQOlV6XA,4207
+ ollamadiffuser/cli/model_commands.py,sha256=Q_fHxgtcjgXVvfcRL10uonRQct5p-rvOcdiFs2bk6-o,28493
+ ollamadiffuser/cli/recommend_command.py,sha256=OnbsDpOBuIp4ATB_Xk1PNudgPdtUfjxrdbQYfMlpBLw,6774
+ ollamadiffuser/cli/registry_commands.py,sha256=Ncnv5zGAB8ZYehS5ukP1nwRJKMBzwWM5wdHohJNOm_g,6836
+ ollamadiffuser/core/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ ollamadiffuser/core/config/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ ollamadiffuser/core/config/model_registry.py,sha256=x1fDf6vplm-m1wTr-8ATVhB-J_vomiTtkoZwDoaAl20,53793
+ ollamadiffuser/core/config/settings.py,sha256=Bu6sKoPAZq02vHlHDv41NuKT6q0u9U6yA3a66t6zRCE,5940
+ ollamadiffuser/core/inference/__init__.py,sha256=ppFbZKmTUBG2W8Pq36t6pK12wjGxcK1A45UexBWnUHE,124
+ ollamadiffuser/core/inference/base.py,sha256=AyoM6j37nhhh5RXQeH9Ycn9x1_eRPQQfC5nwfWPIjIM,6284
+ ollamadiffuser/core/inference/engine.py,sha256=xUCSQmGke9yAIoKGzh9jRaH1XE3GP9-uGM2hseuUzao,8139
+ ollamadiffuser/core/inference/strategies/__init__.py,sha256=5LQgTeS5JVin-HiGX7UvjlPzd0awombKrNhvAVr3SSw,53
+ ollamadiffuser/core/inference/strategies/controlnet_strategy.py,sha256=_lGlCaYhrrdn7N6Aw0X9a4L90wKPjCrr6EBfQqPVH2E,6712
+ ollamadiffuser/core/inference/strategies/flux_strategy.py,sha256=NPlwoKC9TsoVOkCLP0Gzf33D2cWg-77McO6_hYWWbsU,5216
+ ollamadiffuser/core/inference/strategies/generic_strategy.py,sha256=fGD4nnUspSlOtK7XWTjDJRRN5uDfQIQIuefBJSJhCEA,6843
+ ollamadiffuser/core/inference/strategies/gguf_strategy.py,sha256=kIGT85tDCcSsliXdaxEJoQz4Gm7Xt7TfEcu6xcmTvJg,3893
+ ollamadiffuser/core/inference/strategies/hidream_strategy.py,sha256=D1BeqEXiMRzJER5SEPAGJAGm9B_lnczMM94wu6sVrHE,3707
+ ollamadiffuser/core/inference/strategies/sd15_strategy.py,sha256=qz5eGA2xkcA_3oNywP-rCliXzP7jYpH60728QmOT5fw,4966
+ ollamadiffuser/core/inference/strategies/sd3_strategy.py,sha256=6DjWebeyjaH7jiRm8hf2ismkJ3Gth69u71enVgMMPi8,2772
+ ollamadiffuser/core/inference/strategies/sdxl_strategy.py,sha256=tslfENJIvEhDuj1D6aClFF6hv8i0JO2PukFQZsTCwQY,5137
+ ollamadiffuser/core/inference/strategies/video_strategy.py,sha256=xJJU5GbHol8SMpNBllga8AkjQRTgZ0sZUkAwFIoJqk8,3939
+ ollamadiffuser/core/models/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ ollamadiffuser/core/models/gguf_loader.py,sha256=ocfl3_MDVXC9nSjW8YJdz4kX1Q-Qe2ltu6w4fbqhxVY,35724
+ ollamadiffuser/core/models/manager.py,sha256=rTEAameGih3wPcVG_Y-4k_brBeEqEoBjoI7fjggNtiY,16799
+ ollamadiffuser/core/utils/__init__.py,sha256=ZdXZWX1hfDnnV6OmRD6UStNljDJIQ892da2CtC-zdDw,31
+ ollamadiffuser/core/utils/controlnet_preprocessors.py,sha256=v21X_Bk-a4gKbUZUKoeP2W8TSGlv-ST8IYNsn3NrZ2c,15446
+ ollamadiffuser/core/utils/download_utils.py,sha256=dqHf7loKSqDHzgQEV-p1kDwoHxwlgurLPX8J96vD5fA,22421
+ ollamadiffuser/core/utils/lora_manager.py,sha256=SrZydPSGJqCS_Vek35bEdG2Q51qCOLZmPvnNzUjjIN0,14328
+ ollamadiffuser/mcp/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ ollamadiffuser/mcp/server.py,sha256=ez_VQ0iFneUoyb97Wqk3kj6VVtMS5MmA4zwCPxV0imc,6271
+ ollamadiffuser/ui/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ ollamadiffuser/ui/web.py,sha256=exa_TXtTCOHR4a4l0o_y4ubBUoxtuvTlqsPgXwXPc9E,29741
+ ollamadiffuser/ui/samples/metadata.json,sha256=ZDlbE__qlihTdwv8Ao_1Hs0557Qqdksduo_OC9VxX3o,3286
+ ollamadiffuser/ui/samples/canny/geometric_shapes.png,sha256=-rkuIgFlS3be4WqIjC8D_H6ltIlQf08hXNKPbz2i48I,3833
+ ollamadiffuser/ui/samples/canny/house_outline.png,sha256=UvEh9A8e-cJiuMagggugowhkYw_YKMf0eCfU-Iw3q9A,2703
+ ollamadiffuser/ui/samples/canny/portrait_outline.png,sha256=FIhkgBsw0ermJFtLfP0AekszkU57feXK38uENIV1ZnE,3152
+ ollamadiffuser/ui/samples/depth/linear_perspective.png,sha256=pJV1wQD-4m5nGU4f93PVRlhqx2p8WhaaYVpKHnu-n1o,1943
+ ollamadiffuser/ui/samples/depth/radial_gradient.png,sha256=QXxZDtFyn6iaXkrhVuAQCnPqju1EIrqQbVH2cKYGR5E,18464
+ ollamadiffuser/ui/samples/depth/sphere_3d.png,sha256=43Zm8YTtZ__iuPUXoIynhF0AiWaQlPYpcPtQov0GZuI,20842
+ ollamadiffuser/ui/samples/openpose/running_pose.png,sha256=T_5qMe4wW-fAae0NA4-ma7f9D5wbXT3vROJOhI_773g,2266
+ ollamadiffuser/ui/samples/openpose/sitting_pose.png,sha256=JO7F_IoS8eUbyl-J68fIHQ0cj35XMVvWKJ1Yez_dYZ4,2426
+ ollamadiffuser/ui/samples/openpose/standing_pose.png,sha256=wfgI2aIPi_TpXYgvuan7mUjx7RwmyC0jUKT5A3idPz4,2326
+ ollamadiffuser/ui/samples/scribble/car_sketch.png,sha256=Y7pUIDrWZf3z9pJamNhlx5-nGiA3B64eEvW14qLz5TY,2244
+ ollamadiffuser/ui/samples/scribble/face_sketch.png,sha256=MVVYy_aS48xoS_RnIDzLUaDQ8m_y55TuAAP7X5P_xtk,3025
+ ollamadiffuser/ui/samples/scribble/tree_sketch.png,sha256=3P-NGgW25xRwreDxiBYKcDhd2oHZAwKSkjNVM5oPTWY,3017
+ ollamadiffuser/ui/templates/index.html,sha256=XcrYZqtDR65dAiu959Ea19t3MbtYmXl9PVyMnR1Telk,42358
+ ollamadiffuser/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ ollamadiffuser-2.0.0.dist-info/licenses/LICENSE,sha256=cnGL9l2P510Uk3TCnv62kot6vAfdSawhOZh7Y-oYoIE,1071
+ ollamadiffuser-2.0.0.dist-info/METADATA,sha256=em3gkEIu5lTqo_3QXReRM7rzi011KEFPZMVJgXHjvOw,31378
+ ollamadiffuser-2.0.0.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
+ ollamadiffuser-2.0.0.dist-info/entry_points.txt,sha256=Bp-ZzV3F7QpQu02Mcafeza-oTMjDslomz9qrhvfcQUA,116
+ ollamadiffuser-2.0.0.dist-info/top_level.txt,sha256=97wOGgTCxDE765Nr_o7B4Kwr_M_jy8fCCeQ81sMKlC4,15
+ ollamadiffuser-2.0.0.dist-info/RECORD,,
@@ -1,5 +1,5 @@
  Wheel-Version: 1.0
- Generator: setuptools (80.9.0)
+ Generator: setuptools (80.10.2)
  Root-Is-Purelib: true
  Tag: py3-none-any
 
@@ -1,2 +1,3 @@
  [console_scripts]
  ollamadiffuser = ollamadiffuser.__main__:main
+ ollamadiffuser-mcp = ollamadiffuser.mcp.server:main
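The added console script registers an ollamadiffuser-mcp command that resolves to the main callable in ollamadiffuser/mcp/server.py. Below is a minimal sketch of the kind of zero-argument callable a setuptools console-script target resolves to; the flags and body are illustrative assumptions, not the package's actual implementation.

# Illustrative shape of a "module:main" console-script target; the
# options below are assumptions, not taken from ollamadiffuser.
import argparse
import sys


def main() -> int:
    parser = argparse.ArgumentParser(prog="ollamadiffuser-mcp")
    parser.add_argument("--host", default="127.0.0.1")  # assumed option, for illustration
    parser.add_argument("--port", type=int, default=8080)  # assumed option, for illustration
    args = parser.parse_args()
    print(f"would start the MCP server on {args.host}:{args.port}")
    return 0


if __name__ == "__main__":
    sys.exit(main())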
@@ -1,384 +0,0 @@
- """
- Dynamic Model Registry - Similar to Ollama's approach
- Fetches model information from external sources with local fallbacks
- """
-
- import json
- import logging
- import requests
- import yaml
- from pathlib import Path
- from typing import Dict, Any, Optional, List
- from datetime import datetime, timedelta
- import hashlib
-
- logger = logging.getLogger(__name__)
-
- class ModelRegistry:
-     """
-     Dynamic model registry that can fetch from external sources
-     Similar to how Ollama manages their model library
-     """
-
-     def __init__(self, cache_dir: Optional[Path] = None):
-         self.cache_dir = cache_dir or Path.home() / ".ollamadiffuser" / "registry"
-         self.cache_dir.mkdir(parents=True, exist_ok=True)
-
-         # Load configuration
-         self._load_config()
-
-         # Cache settings from config
-         self.cache_duration = timedelta(hours=self.config.get('cache_duration_hours', 24))
-         self.registry_cache_file = self.cache_dir / "models.json"
-         self.last_update_file = self.cache_dir / "last_update.txt"
-
-         # Registry sources from config
-         self.registry_sources = self.config.get('sources', [])
-
-         # Local models (built-in fallback)
-         self._builtin_models = self._load_builtin_models()
-
-         # Cached models
-         self._cached_models = {}
-         self._load_cache()
-
-     def _load_config(self):
-         """Load registry configuration from YAML file"""
-         try:
-             # Try to find config file
-             config_paths = [
-                 Path(__file__).parent.parent.parent / "config" / "registry.yaml",
-                 Path.home() / ".ollamadiffuser" / "registry.yaml",
-                 Path("/etc/ollamadiffuser/registry.yaml")
-             ]
-
-             config = {}
-             for config_path in config_paths:
-                 if config_path.exists():
-                     with open(config_path, 'r') as f:
-                         config = yaml.safe_load(f)
-                     logger.debug(f"Loaded config from {config_path}")
-                     break
-
-             # Use registry section if it exists
-             self.config = config.get('registry', {})
-
-             # Set defaults if no config found
-             if not self.config:
-                 logger.warning("No registry config found, using defaults")
-                 self.config = {
-                     'cache_duration_hours': 24,
-                     'sources': [
-                         {
-                             "name": "builtin",
-                             "url": None,
-                             "timeout": 10,
-                             "enabled": True,
-                             "description": "Built-in models only"
-                         }
-                     ]
-                 }
-
-         except Exception as e:
-             logger.warning(f"Failed to load registry config: {e}")
-             self.config = {'cache_duration_hours': 24, 'sources': []}
-
-     def _load_builtin_models(self) -> Dict[str, Any]:
-         """Load built-in model definitions as fallback"""
-         return {
-             # FLUX.1 models
-             "flux.1-dev": {
-                 "name": "flux.1-dev",
-                 "repo_id": "black-forest-labs/FLUX.1-dev",
-                 "model_type": "flux",
-                 "description": "High-quality text-to-image model from Black Forest Labs",
-                 "license": {"type": "Non-commercial", "commercial_use": False},
-                 "size_gb": 23.8,
-                 "hardware_requirements": {
-                     "min_vram_gb": 12,
-                     "recommended_vram_gb": 24,
-                     "min_ram_gb": 16,
-                     "recommended_ram_gb": 32
-                 },
-                 "parameters": {
-                     "num_inference_steps": 50,
-                     "guidance_scale": 3.5,
-                     "max_sequence_length": 512
-                 },
-                 "tags": ["flux", "high-quality", "non-commercial"],
-                 "downloads": 250000,
-                 "updated": "2024-12-01"
-             },
-
-             "flux.1-schnell": {
-                 "name": "flux.1-schnell",
-                 "repo_id": "black-forest-labs/FLUX.1-schnell",
-                 "model_type": "flux",
-                 "description": "Fast text-to-image model optimized for speed",
-                 "license": {"type": "Apache 2.0", "commercial_use": True},
-                 "size_gb": 23.8,
-                 "hardware_requirements": {
-                     "min_vram_gb": 12,
-                     "recommended_vram_gb": 24,
-                     "min_ram_gb": 16,
-                     "recommended_ram_gb": 32
-                 },
-                 "parameters": {
-                     "num_inference_steps": 4,
-                     "guidance_scale": 0.0,
-                     "max_sequence_length": 512
-                 },
-                 "tags": ["flux", "fast", "commercial", "apache"],
-                 "downloads": 180000,
-                 "updated": "2024-12-01"
-             },
-
-             # GGUF variants - generate dynamically
-             **self._generate_gguf_variants()
-         }
-
-     def _generate_gguf_variants(self) -> Dict[str, Any]:
-         """Generate GGUF model variants dynamically"""
-         base_gguf = {
-             "repo_id": "city96/FLUX.1-dev-gguf",
-             "model_type": "flux_gguf",
-             "description": "Quantized FLUX.1-dev model for efficient inference",
-             "license": {"type": "Non-commercial", "commercial_use": False},
-             "tags": ["flux", "gguf", "quantized", "efficient"],
-             "updated": "2024-12-01"
-         }
-
-         variants = {
-             "q2_k": {"size_gb": 4.03, "vram_gb": 4, "description": "Ultra-light quantization"},
-             "q3_k_s": {"size_gb": 5.23, "vram_gb": 5, "description": "Light quantization"},
-             "q4_k_s": {"size_gb": 6.81, "vram_gb": 6, "description": "Recommended quantization", "recommended": True},
-             "q4_0": {"size_gb": 6.79, "vram_gb": 6, "description": "Alternative Q4 quantization"},
-             "q4_1": {"size_gb": 7.53, "vram_gb": 7, "description": "Higher quality Q4"},
-             "q5_k_s": {"size_gb": 8.29, "vram_gb": 8, "description": "High quality quantization"},
-             "q5_0": {"size_gb": 8.27, "vram_gb": 8, "description": "Alternative Q5 quantization"},
-             "q5_1": {"size_gb": 9.01, "vram_gb": 9, "description": "Highest Q5 quality"},
-             "q6_k": {"size_gb": 9.86, "vram_gb": 10, "description": "Very high quality"},
-             "q8_0": {"size_gb": 12.7, "vram_gb": 12, "description": "Near-original quality"},
-             "f16": {"size_gb": 23.8, "vram_gb": 24, "description": "Full precision"}
-         }
-
-         gguf_models = {}
-         for variant, info in variants.items():
-             model_name = f"flux.1-dev-gguf:{variant}"
-             gguf_models[model_name] = {
-                 **base_gguf,
-                 "name": model_name,
-                 "variant": variant,
-                 "file_name": f"flux1-dev-{variant.upper()}.gguf",
-                 "quantization": variant.upper(),
-                 "size_gb": info["size_gb"],
-                 "description": f"{base_gguf['description']} - {info['description']}",
-                 "hardware_requirements": {
-                     "min_vram_gb": info["vram_gb"],
-                     "recommended_vram_gb": info["vram_gb"] + 2,
-                     "min_ram_gb": 8,
-                     "recommended_ram_gb": 16
-                 },
-                 "parameters": {
-                     "num_inference_steps": 16,
-                     "guidance_scale": 2.0,
-                     "max_sequence_length": 512
-                 },
-                 "downloads": 50000 - (info["vram_gb"] * 1000),  # Simulate popularity
-                 "recommended": info.get("recommended", False)
-             }
-
-         return gguf_models
-
-     def _load_cache(self):
-         """Load cached model registry"""
-         try:
-             if self.registry_cache_file.exists():
-                 with open(self.registry_cache_file, 'r') as f:
-                     self._cached_models = json.load(f)
-                 logger.debug(f"Loaded {len(self._cached_models)} models from cache")
-         except Exception as e:
-             logger.warning(f"Failed to load model cache: {e}")
-             self._cached_models = {}
-
-     def _save_cache(self, models: Dict[str, Any]):
-         """Save model registry to cache"""
-         try:
-             with open(self.registry_cache_file, 'w') as f:
-                 json.dump(models, f, indent=2)
-
-             with open(self.last_update_file, 'w') as f:
-                 f.write(datetime.now().isoformat())
-
-             logger.debug(f"Saved {len(models)} models to cache")
-         except Exception as e:
-             logger.warning(f"Failed to save model cache: {e}")
-
-     def _is_cache_expired(self) -> bool:
-         """Check if cache is expired"""
-         try:
-             if not self.last_update_file.exists():
-                 return True
-
-             with open(self.last_update_file, 'r') as f:
-                 last_update = datetime.fromisoformat(f.read().strip())
-
-             return datetime.now() - last_update > self.cache_duration
-         except:
-             return True
-
-     def _fetch_from_source(self, source: Dict[str, Any]) -> Optional[Dict[str, Any]]:
-         """Fetch models from a specific source"""
-         try:
-             logger.debug(f"Fetching models from {source['name']}: {source['url']}")
-
-             response = requests.get(
-                 source['url'],
-                 timeout=source['timeout'],
-                 headers={'User-Agent': 'OllamaDiffuser/1.0'}
-             )
-             response.raise_for_status()
-
-             data = response.json()
-
-             # Normalize the data format
-             if 'models' in data:
-                 models = data['models']
-             elif isinstance(data, dict):
-                 models = data
-             else:
-                 logger.warning(f"Unexpected data format from {source['name']}")
-                 return None
-
-             logger.info(f"Fetched {len(models)} models from {source['name']}")
-             return models
-
-         except requests.exceptions.Timeout:
-             logger.warning(f"Timeout fetching from {source['name']}")
-         except requests.exceptions.RequestException as e:
-             logger.warning(f"Failed to fetch from {source['name']}: {e}")
-         except json.JSONDecodeError as e:
-             logger.warning(f"Invalid JSON from {source['name']}: {e}")
-         except Exception as e:
-             logger.warning(f"Unexpected error fetching from {source['name']}: {e}")
-
-         return None
-
-     def refresh(self, force: bool = False) -> bool:
-         """Refresh model registry from external sources"""
-         if not force and not self._is_cache_expired():
-             logger.debug("Cache is still fresh, skipping refresh")
-             return True
-
-         logger.info("Refreshing model registry...")
-
-         # Try each source in priority order
-         for source in self.registry_sources:
-             if not source.get('enabled', True):
-                 continue
-
-             models = self._fetch_from_source(source)
-             if models:
-                 # Merge with built-in models
-                 combined_models = {**self._builtin_models, **models}
-
-                 # Update cache
-                 self._cached_models = combined_models
-                 self._save_cache(combined_models)
-
-                 logger.info(f"Successfully updated registry with {len(combined_models)} models")
-                 return True
-
-         logger.warning("Failed to fetch from any source, using cached/builtin models")
-         return False
-
-     def get_models(self, refresh: bool = False) -> Dict[str, Any]:
-         """Get all available models"""
-         if refresh or not self._cached_models:
-             self.refresh()
-
-         # Return cached models if available, otherwise built-in
-         return self._cached_models if self._cached_models else self._builtin_models
-
-     def get_model(self, model_name: str, refresh: bool = False) -> Optional[Dict[str, Any]]:
-         """Get specific model information"""
-         models = self.get_models(refresh=refresh)
-         return models.get(model_name)
-
-     def search_models(self, query: str = "", tags: List[str] = None, model_type: str = None) -> Dict[str, Any]:
-         """Search models by query, tags, or type"""
-         models = self.get_models()
-         results = {}
-
-         query_lower = query.lower()
-         tags = tags or []
-
-         for name, model in models.items():
-             # Check query match
-             query_match = (
-                 not query or
-                 query_lower in name.lower() or
-                 query_lower in model.get('description', '').lower()
-             )
-
-             # Check type match
-             type_match = not model_type or model.get('model_type') == model_type
-
-             # Check tags match
-             model_tags = model.get('tags', [])
-             tags_match = not tags or any(tag in model_tags for tag in tags)
-
-             if query_match and type_match and tags_match:
-                 results[name] = model
-
-         return results
-
-     def get_popular_models(self, limit: int = 10) -> Dict[str, Any]:
-         """Get most popular models"""
-         models = self.get_models()
-
-         # Sort by downloads
-         sorted_models = sorted(
-             models.items(),
-             key=lambda x: x[1].get('downloads', 0),
-             reverse=True
-         )
-
-         return dict(sorted_models[:limit])
-
-     def get_recommended_models(self) -> Dict[str, Any]:
-         """Get recommended models"""
-         models = self.get_models()
-
-         return {
-             name: model for name, model in models.items()
-             if model.get('recommended', False)
-         }
-
-     def add_local_model(self, model_name: str, model_config: Dict[str, Any]):
-         """Add a local model configuration"""
-         # Add to current models
-         current_models = self.get_models()
-         current_models[model_name] = model_config
-
-         # Save to cache
-         self._save_cache(current_models)
-         self._cached_models = current_models
-
-         logger.info(f"Added local model: {model_name}")
-
-     def remove_local_model(self, model_name: str) -> bool:
-         """Remove a local model configuration"""
-         current_models = self.get_models()
-
-         if model_name in current_models:
-             del current_models[model_name]
-             self._save_cache(current_models)
-             self._cached_models = current_models
-             logger.info(f"Removed local model: {model_name}")
-             return True
-
-         return False
-
- # Global registry instance
- model_registry = ModelRegistry()
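For reference, the removed module exposed a small query API on a module-level model_registry instance. The sketch below exercises only methods defined in the deleted code above; the import path is the 1.2.3 layout (core/models/registry.py), which 2.0.0 replaces with core/config/model_registry.py.

# Usage sketch against the removed 1.2.3 ModelRegistry shown above.
from ollamadiffuser.core.models.registry import model_registry  # 1.2.3 path only

model_registry.refresh(force=True)                     # re-fetch from configured sources
flux = model_registry.get_model("flux.1-dev")          # falls back to the built-in entry
quantized = model_registry.search_models(tags=["gguf"])
popular = model_registry.get_popular_models(limit=5)
print(flux["repo_id"], len(quantized), list(popular))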
Binary file not shown: ollamadiffuser/ui/samples/.DS_Store (removed)
@@ -1,45 +0,0 @@
- ollamadiffuser/__init__.py,sha256=XpeN67zInXOAhQe1fQf-0ugJVSPPW6AeAEr-odcq-Ug,1127
- ollamadiffuser/__main__.py,sha256=tNWMvEHq4ddtKLp7DrhIoOdnFw3F8RNrETC_u5xpkFI,141
- ollamadiffuser/api/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- ollamadiffuser/api/server.py,sha256=kc-Up50zmLfSM4f4InYR4Btkl35lyEN6UFcxlvxICSQ,18237
- ollamadiffuser/cli/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- ollamadiffuser/cli/commands.py,sha256=Pe0vyfGiffwd10QlVxBCTtNnMqHi8nJ3oNn_k8nAi5k,8903
- ollamadiffuser/cli/main.py,sha256=qj0VKTOjw_gox2dPVtbU-9kCo25TqshyVyqW8qsv4Pk,56081
- ollamadiffuser/core/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- ollamadiffuser/core/config/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- ollamadiffuser/core/config/model_registry.py,sha256=o408Qk-UQJ4NBM-AcyWAj4EhRUzrvl6zdOkKk1-ATHg,31238
- ollamadiffuser/core/config/settings.py,sha256=VhI1vLGmOAQ7-XtyHrT5KoMpcGeGt-Mij-9NxX_ZKsI,4881
- ollamadiffuser/core/inference/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- ollamadiffuser/core/inference/engine.py,sha256=-EVcH4NyRVBQ-puvI_Az3KOZJYG9b-ySzsvoBTY3mlY,73962
- ollamadiffuser/core/models/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- ollamadiffuser/core/models/gguf_loader.py,sha256=ocfl3_MDVXC9nSjW8YJdz4kX1Q-Qe2ltu6w4fbqhxVY,35724
- ollamadiffuser/core/models/manager.py,sha256=rTEAameGih3wPcVG_Y-4k_brBeEqEoBjoI7fjggNtiY,16799
- ollamadiffuser/core/models/registry.py,sha256=YPx3xcHnCHogyowi9fQ6oXZg7_jz0fM5bDyyg-BgSFY,15125
- ollamadiffuser/core/utils/__init__.py,sha256=ZdXZWX1hfDnnV6OmRD6UStNljDJIQ892da2CtC-zdDw,31
- ollamadiffuser/core/utils/controlnet_preprocessors.py,sha256=v21X_Bk-a4gKbUZUKoeP2W8TSGlv-ST8IYNsn3NrZ2c,15446
- ollamadiffuser/core/utils/download_utils.py,sha256=dqHf7loKSqDHzgQEV-p1kDwoHxwlgurLPX8J96vD5fA,22421
- ollamadiffuser/core/utils/lora_manager.py,sha256=SrZydPSGJqCS_Vek35bEdG2Q51qCOLZmPvnNzUjjIN0,14328
- ollamadiffuser/ui/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- ollamadiffuser/ui/web.py,sha256=hK41sF9uVR_14lc4AlTXaGjw5m7aiJXpVp1cSOOmkT8,27715
- ollamadiffuser/ui/samples/.DS_Store,sha256=LcBPjptcaMu-icn9SdgIBfMMGa2R7DzMwMVd1cODPiU,6148
- ollamadiffuser/ui/samples/metadata.json,sha256=ZDlbE__qlihTdwv8Ao_1Hs0557Qqdksduo_OC9VxX3o,3286
- ollamadiffuser/ui/samples/canny/geometric_shapes.png,sha256=-rkuIgFlS3be4WqIjC8D_H6ltIlQf08hXNKPbz2i48I,3833
- ollamadiffuser/ui/samples/canny/house_outline.png,sha256=UvEh9A8e-cJiuMagggugowhkYw_YKMf0eCfU-Iw3q9A,2703
- ollamadiffuser/ui/samples/canny/portrait_outline.png,sha256=FIhkgBsw0ermJFtLfP0AekszkU57feXK38uENIV1ZnE,3152
- ollamadiffuser/ui/samples/depth/linear_perspective.png,sha256=pJV1wQD-4m5nGU4f93PVRlhqx2p8WhaaYVpKHnu-n1o,1943
- ollamadiffuser/ui/samples/depth/radial_gradient.png,sha256=QXxZDtFyn6iaXkrhVuAQCnPqju1EIrqQbVH2cKYGR5E,18464
- ollamadiffuser/ui/samples/depth/sphere_3d.png,sha256=43Zm8YTtZ__iuPUXoIynhF0AiWaQlPYpcPtQov0GZuI,20842
- ollamadiffuser/ui/samples/openpose/running_pose.png,sha256=T_5qMe4wW-fAae0NA4-ma7f9D5wbXT3vROJOhI_773g,2266
- ollamadiffuser/ui/samples/openpose/sitting_pose.png,sha256=JO7F_IoS8eUbyl-J68fIHQ0cj35XMVvWKJ1Yez_dYZ4,2426
- ollamadiffuser/ui/samples/openpose/standing_pose.png,sha256=wfgI2aIPi_TpXYgvuan7mUjx7RwmyC0jUKT5A3idPz4,2326
- ollamadiffuser/ui/samples/scribble/car_sketch.png,sha256=Y7pUIDrWZf3z9pJamNhlx5-nGiA3B64eEvW14qLz5TY,2244
- ollamadiffuser/ui/samples/scribble/face_sketch.png,sha256=MVVYy_aS48xoS_RnIDzLUaDQ8m_y55TuAAP7X5P_xtk,3025
- ollamadiffuser/ui/samples/scribble/tree_sketch.png,sha256=3P-NGgW25xRwreDxiBYKcDhd2oHZAwKSkjNVM5oPTWY,3017
- ollamadiffuser/ui/templates/index.html,sha256=qTQVFxiTbeZ90O-iNqWC_4pYP6yyIs2z6U69VJPqAB4,38176
- ollamadiffuser/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- ollamadiffuser-1.2.3.dist-info/licenses/LICENSE,sha256=cnGL9l2P510Uk3TCnv62kot6vAfdSawhOZh7Y-oYoIE,1071
- ollamadiffuser-1.2.3.dist-info/METADATA,sha256=2Wmn95NaJekTIbqn1Ic-mtKA9Tx6Y_7nUklONwu0t5s,22134
- ollamadiffuser-1.2.3.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- ollamadiffuser-1.2.3.dist-info/entry_points.txt,sha256=tHXXO3N0GSnIobDe_eSOLfHPjjVFjeTg2Fd-APoD6sY,64
- ollamadiffuser-1.2.3.dist-info/top_level.txt,sha256=97wOGgTCxDE765Nr_o7B4Kwr_M_jy8fCCeQ81sMKlC4,15
- ollamadiffuser-1.2.3.dist-info/RECORD,,